What Matters for Neural Cross-Lingual Named Entity Recognition: An Empirical Analysis. Huang, X., May, J., & Peng, N. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 6394–6400, Hong Kong, China, November, 2019. Association for Computational Linguistics. Abstract: Building named entity recognition (NER) models for languages that do not have much training data is a challenging task. While recent work has shown promising results on cross-lingual transfer from high-resource languages, it is unclear what knowledge is transferred. In this paper, we first propose a simple and efficient neural architecture for cross-lingual NER. Experiments show that our model achieves competitive performance with the state-of-the-art. We further explore how transfer learning works for cross-lingual NER on two transferable factors: sequential order and multilingual embedding. Our results shed light on future research for improving cross-lingual NER.
@inproceedings{huang-etal-2019-matters,
  title     = {What Matters for Neural Cross-Lingual Named Entity Recognition: An Empirical Analysis},
  author    = {Huang, Xiaolei and
               May, Jonathan and
               Peng, Nanyun},
  booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing ({EMNLP}-{IJCNLP})},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://www.aclweb.org/anthology/D19-1672},
  doi       = {10.18653/v1/D19-1672},
  pages     = {6394--6400},
  abstract  = {Building named entity recognition (NER) models for languages that do not have much training data is a challenging task. While recent work has shown promising results on cross-lingual transfer from high-resource languages, it is unclear what knowledge is transferred. In this paper, we first propose a simple and efficient neural architecture for cross-lingual NER. Experiments show that our model achieves competitive performance with the state-of-the-art. We further explore how transfer learning works for cross-lingual NER on two transferable factors: sequential order and multilingual embedding. Our results shed light on future research for improving cross-lingual NER.},
}
Downloads: 0
{"_id":"sNSwmuyjKSk2fpvFc","bibbaseid":"huang-may-peng-whatmattersforneuralcrosslingualnamedentityrecognitionanempiricalanalysis-2019","author_short":["Huang, X.","May, J.","Peng, N."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"What Matters for Neural Cross-Lingual Named Entity Recognition: An Empirical Analysis","author":[{"propositions":[],"lastnames":["Huang"],"firstnames":["Xiaolei"],"suffixes":[]},{"propositions":[],"lastnames":["May"],"firstnames":["Jonathan"],"suffixes":[]},{"propositions":[],"lastnames":["Peng"],"firstnames":["Nanyun"],"suffixes":[]}],"booktitle":"Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)","month":"November","year":"2019","address":"Hong Kong, China","publisher":"Association for Computational Linguistics","url":"https://www.aclweb.org/anthology/D19-1672","doi":"10.18653/v1/D19-1672","pages":"6394–6400","abstract":"Building named entity recognition (NER) models for languages that do not have much training data is a challenging task. While recent work has shown promising results on cross-lingual transfer from high-resource languages, it is unclear what knowledge is transferred. In this paper, we first propose a simple and efficient neural architecture for cross-lingual NER. Experiments show that our model achieves competitive performance with the state-of-the-art. We further explore how transfer learning works for cross-lingual NER on two transferable factors: sequential order and multilingual embedding. 
Our results shed light on future research for improving cross-lingual NER.","bibtex":"@inproceedings{huang-etal-2019-matters,\n title = \"What Matters for Neural Cross-Lingual Named Entity Recognition: An Empirical Analysis\",\n author = \"Huang, Xiaolei and\n May, Jonathan and\n Peng, Nanyun\",\n booktitle = \"Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)\",\n month = nov,\n year = \"2019\",\n address = \"Hong Kong, China\",\n publisher = \"Association for Computational Linguistics\",\n url = \"https://www.aclweb.org/anthology/D19-1672\",\n doi = \"10.18653/v1/D19-1672\",\n pages = \"6394--6400\",\n abstract = \"Building named entity recognition (NER) models for languages that do not have much training data is a challenging task. While recent work has shown promising results on cross-lingual transfer from high-resource languages, it is unclear what knowledge is transferred. In this paper, we first propose a simple and efficient neural architecture for cross-lingual NER. Experiments show that our model achieves competitive performance with the state-of-the-art. We further explore how transfer learning works for cross-lingual NER on two transferable factors: sequential order and multilingual embedding. 
Our results shed light on future research for improving cross-lingual NER.\",\n}\n\n","author_short":["Huang, X.","May, J.","Peng, N."],"key":"huang-etal-2019-matters","id":"huang-etal-2019-matters","bibbaseid":"huang-may-peng-whatmattersforneuralcrosslingualnamedentityrecognitionanempiricalanalysis-2019","role":"author","urls":{"Paper":"https://www.aclweb.org/anthology/D19-1672"},"metadata":{"authorlinks":{}}},"bibtype":"inproceedings","biburl":"https://jonmay.github.io/webpage/cutelabname/cutelabname.bib","dataSources":["ZdhKtP2cSp3Aki2ge","yjD6WSyKGfBgDWuT5","X5WBAKQabka5TW5z7","M68HJARkDZS53XDxa","hbZSwot2msWk92m5B","fcWjcoAgajPvXWcp7","GvHfaAWP6AfN6oLQE","j3Qzx9HAAC6WtJDHS","5eM3sAccSEpjSDHHQ"],"keywords":[],"search_terms":["matters","neural","cross","lingual","named","entity","recognition","empirical","analysis","huang","may","peng"],"title":"What Matters for Neural Cross-Lingual Named Entity Recognition: An Empirical Analysis","year":2019}