BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. Devlin, J., Chang, M.-W., Lee, K., & Toutanova, K. In Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers), pages 4171–4186, Stroudsburg, PA, USA, 2019. Association for Computational Linguistics.
@inproceedings{Devlin2018,
address = {Stroudsburg, PA, USA},
archivePrefix = {arXiv},
author = {Devlin, Jacob and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
booktitle = {Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers)},
doi = {10.18653/v1/N19-1423},
eprint = {1810.04805v1},
keywords = {model},
pages = {4171--4186},
publisher = {Association for Computational Linguistics},
title = {{BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding}},
url = {https://aclanthology.org/N19-1423},
year = {2019}
}
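
A minimal LaTeX sketch of how this entry can be used, assuming it is saved in a file named references.bib and that natbib is available (the filename and package choice are assumptions, not part of the original page):

\documentclass{article}
\usepackage{natbib} % assumed citation package; \citep renders (Devlin et al., 2019)
\begin{document}
BERT \citep{Devlin2018} pre-trains deep bidirectional Transformers for language understanding.
\bibliographystyle{plainnat}
\bibliography{references} % references.bib contains the @inproceedings entry above
\end{document}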
