XLNet: Generalized Autoregressive Pretraining for Language Understanding. Yang, Z., Dai, Z., Yang, Y., Carbonell, J., Salakhutdinov, R., & Le, Q. V. In Advances in Neural Information Processing Systems 32 (NeurIPS 2019), 2019.
XLNet: Generalized Autoregressive Pretraining for Language Understanding [link]Paper  bibtex   
% XLNet (NeurIPS 2019). Cleaned from a Mendeley/bibbase export:
% - title: brace only the acronym {XLNet}, not the whole title, so
%   bibliography styles can apply their own sentence casing;
% - eprint: bare arXiv identifier (no "arXiv:" prefix — archivePrefix
%   carries that — and no version suffix); redundant arxivId dropped;
% - author: period added to the abbreviated initial "Quoc V.".
@inproceedings{Yang2019,
  author        = {Yang, Zhilin and Dai, Zihang and Yang, Yiming and Carbonell, Jaime and Salakhutdinov, Ruslan and Le, Quoc V.},
  title         = {{XLNet}: Generalized Autoregressive Pretraining for Language Understanding},
  booktitle     = {Advances in Neural Information Processing Systems 32 ({NeurIPS} 2019)},
  year          = {2019},
  archivePrefix = {arXiv},
  eprint        = {1906.08237},
  url           = {https://papers.nips.cc/paper/8812-xlnet-generalized-autoregressive-pretraining-for-language-understanding},
  keywords      = {model},
  file          = {:Users/shanest/Documents/Library/Yang et al/Advances in Neural Information Processing Systems 32 (NeurIPS 2019)/Yang et al. - 2019 - XLNet Generalized Autoregressive Pretraining for Language Understanding.pdf:pdf},
}

Downloads: 0