XLM-E: Cross-lingual Language Model Pre-training via ELECTRA. Chi, Z., Huang, S., Dong, L., Ma, S., Singhal, S., Bajaj, P., Song, X., & Wei, F. CoRR, 2021.
XLM-E: Cross-lingual Language Model Pre-training via ELECTRA — [paper link] — BibTeX entry below.
@article{DBLP:journals/corr/abs-2106-16138,
  author       = {Chi, Zewen and
                  Huang, Shaohan and
                  Dong, Li and
                  Ma, Shuming and
                  Singhal, Saksham and
                  Bajaj, Payal and
                  Song, Xia and
                  Wei, Furu},
  title        = {{XLM-E}: Cross-lingual Language Model Pre-training via {ELECTRA}},
  journal      = {CoRR},
  volume       = {abs/2106.16138},
  year         = {2021},
  doi          = {10.48550/arXiv.2106.16138},
  url          = {https://arxiv.org/abs/2106.16138},
  eprinttype   = {arXiv},
  eprint       = {2106.16138},
  timestamp    = {Mon, 05 Jul 2021 01:00:00 +0200},
  biburl       = {https://dblp.org/rec/journals/corr/abs-2106-16138.bib},
  bibsource    = {dblp computer science bibliography, https://dblp.org},
}

Downloads: 0