What Does BERT Learn about the Structure of Language?. Jawahar, G., Sagot, B., & Seddah, D. In Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics, pages 3651–3657, Stroudsburg, PA, USA, 2019. Association for Computational Linguistics. BERT is a recent language representation model that has surprisingly performed well in diverse language understanding benchmarks. This result indicates the possibility that BERT networks capture structural information about language. In this work, we provide novel support for this claim by performing a series of experiments to unpack the elements of English language structure learned by BERT. We first show that BERT's phrasal representation captures phrase-level information in the lower layers. We also show that BERT's intermediate layers encode a rich hierarchy of linguistic information, with surface features at the bottom, syntactic features in the middle and semantic features at the top. BERT turns out to require deeper layers when long-distance dependency information is required, e.g. to track subject-verb agreement. Finally, we show that BERT representations capture linguistic information in a compositional way that mimics classical, tree-like structures.
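The paper's central method is layer-wise probing with diagnostic classifiers: BERT is kept frozen, a sentence representation is read off each layer, and a simple classifier is trained per layer on a linguistic property so that per-layer accuracy indicates where that property is encoded. A minimal sketch of this setup is given below; the toy "sentence length" task, the example sentences, and the mean-pooling choice are illustrative assumptions, not the authors' exact SentEval or agreement datasets.

# Minimal layer-wise probing sketch (assumed setup, not the paper's exact pipeline):
# extract each BERT layer's sentence vector and fit a simple diagnostic classifier.
import torch
from transformers import AutoTokenizer, AutoModel
from sklearn.linear_model import LogisticRegression

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModel.from_pretrained("bert-base-uncased", output_hidden_states=True)
model.eval()

# Toy surface-level probing task (placeholder): is the sentence longer than 6 words?
sentences = [
    "The cat sat on the mat .",
    "Dogs bark .",
    "The keys to the cabinet are on the table in the hall .",
    "She left .",
]
labels = [1 if len(s.split()) > 6 else 0 for s in sentences]

with torch.no_grad():
    enc = tokenizer(sentences, return_tensors="pt", padding=True, truncation=True)
    out = model(**enc)

# out.hidden_states is a tuple: the embedding layer plus one tensor per transformer
# layer, each of shape (batch, seq_len, hidden_size).
for layer_idx, layer in enumerate(out.hidden_states):
    # Mean-pool over real (non-padding) tokens to get one vector per sentence.
    mask = enc["attention_mask"].unsqueeze(-1)
    sent_vecs = (layer * mask).sum(1) / mask.sum(1)
    probe = LogisticRegression(max_iter=1000).fit(sent_vecs.numpy(), labels)
    acc = probe.score(sent_vecs.numpy(), labels)  # training accuracy, for illustration only
    print(f"layer {layer_idx:2d}: probe accuracy = {acc:.2f}")

In the paper's experiments the analogous per-layer curves show surface properties peaking in lower layers, syntactic properties in middle layers, and semantic properties in upper layers; a real probe would of course use held-out data rather than training accuracy.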
@inproceedings{Jawahar2019,
abstract = {BERT is a recent language representation model that has surprisingly performed well in diverse language understanding benchmarks. This result indicates the possibility that BERT networks capture structural information about language. In this work, we provide novel support for this claim by performing a series of experiments to unpack the elements of English language structure learned by BERT. We first show that BERT's phrasal representation captures phrase-level information in the lower layers. We also show that BERT's intermediate layers encode a rich hierarchy of linguistic information, with surface features at the bottom, syntactic features in the middle and semantic features at the top. BERT turns out to require deeper layers when long-distance dependency information is required, e.g. to track subject-verb agreement. Finally, we show that BERT representations capture linguistic information in a compositional way that mimics classical, tree-like structures.},
address = {Stroudsburg, PA, USA},
author = {Jawahar, Ganesh and Sagot, Beno{\^{i}}t and Seddah, Djam{\'{e}}},
booktitle = {Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics},
doi = {10.18653/v1/P19-1356},
file = {:Users/shanest/Documents/Library/Jawahar, Sagot, Seddah/Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics/Jawahar, Sagot, Seddah - 2019 - What Does BERT Learn about the Structure of Language.pdf:pdf},
keywords = {method: diagnostic classifier,phenomenon: compositionality,phenomenon: various},
pages = {3651--3657},
publisher = {Association for Computational Linguistics},
title = {{What Does BERT Learn about the Structure of Language?}},
url = {https://www.aclweb.org/anthology/P19-1356},
year = {2019}
}
{"_id":"BMFXYNYvQou9vSNFP","bibbaseid":"jawahar-sagot-seddah-whatdoesbertlearnaboutthestructureoflanguage-2019","authorIDs":[],"author_short":["Jawahar, G.","Sagot, B.","Seddah, D."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","abstract":"BERT is a recent language representation model that has surprisingly performed well in diverse language understanding benchmarks. This result indicates the possibility that BERT networks capture structural information about language. In this work, we provide novel support for this claim by performing a series of experiments to unpack the elements of English language structure learned by BERT. We first show that BERT's phrasal representation captures phrase-level information in the lower layers. We also show that BERT's intermediate layers encode a rich hierarchy of linguistic information , with surface features at the bottom, syntactic features in the middle and semantic features at the top. BERT turns out to require deeper layers when long-distance dependency information is required, e.g. to track subject-verb agreement. Finally, we show that BERT representations capture linguistic information in a compositional way that mimics classical, tree-like structures.","address":"Stroudsburg, PA, USA","author":[{"propositions":[],"lastnames":["Jawahar"],"firstnames":["Ganesh"],"suffixes":[]},{"propositions":[],"lastnames":["Sagot"],"firstnames":["Benoît"],"suffixes":[]},{"propositions":[],"lastnames":["Seddah"],"firstnames":["Djamé"],"suffixes":[]}],"booktitle":"Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics","doi":"10.18653/v1/P19-1356","file":":Users/shanest/Documents/Library/Jawahar, Sagot, Seddah/Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics/Jawahar, Sagot, Seddah - 2019 - What Does BERT Learn about the Structure of Language.pdf:pdf","keywords":"method: diagnostic classifier,phenomenon: compositionality,phenomenon: various","pages":"3651–3657","publisher":"Association for Computational Linguistics","title":"What Does BERT Learn about the Structure of Language?","url":"https://www.aclweb.org/anthology/P19-1356","year":"2019","bibtex":"@inproceedings{Jawahar2019,\nabstract = {BERT is a recent language representation model that has surprisingly performed well in diverse language understanding benchmarks. This result indicates the possibility that BERT networks capture structural information about language. In this work, we provide novel support for this claim by performing a series of experiments to unpack the elements of English language structure learned by BERT. We first show that BERT's phrasal representation captures phrase-level information in the lower layers. We also show that BERT's intermediate layers encode a rich hierarchy of linguistic information , with surface features at the bottom, syntactic features in the middle and semantic features at the top. BERT turns out to require deeper layers when long-distance dependency information is required, e.g. to track subject-verb agreement. 
Finally, we show that BERT representations capture linguistic information in a compositional way that mimics classical, tree-like structures.},\naddress = {Stroudsburg, PA, USA},\nauthor = {Jawahar, Ganesh and Sagot, Beno{\\^{i}}t and Seddah, Djam{\\'{e}}},\nbooktitle = {Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics},\ndoi = {10.18653/v1/P19-1356},\nfile = {:Users/shanest/Documents/Library/Jawahar, Sagot, Seddah/Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics/Jawahar, Sagot, Seddah - 2019 - What Does BERT Learn about the Structure of Language.pdf:pdf},\nkeywords = {method: diagnostic classifier,phenomenon: compositionality,phenomenon: various},\npages = {3651--3657},\npublisher = {Association for Computational Linguistics},\ntitle = {{What Does BERT Learn about the Structure of Language?}},\nurl = {https://www.aclweb.org/anthology/P19-1356},\nyear = {2019}\n}\n","author_short":["Jawahar, G.","Sagot, B.","Seddah, D."],"key":"Jawahar2019","id":"Jawahar2019","bibbaseid":"jawahar-sagot-seddah-whatdoesbertlearnaboutthestructureoflanguage-2019","role":"author","urls":{"Paper":"https://www.aclweb.org/anthology/P19-1356"},"keyword":["method: diagnostic classifier","phenomenon: compositionality","phenomenon: various"],"metadata":{"authorlinks":{}},"downloads":3},"bibtype":"inproceedings","biburl":"https://www.shane.st/teaching/575/win20/MachineLearning-interpretability.bib","creationDate":"2020-01-17T05:09:05.722Z","downloads":3,"keywords":["method: diagnostic classifier","phenomenon: compositionality","phenomenon: various"],"search_terms":["bert","learn","structure","language","jawahar","sagot","seddah"],"title":"What Does BERT Learn about the Structure of Language?","year":2019,"dataSources":["okYcdTpf4JJ2zkj7A","znj7izS5PeehdLR3G"]}