Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer. Raffel, C., Shazeer, N., Roberts, A., Lee, K., Narang, S., Matena, M., Zhou, Y., Li, W., & Liu, P. J. July 2020. arXiv:1910.10683 [cs, stat]
Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts all text-based language problems into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled data sets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new "Colossal Clean Crawled Corpus", we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our data set, pre-trained models, and code.
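The unifying idea described in the abstract is that every task (translation, summarization, classification, question answering) is cast as feeding the model text and training it to emit text, with a task prefix selecting the behavior. A minimal sketch of that interface, not taken from the paper itself, using the publicly released t5-small checkpoint through the Hugging Face transformers library (assumes the transformers and sentencepiece packages are installed):

from transformers import T5Tokenizer, T5ForConditionalGeneration

# Load a released T5 checkpoint; "t5-small" is the smallest public variant.
tokenizer = T5Tokenizer.from_pretrained("t5-small")
model = T5ForConditionalGeneration.from_pretrained("t5-small")

# Text-to-text: the task is indicated by a plain-text prefix in the input,
# and the answer comes back as generated text rather than a class label.
input_ids = tokenizer(
    "translate English to German: The house is wonderful.",
    return_tensors="pt",
).input_ids
outputs = model.generate(input_ids, max_new_tokens=40)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Swapping the prefix (e.g. "summarize: ..." or "cola sentence: ...") reuses the same model, tokenizer, and generation call for a different task, which is the point of the unified format.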
@misc{raffel_exploring_2020,
title = {Exploring the {Limits} of {Transfer} {Learning} with a {Unified} {Text}-to-{Text} {Transformer}},
url = {http://arxiv.org/abs/1910.10683},
doi = {10.48550/arXiv.1910.10683},
abstract = {Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts all text-based language problems into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled data sets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new ``Colossal Clean Crawled Corpus'', we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our data set, pre-trained models, and code.},
urldate = {2023-02-13},
publisher = {arXiv},
author = {Raffel, Colin and Shazeer, Noam and Roberts, Adam and Lee, Katherine and Narang, Sharan and Matena, Michael and Zhou, Yanqi and Li, Wei and Liu, Peter J.},
month = jul,
year = {2020},
note = {arXiv:1910.10683 [cs, stat]},
keywords = {Computer Science - Computation and Language, Computer Science - Machine Learning, Statistics - Machine Learning},
}
{"_id":"5MfEHWdaMp6ha4RPC","bibbaseid":"raffel-shazeer-roberts-lee-narang-matena-zhou-li-etal-exploringthelimitsoftransferlearningwithaunifiedtexttotexttransformer-2020","author_short":["Raffel, C.","Shazeer, N.","Roberts, A.","Lee, K.","Narang, S.","Matena, M.","Zhou, Y.","Li, W.","Liu, P. J."],"bibdata":{"bibtype":"misc","type":"misc","title":"Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer","url":"http://arxiv.org/abs/1910.10683","doi":"10.48550/arXiv.1910.10683","abstract":"Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts all text-based language problems into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled data sets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new ``Colossal Clean Crawled Corpus'', we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our data set, pre-trained models, and code.","urldate":"2023-02-13","publisher":"arXiv","author":[{"propositions":[],"lastnames":["Raffel"],"firstnames":["Colin"],"suffixes":[]},{"propositions":[],"lastnames":["Shazeer"],"firstnames":["Noam"],"suffixes":[]},{"propositions":[],"lastnames":["Roberts"],"firstnames":["Adam"],"suffixes":[]},{"propositions":[],"lastnames":["Lee"],"firstnames":["Katherine"],"suffixes":[]},{"propositions":[],"lastnames":["Narang"],"firstnames":["Sharan"],"suffixes":[]},{"propositions":[],"lastnames":["Matena"],"firstnames":["Michael"],"suffixes":[]},{"propositions":[],"lastnames":["Zhou"],"firstnames":["Yanqi"],"suffixes":[]},{"propositions":[],"lastnames":["Li"],"firstnames":["Wei"],"suffixes":[]},{"propositions":[],"lastnames":["Liu"],"firstnames":["Peter","J."],"suffixes":[]}],"month":"July","year":"2020","note":"arXiv:1910.10683 [cs, stat]","keywords":"Computer Science - Computation and Language, Computer Science - Machine Learning, Statistics - Machine Learning","bibtex":"@misc{raffel_exploring_2020,\n\ttitle = {Exploring the {Limits} of {Transfer} {Learning} with a {Unified} {Text}-to-{Text} {Transformer}},\n\turl = {http://arxiv.org/abs/1910.10683},\n\tdoi = {10.48550/arXiv.1910.10683},\n\tabstract = {Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts all text-based language problems into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled data sets, transfer approaches, and other factors on dozens of language understanding tasks. 
By combining the insights from our exploration with scale and our new ``Colossal Clean Crawled Corpus'', we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our data set, pre-trained models, and code.},\n\turldate = {2023-02-13},\n\tpublisher = {arXiv},\n\tauthor = {Raffel, Colin and Shazeer, Noam and Roberts, Adam and Lee, Katherine and Narang, Sharan and Matena, Michael and Zhou, Yanqi and Li, Wei and Liu, Peter J.},\n\tmonth = jul,\n\tyear = {2020},\n\tnote = {arXiv:1910.10683 [cs, stat]},\n\tkeywords = {Computer Science - Computation and Language, Computer Science - Machine Learning, Statistics - Machine Learning},\n}\n\n\n\n","author_short":["Raffel, C.","Shazeer, N.","Roberts, A.","Lee, K.","Narang, S.","Matena, M.","Zhou, Y.","Li, W.","Liu, P. J."],"key":"raffel_exploring_2020-1","id":"raffel_exploring_2020-1","bibbaseid":"raffel-shazeer-roberts-lee-narang-matena-zhou-li-etal-exploringthelimitsoftransferlearningwithaunifiedtexttotexttransformer-2020","role":"author","urls":{"Paper":"http://arxiv.org/abs/1910.10683"},"keyword":["Computer Science - Computation and Language","Computer Science - Machine Learning","Statistics - Machine Learning"],"metadata":{"authorlinks":{}}},"bibtype":"misc","biburl":"https://bibbase.org/zotero/abhishek-p","dataSources":["N4kJAiLiJ7kxfNsoh","h7kKWXpJh2iaX92T5","taWdMrienBzqHC2tC"],"keywords":["computer science - computation and language","computer science - machine learning","statistics - machine learning"],"search_terms":["exploring","limits","transfer","learning","unified","text","text","transformer","raffel","shazeer","roberts","lee","narang","matena","zhou","li","liu"],"title":"Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer","year":2020}