BERT for Joint Intent Classification and Slot Filling
Chen, Q., Zhuo, Z., & Wang, W.

Abstract: Intent classification and slot filling are two essential tasks for natural language understanding. They often suffer from small-scale human-labeled training data, resulting in poor generalization capability, especially for rare words. Recently a new language representation model, BERT (Bidirectional Encoder Representations from Transformers), facilitates pre-training deep bidirectional representations on large-scale unlabeled corpora, and has created state-of-the-art models for a wide variety of natural language processing tasks after simple fine-tuning. However, there has not been much effort on exploring BERT for natural language understanding. In this work, we propose a joint intent classification and slot filling model based on BERT. Experimental results demonstrate that our proposed model achieves significant improvement on intent classification accuracy, slot filling F1, and sentence-level semantic frame accuracy on several public benchmark datasets, compared to the attention-based recurrent neural network models and slot-gated models.
@article{chenBERTJointIntent2019,
archivePrefix = {arXiv},
eprinttype = {arxiv},
eprint = {1902.10909},
primaryClass = {cs},
title = {{{BERT}} for {{Joint Intent Classification}} and {{Slot Filling}}},
url = {http://arxiv.org/abs/1902.10909},
abstract = {Intent classification and slot filling are two essential tasks for natural language understanding. They often suffer from small-scale human-labeled training data, resulting in poor generalization capability, especially for rare words. Recently a new language representation model, BERT (Bidirectional Encoder Representations from Transformers), facilitates pre-training deep bidirectional representations on large-scale unlabeled corpora, and has created state-of-the-art models for a wide variety of natural language processing tasks after simple fine-tuning. However, there has not been much effort on exploring BERT for natural language understanding. In this work, we propose a joint intent classification and slot filling model based on BERT. Experimental results demonstrate that our proposed model achieves significant improvement on intent classification accuracy, slot filling F1, and sentence-level semantic frame accuracy on several public benchmark datasets, compared to the attention-based recurrent neural network models and slot-gated models.},
urldate = {2019-03-01},
date = {2019-02-28},
keywords = {Computer Science - Computation and Language},
author = {Chen, Qian and Zhuo, Zhu and Wang, Wen},
}
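The abstract describes fine-tuning BERT jointly for sentence-level intent classification and token-level slot filling, but does not spell out the architecture. A common way to realize such a joint model is to put an intent classifier on the pooled [CLS] representation and a slot tagger on the per-token hidden states, training both with a summed cross-entropy loss. The sketch below illustrates that setup; the model name, label counts, and dropout rate are placeholders, and this is not the authors' code.

```python
# Minimal sketch of a joint intent + slot model on top of BERT (illustrative only).
import torch
import torch.nn as nn
from transformers import BertModel, BertTokenizerFast

class JointBert(nn.Module):
    def __init__(self, num_intents: int, num_slots: int,
                 model_name: str = "bert-base-uncased"):
        super().__init__()
        self.bert = BertModel.from_pretrained(model_name)
        hidden = self.bert.config.hidden_size
        self.dropout = nn.Dropout(0.1)
        self.intent_head = nn.Linear(hidden, num_intents)  # sentence-level intent
        self.slot_head = nn.Linear(hidden, num_slots)       # token-level slot tags

    def forward(self, input_ids, attention_mask):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        # Pooled [CLS] vector -> intent logits; per-token states -> slot logits.
        intent_logits = self.intent_head(self.dropout(out.pooler_output))
        slot_logits = self.slot_head(self.dropout(out.last_hidden_state))
        return intent_logits, slot_logits

# Usage on a single ATIS-style utterance (label-set sizes are placeholders).
tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased")
enc = tokenizer("show flights from boston to denver", return_tensors="pt")
model = JointBert(num_intents=21, num_slots=120)
intent_logits, slot_logits = model(enc["input_ids"], enc["attention_mask"])
print(intent_logits.shape, slot_logits.shape)  # [1, 21] and [1, seq_len, 120]
```

During fine-tuning, one would typically add cross-entropy losses for the intent label and for the slot tags (masking padding and sub-word continuation tokens) and optimize their sum end-to-end, which is the sense in which the two tasks are learned jointly.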
{"_id":"3GbwK3uD3heXcM8LH","bibbaseid":"chen-zhuo-wang-bertforjointintentclassificationandslotfilling","authorIDs":[],"author_short":["Chen, Q.","Zhuo, Z.","Wang, W."],"bibdata":{"bibtype":"article","type":"article","archiveprefix":"arXiv","eprinttype":"arxiv","eprint":"1902.10909","primaryclass":"cs","title":"BERT for Joint Intent Classification and Slot Filling","url":"http://arxiv.org/abs/1902.10909","abstract":"Intent classification and slot filling are two essential tasks for natural language understanding. They often suffer from small-scale human-labeled training data, resulting in poor generalization capability, especially for rare words. Recently a new language representation model, BERT (Bidirectional Encoder Representations from Transformers), facilitates pre-training deep bidirectional representations on large-scale unlabeled corpora, and has created state-of-the-art models for a wide variety of natural language processing tasks after simple fine-tuning. However, there has not been much effort on exploring BERT for natural language understanding. In this work, we propose a joint intent classification and slot filling model based on BERT. Experimental results demonstrate that our proposed model achieves significant improvement on intent classification accuracy, slot filling F1, and sentence-level semantic frame accuracy on several public benchmark datasets, compared to the attention-based recurrent neural network models and slot-gated models.","urldate":"2019-03-01","date":"2019-02-28","keywords":"Computer Science - Computation and Language","author":[{"propositions":[],"lastnames":["Chen"],"firstnames":["Qian"],"suffixes":[]},{"propositions":[],"lastnames":["Zhuo"],"firstnames":["Zhu"],"suffixes":[]},{"propositions":[],"lastnames":["Wang"],"firstnames":["Wen"],"suffixes":[]}],"file":"/home/dimitri/Nextcloud/Zotero/storage/ZNEXFXZ9/Chen et al. - 2019 - BERT for Joint Intent Classification and Slot Fill.pdf;/home/dimitri/Nextcloud/Zotero/storage/QK6RL8QE/1902.html","bibtex":"@article{chenBERTJointIntent2019,\n archivePrefix = {arXiv},\n eprinttype = {arxiv},\n eprint = {1902.10909},\n primaryClass = {cs},\n title = {{{BERT}} for {{Joint Intent Classification}} and {{Slot Filling}}},\n url = {http://arxiv.org/abs/1902.10909},\n abstract = {Intent classification and slot filling are two essential tasks for natural language understanding. They often suffer from small-scale human-labeled training data, resulting in poor generalization capability, especially for rare words. Recently a new language representation model, BERT (Bidirectional Encoder Representations from Transformers), facilitates pre-training deep bidirectional representations on large-scale unlabeled corpora, and has created state-of-the-art models for a wide variety of natural language processing tasks after simple fine-tuning. However, there has not been much effort on exploring BERT for natural language understanding. In this work, we propose a joint intent classification and slot filling model based on BERT. 
Experimental results demonstrate that our proposed model achieves significant improvement on intent classification accuracy, slot filling F1, and sentence-level semantic frame accuracy on several public benchmark datasets, compared to the attention-based recurrent neural network models and slot-gated models.},\n urldate = {2019-03-01},\n date = {2019-02-28},\n keywords = {Computer Science - Computation and Language},\n author = {Chen, Qian and Zhuo, Zhu and Wang, Wen},\n file = {/home/dimitri/Nextcloud/Zotero/storage/ZNEXFXZ9/Chen et al. - 2019 - BERT for Joint Intent Classification and Slot Fill.pdf;/home/dimitri/Nextcloud/Zotero/storage/QK6RL8QE/1902.html}\n}\n\n","author_short":["Chen, Q.","Zhuo, Z.","Wang, W."],"key":"chenBERTJointIntent2019","id":"chenBERTJointIntent2019","bibbaseid":"chen-zhuo-wang-bertforjointintentclassificationandslotfilling","role":"author","urls":{"Paper":"http://arxiv.org/abs/1902.10909"},"keyword":["Computer Science - Computation and Language"],"downloads":0},"bibtype":"article","biburl":"https://raw.githubusercontent.com/dlozeve/newblog/master/bib/all.bib","creationDate":"2020-01-08T20:39:39.270Z","downloads":0,"keywords":["computer science - computation and language"],"search_terms":["bert","joint","intent","classification","slot","filling","chen","zhuo","wang"],"title":"BERT for Joint Intent Classification and Slot Filling","year":null,"dataSources":["3XqdvqRE7zuX4cm8m"]}