Few-Shot Dialogue Generation Without Annotated Data: A Transfer Learning Approach. Shalyminov, I., Lemon, O., Eshghi, A., & Lee, S. In Proceedings of SIGdial 2019, 2019. ACL.
Few-Shot Dialogue Generation Without Annotated Data: A Transfer Learning Approach — [Website link] · abstract · bibtex
Learning with minimal data is one of the key challenges in the development of practical, production-ready goal-oriented dialogue systems. In a real-world enterprise setting where dialogue systems are developed rapidly and are expected to work robustly for an ever-growing variety of domains, products, and scenarios, efficient learning from a limited number of examples becomes indispensable. In this paper, we introduce a technique to achieve state-of-the-art dialogue generation performance in a few-shot setup, without using any annotated data. We do this by leveraging background knowledge from a larger, more highly represented dialogue source --- namely, the MetaLWOz dataset. We evaluate our model on the Stanford Multi-Domain Dialogue Dataset, consisting of human-human goal-oriented dialogues in in-car navigation, appointment scheduling, and weather information domains. We show that our few-shot approach achieves state-of-the art results on that dataset by consistently outperforming the previous best model in terms of BLEU and Entity F1 scores, while being more data-efficient by not requiring any data annotation.
@inproceedings{Shalyminov.etal19b,
  author        = {Shalyminov, Igor and Lemon, Oliver and Eshghi, Arash and Lee, Sungjin},
  title         = {Few-Shot Dialogue Generation Without Annotated Data: A Transfer Learning Approach},
  booktitle     = {Proceedings of {SIGdial} 2019},
  year          = {2019},
  address       = {Stockholm, Sweden},
  publisher     = {ACL},
  url           = {https://arxiv.org/abs/1908.05854},
  eprint        = {1908.05854},
  archiveprefix = {arXiv},
  abstract      = {Learning with minimal data is one of the key challenges in the development of practical, production-ready goal-oriented dialogue systems. In a real-world enterprise setting where dialogue systems are developed rapidly and are expected to work robustly for an ever-growing variety of domains, products, and scenarios, efficient learning from a limited number of examples becomes indispensable. In this paper, we introduce a technique to achieve state-of-the-art dialogue generation performance in a few-shot setup, without using any annotated data. We do this by leveraging background knowledge from a larger, more highly represented dialogue source --- namely, the MetaLWOz dataset. We evaluate our model on the Stanford Multi-Domain Dialogue Dataset, consisting of human-human goal-oriented dialogues in in-car navigation, appointment scheduling, and weather information domains. We show that our few-shot approach achieves state-of-the art results on that dataset by consistently outperforming the previous best model in terms of BLEU and Entity F1 scores, while being more data-efficient by not requiring any data annotation.},
  keywords      = {Dialogue System,Few-Shot,Generation,Knowledge Transfer,Transfer Learning},
}

Downloads: 0