Generating Sentences from a Continuous Space. Bowman, S. R., Vilnis, L., Vinyals, O., Dai, A. M., Jozefowicz, R., & Bengio, S. arXiv:1511.06349 [cs], May 2016.
Paper
Website abstract bibtex The standard recurrent neural network language model (RNNLM) generates sentences one word at a time and does not work from an explicit global sentence representation. In this work, we introduce and study an RNN-based variational autoencoder generative model that incorporates distributed latent representations of entire sentences. This factorization allows it to explicitly model holistic properties of sentences such as style, topic, and high-level syntactic features. Samples from the prior over these sentence representations remarkably produce diverse and well-formed sentences through simple deterministic decoding. By examining paths through this latent space, we are able to generate coherent novel sentences that interpolate between known sentences. We present techniques for solving the difficult learning problem presented by this model, demonstrate its effectiveness in imputing missing words, explore many interesting properties of the model's latent sentence space, and present negative results on the use of the model in language modeling.
@article{bowmanGeneratingSentencesContinuous2016,
  author     = {Bowman, Samuel R. and Vilnis, Luke and Vinyals, Oriol and Dai, Andrew M. and Jozefowicz, Rafal and Bengio, Samy},
  title      = {Generating Sentences from a Continuous Space},
  journal    = {arXiv:1511.06349 [cs]},
  year       = {2016},
  month      = may,
  eprint     = {1511.06349},
  eprinttype = {arXiv},
  url        = {http://arxiv.org/abs/1511.06349},
  urldate    = {2022-03-26},
  keywords   = {Computer Science - Computation and Language, Computer Science - Machine Learning},
  abstract   = {The standard recurrent neural network language model (RNNLM) generates sentences one word at a time and does not work from an explicit global sentence representation. In this work, we introduce and study an RNN-based variational autoencoder generative model that incorporates distributed latent representations of entire sentences. This factorization allows it to explicitly model holistic properties of sentences such as style, topic, and high-level syntactic features. Samples from the prior over these sentence representations remarkably produce diverse and well-formed sentences through simple deterministic decoding. By examining paths through this latent space, we are able to generate coherent novel sentences that interpolate between known sentences. We present techniques for solving the difficult learning problem presented by this model, demonstrate its effectiveness in imputing missing words, explore many interesting properties of the model's latent sentence space, and present negative results on the use of the model in language modeling.},
}
Downloads: 0
{"_id":"Rk3QjP8LGeyq6XiBA","bibbaseid":"bowman-vilnis-vinyals-dai-jozefowicz-bengio-generatingsentencesfromacontinuousspace-2016","author_short":["Bowman, S., R.","Vilnis, L.","Vinyals, O.","Dai, A., M.","Jozefowicz, R.","Bengio, S."],"bibdata":{"title":"Generating Sentences from a Continuous Space","type":"article","year":"2016","keywords":"Computer Science - Computation and Language,Computer Science - Machine Learning","websites":"http://arxiv.org/abs/1511.06349","month":"5","id":"8ce71856-eb90-3d63-8ec5-a2be7ac26b70","created":"2022-03-28T09:45:02.566Z","accessed":"2022-03-26","file_attached":"true","profile_id":"235249c2-3ed4-314a-b309-b1ea0330f5d9","group_id":"1ff583c0-be37-34fa-9c04-73c69437d354","last_modified":"2022-03-29T08:03:45.399Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"citation_key":"bowmanGeneratingSentencesContinuous2016","source_type":"article","notes":"arXiv: 1511.06349","private_publication":false,"abstract":"The standard recurrent neural network language model (RNNLM) generates sentences one word at a time and does not work from an explicit global sentence representation. In this work, we introduce and study an RNN-based variational autoencoder generative model that incorporates distributed latent representations of entire sentences. This factorization allows it to explicitly model holistic properties of sentences such as style, topic, and high-level syntactic features. Samples from the prior over these sentence representations remarkably produce diverse and well-formed sentences through simple deterministic decoding. By examining paths through this latent space, we are able to generate coherent novel sentences that interpolate between known sentences. 
We present techniques for solving the difficult learning problem presented by this model, demonstrate its effectiveness in imputing missing words, explore many interesting properties of the model's latent sentence space, and present negative results on the use of the model in language modeling.","bibtype":"article","author":"Bowman, Samuel R and Vilnis, Luke and Vinyals, Oriol and Dai, Andrew M and Jozefowicz, Rafal and Bengio, Samy","journal":"arXiv:1511.06349 [cs]","bibtex":"@article{\n title = {Generating Sentences from a Continuous Space},\n type = {article},\n year = {2016},\n keywords = {Computer Science - Computation and Language,Computer Science - Machine Learning},\n websites = {http://arxiv.org/abs/1511.06349},\n month = {5},\n id = {8ce71856-eb90-3d63-8ec5-a2be7ac26b70},\n created = {2022-03-28T09:45:02.566Z},\n accessed = {2022-03-26},\n file_attached = {true},\n profile_id = {235249c2-3ed4-314a-b309-b1ea0330f5d9},\n group_id = {1ff583c0-be37-34fa-9c04-73c69437d354},\n last_modified = {2022-03-29T08:03:45.399Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n citation_key = {bowmanGeneratingSentencesContinuous2016},\n source_type = {article},\n notes = {arXiv: 1511.06349},\n private_publication = {false},\n abstract = {The standard recurrent neural network language model (RNNLM) generates sentences one word at a time and does not work from an explicit global sentence representation. In this work, we introduce and study an RNN-based variational autoencoder generative model that incorporates distributed latent representations of entire sentences. This factorization allows it to explicitly model holistic properties of sentences such as style, topic, and high-level syntactic features. Samples from the prior over these sentence representations remarkably produce diverse and well-formed sentences through simple deterministic decoding. 
By examining paths through this latent space, we are able to generate coherent novel sentences that interpolate between known sentences. We present techniques for solving the difficult learning problem presented by this model, demonstrate its effectiveness in imputing missing words, explore many interesting properties of the model's latent sentence space, and present negative results on the use of the model in language modeling.},\n bibtype = {article},\n author = {Bowman, Samuel R and Vilnis, Luke and Vinyals, Oriol and Dai, Andrew M and Jozefowicz, Rafal and Bengio, Samy},\n journal = {arXiv:1511.06349 [cs]}\n}","author_short":["Bowman, S., R.","Vilnis, L.","Vinyals, O.","Dai, A., M.","Jozefowicz, R.","Bengio, S."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/02a8fc78-3476-b0e1-616d-35026c39e15e/Bowman_et_al___2016___Generating_Sentences_from_a_Continuous_Space.pdf.pdf","Website":"http://arxiv.org/abs/1511.06349"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"bowman-vilnis-vinyals-dai-jozefowicz-bengio-generatingsentencesfromacontinuousspace-2016","role":"author","keyword":["Computer Science - Computation and Language","Computer Science - Machine Learning"],"metadata":{"authorlinks":{}}},"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","dataSources":["taWdMrienBzqHC2tC","2252seNhipfTmjEBQ"],"keywords":["computer science - computation and language","computer science - machine learning"],"search_terms":["generating","sentences","continuous","space","bowman","vilnis","vinyals","dai","jozefowicz","bengio"],"title":"Generating Sentences from a Continuous Space","year":2016}