Generating Sequences With Recurrent Neural Networks. Graves, A. 2013.
This paper shows how Long Short-term Memory recurrent neural networks can be used to generate complex sequences with long-range structure, simply by predicting one data point at a time. The approach is demonstrated for text (where the data are discrete) and online handwriting (where the data are real-valued). It is then extended to handwriting synthesis by allowing the network to condition its predictions on a text sequence. The resulting system is able to generate highly realistic cursive handwriting in a wide variety of styles.
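The core recipe described above is autoregressive next-step prediction: an LSTM reads the sequence so far, outputs a distribution over the next data point, and generation proceeds by sampling from that distribution and feeding the sample back in as the next input. The sketch below illustrates this loop for the discrete (text) case in PyTorch; the class name, layer sizes, and vocabulary size are illustrative assumptions, not the paper's architecture or hyperparameters.

# Minimal sketch of autoregressive next-step prediction with an LSTM (discrete/text case).
# Illustrative assumptions only: vocab_size, layer sizes, and names are not from the paper.
import torch
import torch.nn as nn

class CharLSTM(nn.Module):
    def __init__(self, vocab_size=128, embed_dim=64, hidden_dim=256):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, embed_dim)
        self.lstm = nn.LSTM(embed_dim, hidden_dim, batch_first=True)
        self.head = nn.Linear(hidden_dim, vocab_size)  # distribution over the next symbol

    def forward(self, x, state=None):
        h, state = self.lstm(self.embed(x), state)
        return self.head(h), state  # per-step logits for the next data point

@torch.no_grad()
def sample(model, start_token, length=100):
    """Generate one data point at a time, feeding each sample back in as input."""
    model.eval()
    token = torch.tensor([[start_token]])
    state, out = None, []
    for _ in range(length):
        logits, state = model(token, state)
        probs = torch.softmax(logits[:, -1], dim=-1)
        token = torch.multinomial(probs, num_samples=1)
        out.append(token.item())
    return out

if __name__ == "__main__":
    model = CharLSTM()
    print(sample(model, start_token=0, length=20))  # untrained model: random output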
@article{graves2013generating,
 title = {Generating Sequences With Recurrent Neural Networks},
 author = {Graves, Alex},
 journal = {arXiv preprint arXiv:1308.0850},
 year = {2013},
 pages = {1-43},
 url = {http://arxiv.org/abs/1308.0850},
 abstract = {This paper shows how Long Short-term Memory recurrent neural networks can be used to generate complex sequences with long-range structure, simply by predicting one data point at a time. The approach is demonstrated for text (where the data are discrete) and online handwriting (where the data are real-valued). It is then extended to handwriting synthesis by allowing the network to condition its predictions on a text sequence. The resulting system is able to generate highly realistic cursive handwriting in a wide variety of styles.}
}
