Memory Architectures in Recurrent Neural Network Language Models. Yogatama, D., Miao, Y., Melis, G., Ling, W., Kuncoro, A., Dyer, C., & Blunsom, P., 2018. Abstract: We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that.
@article{Yogatama2018,
  author        = {Yogatama, Dani and Miao, Yishu and Melis, G{\'a}bor and Ling, Wang and Kuncoro, Adhiguna and Dyer, Chris and Blunsom, Phil},
  title         = {Memory Architectures in Recurrent Neural Network Language Models},
  year          = {2018},
  abstract      = {We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that.},
  internal-note = {Cleaned from a BibBase auto-export: removed empty journal/volume/number/pages/location/keywords fields (they trigger BibTeX/Biber "empty field" warnings). The source record names no venue -- TODO confirm publication venue and restore journal/booktitle, volume, and pages. Abstract is truncated in the source record.}
}
Downloads: 0
{"_id":"qdfyi8iuHuS5Hj4YG","bibbaseid":"yogatama-miao-melis-ling-kuncoro-dyer-blunsom-memoryarchitecturesinrecurrentneuralnetworklanguagemodels-2018","authorIDs":[],"author_short":["Yogatama, D.","Miao, Y.","Melis, G.","Ling, W.","Kuncoro, A.","Dyer, C.","Blunsom, P."],"bibdata":{"bibtype":"article","type":"article","author":[{"propositions":[],"lastnames":["Yogatama"],"firstnames":["Dani"],"suffixes":[]},{"propositions":[],"lastnames":["Miao"],"firstnames":["Yishu"],"suffixes":[]},{"propositions":[],"lastnames":["Melis"],"firstnames":["Gábor"],"suffixes":[]},{"propositions":[],"lastnames":["Ling"],"firstnames":["Wang"],"suffixes":[]},{"propositions":[],"lastnames":["Kuncoro"],"firstnames":["Adhiguna"],"suffixes":[]},{"propositions":[],"lastnames":["Dyer"],"firstnames":["Chris"],"suffixes":[]},{"propositions":[],"lastnames":["Blunsom"],"firstnames":["Phil"],"suffixes":[]}],"title":"Memory Architectures in Recurrent Neural Network Language Models","journal":"","volume":"","number":"","pages":"","year":"2018","abstract":"We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that.","location":"","keywords":"","bibtex":"@Article{Yogatama2018,\nauthor = {Yogatama, Dani and Miao, Yishu and Melis, Gábor and Ling, Wang and Kuncoro, Adhiguna and Dyer, Chris and Blunsom, Phil}, \ntitle = {Memory Architectures in Recurrent Neural Network Language Models}, \njournal = {}, \nvolume = {}, \nnumber = {}, \npages = {}, \nyear = {2018}, \nabstract = {We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that.}, \nlocation = {}, \nkeywords = {}}\n\n\n","author_short":["Yogatama, D.","Miao, Y.","Melis, G.","Ling, W.","Kuncoro, A.","Dyer, C.","Blunsom, P."],"key":"Yogatama2018","id":"Yogatama2018","bibbaseid":"yogatama-miao-melis-ling-kuncoro-dyer-blunsom-memoryarchitecturesinrecurrentneuralnetworklanguagemodels-2018","role":"author","urls":{},"downloads":0},"bibtype":"article","biburl":"https://gist.githubusercontent.com/stuhlmueller/a37ef2ef4f378ebcb73d249fe0f8377a/raw/6f96f6f779501bd9482896af3e4db4de88c35079/references.bib","creationDate":"2020-01-27T02:13:33.959Z","downloads":0,"keywords":[],"search_terms":["memory","architectures","recurrent","neural","network","language","models","yogatama","miao","melis","ling","kuncoro","dyer","blunsom"],"title":"Memory Architectures in Recurrent Neural Network Language Models","year":2018,"dataSources":["hEoKh4ygEAWbAZ5iy"]}