Memory Architectures in Recurrent Neural Network Language Models. Yogatama, D., Miao, Y., Melis, G., Ling, W., Kuncoro, A., Dyer, C., & Blunsom, P., 2018.
We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that …
@Article{Yogatama2018,
  author   = {Yogatama, Dani and Miao, Yishu and Melis, Gábor and Ling, Wang and Kuncoro, Adhiguna and Dyer, Chris and Blunsom, Phil},
  title    = {Memory Architectures in Recurrent Neural Network Language Models},
  year     = {2018},
  abstract = {We compare and analyze sequential, random access, and stack memory architectures for recurrent neural network language models. Our experiments on the Penn Treebank and Wikitext-2 datasets show that …}
}
