QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension. Yu, A. W., Dohan, D., Luong, M., Zhao, R., Chen, K., Norouzi, M., & Le, Q. V., 2018.
abstract   bibtex   
Current end-to-end machine reading and question answering (Q&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.
@inproceedings{Yu2018,
  author     = {Yu, Adams Wei and Dohan, David and Luong, Minh-Thang and Zhao, Rui and Chen, Kai and Norouzi, Mohammad and Le, Quoc V.},
  title      = {{QANet}: Combining Local Convolution with Global Self-Attention for Reading Comprehension},
  booktitle  = {International Conference on Learning Representations ({ICLR})},
  year       = {2018},
  eprint     = {1804.09541},
  eprinttype = {arXiv},
  abstract   = {Current end-to-end machine reading and question answering (Q\&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.},
}

Downloads: 0