QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension. Yu, A. W., Dohan, D., Luong, M., Zhao, R., Chen, K., Norouzi, M., & Le, Q. V., 2018. abstract bibtex Current end-to-end machine reading and question answering (Q&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.
@Article{Yu2018,
author = {Yu, Adams Wei and Dohan, David and Luong, Minh-Thang and Zhao, Rui and Chen, Kai and Norouzi, Mohammad and Le, Quoc V},
title = {QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension},
journal = {},
volume = {},
number = {},
pages = {},
year = {2018},
abstract = {Current end-to-end machine reading and question answering (Q\&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.},
location = {},
keywords = {}}
Downloads: 0
{"_id":"Nc2jhYCCqhL6Rkuyz","bibbaseid":"yu-dohan-luong-zhao-chen-norouzi-le-qanetcombininglocalconvolutionwithglobalselfattentionforreadingcomprehension-2018","authorIDs":[],"author_short":["Yu, A. W.","Dohan, D.","Luong, M.","Zhao, R.","Chen, K.","Norouzi, M.","Le, Q. V"],"bibdata":{"bibtype":"article","type":"article","author":[{"propositions":[],"lastnames":["Yu"],"firstnames":["Adams","Wei"],"suffixes":[]},{"propositions":[],"lastnames":["Dohan"],"firstnames":["David"],"suffixes":[]},{"propositions":[],"lastnames":["Luong"],"firstnames":["Minh-Thang"],"suffixes":[]},{"propositions":[],"lastnames":["Zhao"],"firstnames":["Rui"],"suffixes":[]},{"propositions":[],"lastnames":["Chen"],"firstnames":["Kai"],"suffixes":[]},{"propositions":[],"lastnames":["Norouzi"],"firstnames":["Mohammad"],"suffixes":[]},{"propositions":[],"lastnames":["Le"],"firstnames":["Quoc","V"],"suffixes":[]}],"title":"QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension","journal":"","volume":"","number":"","pages":"","year":"2018","abstract":"Current end-to-end machine reading and question answering (Q\\&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.","location":"","keywords":"","bibtex":"@Article{Yu2018,\nauthor = {Yu, Adams Wei and Dohan, David and Luong, Minh-Thang and Zhao, Rui and Chen, Kai and Norouzi, Mohammad and Le, Quoc V}, \ntitle = {QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension}, \njournal = {}, \nvolume = {}, \nnumber = {}, \npages = {}, \nyear = {2018}, \nabstract = {Current end-to-end machine reading and question answering (Q\\&A) models are primarily based on recurrent neural networks (RNNs) with attention. Despite their success, these models are often slow.}, \nlocation = {}, \nkeywords = {}}\n\n\n","author_short":["Yu, A. W.","Dohan, D.","Luong, M.","Zhao, R.","Chen, K.","Norouzi, M.","Le, Q. V"],"key":"Yu2018","id":"Yu2018","bibbaseid":"yu-dohan-luong-zhao-chen-norouzi-le-qanetcombininglocalconvolutionwithglobalselfattentionforreadingcomprehension-2018","role":"author","urls":{},"downloads":0},"bibtype":"article","biburl":"https://gist.githubusercontent.com/stuhlmueller/a37ef2ef4f378ebcb73d249fe0f8377a/raw/6f96f6f779501bd9482896af3e4db4de88c35079/references.bib","creationDate":"2020-01-27T02:13:33.934Z","downloads":0,"keywords":[],"search_terms":["qanet","combining","local","convolution","global","self","attention","reading","comprehension","yu","dohan","luong","zhao","chen","norouzi","le"],"title":"QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension","year":2018,"dataSources":["hEoKh4ygEAWbAZ5iy"]}