{"_id":"gtA2wBSAEz8oZc3g2","bibbaseid":"gsaialoha-paperreadingtransformervq-2024","author_short":["GSAI-ALOHA"],"bibdata":{"bibtype":"misc","type":"知乎专栏文章","title":"[Paper Reading] Transformer-VQ: 优雅地实现线性复杂度的注意力机制","shorttitle":"[Paper Reading] Transformer-VQ","url":"https://zhuanlan.zhihu.com/p/678412206","abstract":"[Paper Reading] Transformer-VQ: 优雅地实现线性复杂度的注意力机制 - 来自知乎专栏「组会论文分享」,作者: GSAI-ALOHA https://zhuanlan.zhihu.com/p/678412206","language":"en","urldate":"2024-08-24","journal":"组会论文分享","author":[{"firstnames":[],"propositions":[],"lastnames":["GSAI-ALOHA"],"suffixes":[]}],"month":"January","year":"2024","note":"赞数:21;","keywords":"#Deep Learning\\textgreaterAE, #Transformer, #Zhihu, /unread, Attention, Transformer, 注意力","bibtex":"@misc{gsai-aloha_paper_2024,\n\ttype = {知乎专栏文章},\n\ttitle = {[{Paper} {Reading}] {Transformer}-{VQ}: 优雅地实现线性复杂度的注意力机制},\n\tshorttitle = {[{Paper} {Reading}] {Transformer}-{VQ}},\n\turl = {https://zhuanlan.zhihu.com/p/678412206},\n\tabstract = {[Paper Reading] Transformer-VQ: 优雅地实现线性复杂度的注意力机制 - 来自知乎专栏「组会论文分享」,作者: GSAI-ALOHA https://zhuanlan.zhihu.com/p/678412206},\n\tlanguage = {en},\n\turldate = {2024-08-24},\n\tjournal = {组会论文分享},\n\tauthor = {GSAI-ALOHA},\n\tmonth = jan,\n\tyear = {2024},\n\tnote = {赞数:21;},\n\tkeywords = {\\#Deep Learning{\\textgreater}AE, \\#Transformer, \\#Zhihu, /unread, Attention, Transformer, 注意力},\n}\n\n\n\n\n\n\n\n\n\n\n\n","author_short":["GSAI-ALOHA"],"key":"gsai-aloha_paper_2024","id":"gsai-aloha_paper_2024","bibbaseid":"gsaialoha-paperreadingtransformervq-2024","role":"author","urls":{"Paper":"https://zhuanlan.zhihu.com/p/678412206"},"keyword":["#Deep Learning\\textgreaterAE","#Transformer","#Zhihu","/unread","Attention","Transformer","注意力"],"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"misc","biburl":"https://bibbase.org/zotero/zzhenry2012","dataSources":["nZHrFJKyxKKDaWYM8"],"keywords":["#deep learning\\textgreaterae","#transformer","#zhihu","/unread","attention","transformer","注意力"],"search_terms":["paper","reading","transformer","gsai-aloha"],"title":"[Paper Reading] Transformer-VQ: 优雅地实现线性复杂度的注意力机制","year":2024}