[Paper Reading] Transformer-VQ: 优雅地实现线性复杂度的注意力机制 (Elegantly Achieving Linear-Complexity Attention). GSAI-ALOHA, January 2024. Upvotes: 21.
From the Zhihu column 「组会论文分享」 ("Group Meeting Paper Sharing"), by GSAI-ALOHA: https://zhuanlan.zhihu.com/p/678412206
@misc{gsai-aloha_paper_2024,
	type = {Zhihu column article},
	title = {[{Paper} {Reading}] {Transformer}-{VQ}: 优雅地实现线性复杂度的注意力机制},
	shorttitle = {[{Paper} {Reading}] {Transformer}-{VQ}},
	url = {https://zhuanlan.zhihu.com/p/678412206},
	language = {zh},
	urldate = {2024-08-24},
	journal = {组会论文分享},
	author = {GSAI-ALOHA},
	month = jan,
	year = {2024},
	note = {Upvotes: 21},
	keywords = {\#Deep Learning{\textgreater}AE, \#Transformer, \#Zhihu, Attention, Transformer},
}
