ICQuant: Index Coding enables Low-bit LLM Quantization. Li, X., Hanna, O., Fragouli, C., & Diggavi, S. In Second Conference on Language Modeling, 2025.

bibtex:
@inproceedings{li2025icquant,
title={{ICQ}uant: Index Coding enables Low-bit {LLM} Quantization},
author={Xinlin Li and Osama Hanna and Christina Fragouli and Suhas Diggavi},
booktitle={Second Conference on Language Modeling},
year={2025}
}