The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits. Ma, S., Wang, H., Ma, L., Wang, L., Wang, W., Huang, S., Dong, L., Wang, R., Xue, J., & Wei, F. CoRR, 2024.
@article{DBLP:journals/corr/abs-2402-17764,
  author       = {Shuming Ma and
                  Hongyu Wang and
                  Lingxiao Ma and
                  Lei Wang and
                  Wenhui Wang and
                  Shaohan Huang and
                  Li Dong and
                  Ruiping Wang and
                  Jilong Xue and
                  Furu Wei},
  title        = {The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits},
  journal      = {CoRR},
  volume       = {abs/2402.17764},
  year         = {2024},
  url          = {https://doi.org/10.48550/arXiv.2402.17764},
  doi          = {10.48550/ARXIV.2402.17764},
  eprinttype   = {arXiv},
  eprint       = {2402.17764},
  timestamp    = {Fri, 19 Jul 2024 01:00:00 +0200},
  biburl       = {https://dblp.org/rec/journals/corr/abs-2402-17764.bib},
  bibsource    = {dblp computer science bibliography, https://dblp.org}
}