LoRA: Low-Rank Adaptation of Large Language Models. November, 2023. original-date: 2021-06-18T02:16:35Z
[Paper] [abstract] [bibtex] — Abstract: Code for loralib, an implementation of "LoRA: Low-Rank Adaptation of Large Language Models"
% Software/repository citation for Microsoft's loralib (LoRA reference implementation).
% Cleaned from a Zotero/BibBase auto-export: per-word title bracing removed (brace only
% the acronym {LoRA} so styles can still apply sentence casing), corporate author added
% (double-braced so BibTeX treats it as one indivisible surname), and the personal
% "/unread" workflow tag dropped from keywords. Citation key kept unchanged for
% compatibility with existing \cite commands.
@misc{noauthor_lora_2023,
title = {{LoRA}: Low-Rank Adaptation of Large Language Models},
author = {{Microsoft}},
copyright = {MIT},
shorttitle = {{LoRA}},
url = {https://github.com/microsoft/LoRA},
abstract = {Code for loralib, an implementation of "LoRA: Low-Rank Adaptation of Large Language Models"},
urldate = {2023-11-30},
publisher = {Microsoft},
month = nov,
year = {2023},
note = {original-date: 2021-06-18T02:16:35Z},
keywords = {adaptation, deberta, deep-learning, gpt-2, gpt-3, language-model, lora, low-rank, pytorch, roberta},
}
Downloads: 0
{"_id":"W4kwNXHeh4igzuqda","bibbaseid":"anonymous-loralowrankadaptationoflargelanguagemodels-2023","bibdata":{"bibtype":"misc","type":"misc","title":"LoRA: Low-Rank Adaptation of Large Language Models","copyright":"MIT","shorttitle":"LoRA","url":"https://github.com/microsoft/LoRA","abstract":"Code for loralib, an implementation of \"LoRA: Low-Rank Adaptation of Large Language Models\"","urldate":"2023-11-30","publisher":"Microsoft","month":"November","year":"2023","note":"original-date: 2021-06-18T02:16:35Z","keywords":"/unread, adaptation, deberta, deep-learning, gpt-2, gpt-3, language-model, lora, low-rank, pytorch, roberta","bibtex":"@misc{noauthor_lora_2023,\n\ttitle = {{LoRA}: {Low}-{Rank} {Adaptation} of {Large} {Language} {Models}},\n\tcopyright = {MIT},\n\tshorttitle = {{LoRA}},\n\turl = {https://github.com/microsoft/LoRA},\n\tabstract = {Code for loralib, an implementation of \"LoRA: Low-Rank Adaptation of Large Language Models\"},\n\turldate = {2023-11-30},\n\tpublisher = {Microsoft},\n\tmonth = nov,\n\tyear = {2023},\n\tnote = {original-date: 2021-06-18T02:16:35Z},\n\tkeywords = {/unread, adaptation, deberta, deep-learning, gpt-2, gpt-3, language-model, lora, low-rank, pytorch, roberta},\n}\n\n\n\n","key":"noauthor_lora_2023","id":"noauthor_lora_2023","bibbaseid":"anonymous-loralowrankadaptationoflargelanguagemodels-2023","role":"","urls":{"Paper":"https://github.com/microsoft/LoRA"},"keyword":["/unread","adaptation","deberta","deep-learning","gpt-2","gpt-3","language-model","lora","low-rank","pytorch","roberta"],"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"misc","biburl":"https://bibbase.org/zotero/zzhenry2012","dataSources":["nZHrFJKyxKKDaWYM8"],"keywords":["/unread","adaptation","deberta","deep-learning","gpt-2","gpt-3","language-model","lora","low-rank","pytorch","roberta"],"search_terms":["lora","low","rank","adaptation","large","language","models"],"title":"LoRA: Low-Rank Adaptation of Large Language Models","year":2023}