Knowledge Distillation. Upadhyay, U. May 2019. Abstract: Knowledge distillation is a model compression method in which a small model is trained to mimic a pretrained, larger model.
@misc{upadhyay_knowledge_2019,
title = {Knowledge {Distillation}},
url = {https://medium.com/neuralmachine/knowledge-distillation-dc241d7c2322},
abstract = {Knowledge distillation is a model compression method in which a small model is trained to mimic a pretrained, larger model.},
language = {en},
urldate = {2019-11-18},
journal = {Medium},
author = {Upadhyay, Ujjwal},
month = may,
year = {2019}
}
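
The abstract describes the standard teacher-student setup. As a minimal sketch of how such a distillation loss is commonly implemented in PyTorch (the function name, temperature T, and mixing weight alpha are illustrative assumptions, not values taken from the cited post):

import torch
import torch.nn.functional as F

def distillation_loss(student_logits, teacher_logits, labels, T=2.0, alpha=0.5):
    """Blend soft teacher targets with hard ground-truth labels (hypothetical helper)."""
    # Soft targets: KL divergence between the softened student and teacher
    # distributions; scaling by T*T keeps gradients comparable across temperatures.
    soft = F.kl_div(
        F.log_softmax(student_logits / T, dim=-1),
        F.softmax(teacher_logits / T, dim=-1),
        reduction="batchmean",
    ) * (T * T)
    # Hard targets: ordinary cross-entropy against the ground-truth labels.
    hard = F.cross_entropy(student_logits, labels)
    return alpha * soft + (1 - alpha) * hard

In the usual training loop, the teacher's forward pass runs under torch.no_grad() and only the student's parameters are updated.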
{"_id":"qkeKkQK9bnfRAMxLP","bibbaseid":"upadhyay-knowledgedistillation-2019","authorIDs":[],"author_short":["Upadhyay, U."],"bibdata":{"bibtype":"misc","type":"misc","title":"Knowledge Distillation","url":"https://medium.com/neuralmachine/knowledge-distillation-dc241d7c2322","abstract":"Knowledge distillation is model compression method in which a small model is trained to mimic a pretrained, larger model.","language":"en","urldate":"2019-11-18","journal":"Medium","author":[{"propositions":[],"lastnames":["Upadhyay"],"firstnames":["Ujjwal"],"suffixes":[]}],"month":"May","year":"2019","note":"00000","bibtex":"@misc{upadhyay_knowledge_2019,\n\ttitle = {Knowledge {Distillation}},\n\turl = {https://medium.com/neuralmachine/knowledge-distillation-dc241d7c2322},\n\tabstract = {Knowledge distillation is model compression method in which a small model is trained to mimic a pretrained, larger model.},\n\tlanguage = {en},\n\turldate = {2019-11-18},\n\tjournal = {Medium},\n\tauthor = {Upadhyay, Ujjwal},\n\tmonth = may,\n\tyear = {2019},\n\tnote = {00000}\n}\n\n","author_short":["Upadhyay, U."],"key":"upadhyay_knowledge_2019","id":"upadhyay_knowledge_2019","bibbaseid":"upadhyay-knowledgedistillation-2019","role":"author","urls":{"Paper":"https://medium.com/neuralmachine/knowledge-distillation-dc241d7c2322"},"downloads":0,"html":""},"bibtype":"misc","biburl":"http://www.telemidia.puc-rio.br/~alan/files/all.bib","creationDate":"2020-03-03T14:08:14.945Z","downloads":0,"keywords":[],"search_terms":["knowledge","distillation","upadhyay"],"title":"Knowledge Distillation","year":2019,"dataSources":["jAxurbvLP8q5LTdLa"]}