Large scale distributed neural network training through online distillation. Anil, R., Pereyra, G., Passos, A., Ormándi, R., Dahl, G. E., & Hinton, G. E. In ICLR (Poster), 2018. OpenReview.net. Link Paper bibtex @inproceedings{conf/iclr/AnilPPODH18,
added-at = {2019-07-25T00:00:00.000+0200},
author = {Anil, Rohan and Pereyra, Gabriel and Passos, Alexandre and Ormándi, Róbert and Dahl, George E. and Hinton, Geoffrey E.},
biburl = {https://www.bibsonomy.org/bibtex/28347fdf79859f673a70708de1d1b68ae/dblp},
booktitle = {ICLR (Poster)},
crossref = {conf/iclr/2018},
ee = {https://openreview.net/forum?id=rkr1UDeC-},
interhash = {fcef6503b010a52843463bfcd0659f26},
intrahash = {8347fdf79859f673a70708de1d1b68ae},
keywords = {dblp},
publisher = {OpenReview.net},
timestamp = {2019-07-26T11:43:51.000+0200},
title = {Large scale distributed neural network training through online distillation.},
url = {http://dblp.uni-trier.de/db/conf/iclr/iclr2018.html#AnilPPODH18},
year = 2018
}
Downloads: 0
{"_id":"H4bm9eTBTWEM2igr8","bibbaseid":"anil-pereyra-passos-ormndi-dahl-hinton-largescaledistributedneuralnetworktrainingthroughonlinedistillation-2018","authorIDs":["gN5Lfqjgx8P4c7HJT"],"author_short":["Anil, R.","Pereyra, G.","Passos, A.","Ormándi, R.","Dahl, G. E.","Hinton, G. E."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","added-at":"2019-07-25T00:00:00.000+0200","author":[{"propositions":[],"lastnames":["Anil"],"firstnames":["Rohan"],"suffixes":[]},{"propositions":[],"lastnames":["Pereyra"],"firstnames":["Gabriel"],"suffixes":[]},{"propositions":[],"lastnames":["Passos"],"firstnames":["Alexandre"],"suffixes":[]},{"propositions":[],"lastnames":["Ormándi"],"firstnames":["Róbert"],"suffixes":[]},{"propositions":[],"lastnames":["Dahl"],"firstnames":["George","E."],"suffixes":[]},{"propositions":[],"lastnames":["Hinton"],"firstnames":["Geoffrey","E."],"suffixes":[]}],"biburl":"https://www.bibsonomy.org/bibtex/28347fdf79859f673a70708de1d1b68ae/dblp","booktitle":"ICLR (Poster)","crossref":"conf/iclr/2018","ee":"https://openreview.net/forum?id=rkr1UDeC-","interhash":"fcef6503b010a52843463bfcd0659f26","intrahash":"8347fdf79859f673a70708de1d1b68ae","keywords":"dblp","publisher":"OpenReview.net","timestamp":"2019-07-26T11:43:51.000+0200","title":"Large scale distributed neural network training through online distillation.","url":"http://dblp.uni-trier.de/db/conf/iclr/iclr2018.html#AnilPPODH18","year":"2018","bibtex":"@inproceedings{conf/iclr/AnilPPODH18,\n added-at = {2019-07-25T00:00:00.000+0200},\n author = {Anil, Rohan and Pereyra, Gabriel and Passos, Alexandre and Ormándi, Róbert and Dahl, George E. and Hinton, Geoffrey E.},\n biburl = {https://www.bibsonomy.org/bibtex/28347fdf79859f673a70708de1d1b68ae/dblp},\n booktitle = {ICLR (Poster)},\n crossref = {conf/iclr/2018},\n ee = {https://openreview.net/forum?id=rkr1UDeC-},\n interhash = {fcef6503b010a52843463bfcd0659f26},\n intrahash = {8347fdf79859f673a70708de1d1b68ae},\n keywords = {dblp},\n publisher = {OpenReview.net},\n timestamp = {2019-07-26T11:43:51.000+0200},\n title = {Large scale distributed neural network training through online distillation.},\n url = {http://dblp.uni-trier.de/db/conf/iclr/iclr2018.html#AnilPPODH18},\n year = 2018\n}\n\n","author_short":["Anil, R.","Pereyra, G.","Passos, A.","Ormándi, R.","Dahl, G. E.","Hinton, G. E."],"key":"conf/iclr/AnilPPODH18","id":"conf/iclr/AnilPPODH18","bibbaseid":"anil-pereyra-passos-ormndi-dahl-hinton-largescaledistributedneuralnetworktrainingthroughonlinedistillation-2018","role":"author","urls":{"Link":"https://openreview.net/forum?id=rkr1UDeC-","Paper":"http://dblp.uni-trier.de/db/conf/iclr/iclr2018.html#AnilPPODH18"},"keyword":["dblp"],"downloads":0},"bibtype":"inproceedings","biburl":"http://www.bibsonomy.org/bib/author/Geoffrey Hinton?items=1000","creationDate":"2020-07-24T18:21:29.161Z","downloads":0,"keywords":["dblp"],"search_terms":["large","scale","distributed","neural","network","training","through","online","distillation","anil","pereyra","passos","ormándi","dahl","hinton"],"title":"Large scale distributed neural network training through online distillation.","year":2018,"dataSources":["9qjpnLCP4efAcKjDr"]}