Deep Learning at Scale. Viviani, P., Drocco, M., Baccega, D., Colonnelli, I., & Aldinucci, M. In Proc. of the 27th Euromicro Intl. Conference on Parallel, Distributed and Network-Based Processing (PDP), pages 124–131, Pavia, Italy, 2019. IEEE. doi: 10.1109/EMPDP.2019.8671552. Paper: https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf

Abstract: This work presents a novel approach to distributed training of deep neural networks (DNNs) that aims to overcome the issues related to mainstream approaches to data parallel training. Established techniques for data parallel training are discussed from both a parallel computing and deep learning perspective, then a different approach is presented that is meant to allow DNN training to scale while retaining good convergence properties. Moreover, an experimental implementation is presented as well as some preliminary results.
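For context, the sketch below illustrates the mainstream bulk-synchronous data-parallel scheme that the abstract contrasts with the paper's proposal: every worker computes gradients on its own shard of the mini-batch, and the gradients are averaged across workers with MPI_Allreduce before each update. This is only an illustrative sketch of the established technique, not the method proposed in the paper; N_PARAMS, compute_local_gradients and apply_update are hypothetical stubs standing in for a real model.

// Illustrative sketch (not the paper's method): synchronous data-parallel
// SGD where per-worker gradients are averaged with MPI_Allreduce.
#include <mpi.h>
#include <vector>
#include <cstddef>

static const std::size_t N_PARAMS = 1024;   // toy model size (assumption)

// Hypothetical stub: backpropagation on this worker's mini-batch shard.
std::vector<float> compute_local_gradients(int rank) {
    return std::vector<float>(N_PARAMS, 0.01f * static_cast<float>(rank + 1));
}

// Hypothetical stub: apply an SGD step to the locally replicated model.
void apply_update(const std::vector<float>& grad, float lr) {
    (void)grad; (void)lr;   // actual model update omitted in this sketch
}

int main(int argc, char** argv) {
    MPI_Init(&argc, &argv);
    int rank = 0, size = 1;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    const float lr = 0.01f;
    for (int step = 0; step < 100; ++step) {
        // Each worker computes gradients on its own data shard.
        std::vector<float> grad = compute_local_gradients(rank);

        // Bulk-synchronous data parallelism: sum gradients across all
        // workers, then divide by the worker count so every replica
        // applies the same averaged update at every step.
        std::vector<float> avg(grad.size());
        MPI_Allreduce(grad.data(), avg.data(), static_cast<int>(grad.size()),
                      MPI_FLOAT, MPI_SUM, MPI_COMM_WORLD);
        for (float& g : avg) g /= static_cast<float>(size);

        apply_update(avg, lr);
    }
    MPI_Finalize();
    return 0;
}

Built with an MPI C++ compiler (e.g. mpicxx) and launched under mpirun, every training step incurs a global reduction across all workers; the cost of this synchronization at scale is the kind of issue the paper sets out to address.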
@inproceedings{19:deeplearn:pdp,
title = {Deep Learning at Scale},
author = {Paolo Viviani and Maurizio Drocco and Daniele Baccega and Iacopo Colonnelli and Marco Aldinucci},
year = 2019,
booktitle = {Proc. of 27th Euromicro Intl. Conference on Parallel, Distributed and Network-Based Processing (PDP)},
publisher = {IEEE},
address = {Pavia, Italy},
pages = {124--131},
doi = {10.1109/EMPDP.2019.8671552},
url = {https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf},
abstract = {This work presents a novel approach to distributed training of deep neural networks (DNNs) that aims to overcome the issues related to mainstream approaches to data parallel training. Established techniques for data parallel training are discussed from both a parallel computing and deep learning perspective, then a different approach is presented that is meant to allow DNN training to scale while retaining good convergence properties. Moreover, an experimental implementation is presented as well as some preliminary results.},
keywords = {deep learning, distributed computing, machine learning, large scale, C++}
}
{"_id":"2NoQphfLMcJQDumzj","bibbaseid":"viviani-drocco-baccega-colonnelli-aldinucci-deeplearningatscale-2019","authorIDs":["5bb4c6bfa3c8aa100000009b","5e56a10de177dede0100003d","5e57aa4a041daade01000117","5e57e56ae391bbde01000188","6N3SwThRx9K5KAAin","bCc4amuPNTGgofXzr","q2TGqs9bcGBtp2e9n","tk6WiXXRRtqShqqSB","x5LAG7RBh2PAecafG"],"author_short":["Viviani, P.","Drocco, M.","Baccega, D.","Colonnelli, I.","Aldinucci, M."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"Deep Learning at Scale","author":[{"firstnames":["Paolo"],"propositions":[],"lastnames":["Viviani"],"suffixes":[]},{"firstnames":["Maurizio"],"propositions":[],"lastnames":["Drocco"],"suffixes":[]},{"firstnames":["Daniele"],"propositions":[],"lastnames":["Baccega"],"suffixes":[]},{"firstnames":["Iacopo"],"propositions":[],"lastnames":["Colonnelli"],"suffixes":[]},{"firstnames":["Marco"],"propositions":[],"lastnames":["Aldinucci"],"suffixes":[]}],"year":"2019","booktitle":"Proc. of 27th Euromicro Intl. Conference on Parallel Distributed and network-based Processing (PDP)","publisher":"IEEE","address":"Pavia, Italy","pages":"124–131","doi":"10.1109/EMPDP.2019.8671552","url":"https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf","abstract":"This work presents a novel approach to distributed training of deep neural networks (DNNs) that aims to overcome the issues related to mainstream approaches to data parallel training. Established techniques for data parallel training are discussed from both a parallel computing and deep learning perspective, then a different approach is presented that is meant to allow DNN training to scale while retaining good convergence properties. Moreover, an experimental implementation is presented as well as some preliminary results.","date-modified":"2019-03-22 22:49:35 +0100","keywords":"deep learning, distributed computing, machine learning, large scale, C++","bdsk-url-1":"https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf","bibtex":"@inproceedings{19:deeplearn:pdp,\r\n title = {Deep Learning at Scale},\r\n author = {Paolo Viviani and Maurizio Drocco and Daniele Baccega and Iacopo Colonnelli and Marco Aldinucci},\r\n year = 2019,\r\n booktitle = {Proc. of 27th Euromicro Intl. Conference on Parallel Distributed and network-based Processing (PDP)},\r\n publisher = {IEEE},\r\n address = {Pavia, Italy},\r\n pages = {124--131},\r\n doi = {10.1109/EMPDP.2019.8671552},\r\n url = {https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf},\r\n abstract = {This work presents a novel approach to distributed training of deep neural networks (DNNs) that aims to overcome the issues related to mainstream approaches to data parallel training. Established techniques for data parallel training are discussed from both a parallel computing and deep learning perspective, then a different approach is presented that is meant to allow DNN training to scale while retaining good convergence properties. 
Moreover, an experimental implementation is presented as well as some preliminary results.},\r\n date-modified = {2019-03-22 22:49:35 +0100},\r\n keywords = {deep learning, distributed computing, machine learning, large scale, C++},\r\n bdsk-url-1 = {https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf}\r\n}\r\n","author_short":["Viviani, P.","Drocco, M.","Baccega, D.","Colonnelli, I.","Aldinucci, M."],"key":"19:deeplearn:pdp","id":"19:deeplearn:pdp","bibbaseid":"viviani-drocco-baccega-colonnelli-aldinucci-deeplearningatscale-2019","role":"author","urls":{"Paper":"https://iris.unito.it/retrieve/handle/2318/1695211/487778/19_deeplearning_PDP.pdf"},"keyword":["deep learning","distributed computing","machine learning","large scale","C++"],"metadata":{"authorlinks":{"viviani, p":"https://bibbase.org/show?bib=https://bibbase.org/network/files/SXHKYtfTv3ba9BmDx&msg=preview&fileId=SXHKYtfTv3ba9BmDx"}},"downloads":1},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/paoloviviani/bibliography/master/viviani.bib","creationDate":"2019-11-27T10:14:36.998Z","downloads":1,"keywords":["deep learning","distributed computing","machine learning","large scale","c++"],"search_terms":["deep","learning","scale","viviani","drocco","baccega","colonnelli","aldinucci"],"title":"Deep Learning at Scale","year":2019,"dataSources":["XCfvaPF2g4xqh898E","FtNJan832qddKbHDz","WR8eprAvs6fBPvjR2","ujpKmnnjrbovxsYJh","bnaxeHFm2N3eaGmGd","cqD6ryMyGP3TmcjSu","JbC4F4KbYHD7sX85r"]}