An overview of gradient descent optimization algorithms. Ruder, S. arXiv:1609.04747 [cs], June 2017.
Abstract: Gradient descent optimization algorithms, while increasingly popular, are often used as black-box optimizers, as practical explanations of their strengths and weaknesses are hard to come by. This article aims to provide the reader with intuitions with regard to the behaviour of different algorithms that will allow her to put them to use. In the course of this overview, we look at different variants of gradient descent, summarize challenges, introduce the most common optimization algorithms, review architectures in a parallel and distributed setting, and investigate additional strategies for optimizing gradient descent.
@misc{ruder_overview_2017,
	title = {An overview of gradient descent optimization algorithms},
	author = {Ruder, Sebastian},
	year = {2017},
	month = jun,
	eprint = {1609.04747},
	eprinttype = {arXiv},
	eprintclass = {cs.LG},
	url = {http://arxiv.org/abs/1609.04747},
	urldate = {2020-05-15},
	language = {en},
	abstract = {Gradient descent optimization algorithms, while increasingly popular, are often used as black-box optimizers, as practical explanations of their strengths and weaknesses are hard to come by. This article aims to provide the reader with intuitions with regard to the behaviour of different algorithms that will allow her to put them to use. In the course of this overview, we look at different variants of gradient descent, summarize challenges, introduce the most common optimization algorithms, review architectures in a parallel and distributed setting, and investigate additional strategies for optimizing gradient descent.},
	keywords = {Computer Science - Machine Learning},
	internal-note = {arXiv preprint; changed from @article (no journal exists for this work) and moved the arXiv ID out of journal/note into eprint fields; dropped the Zotero "No DOI found" keyword artifact},
}
Downloads: 0
{"_id":"cNhFGwY5sCxKSeThE","bibbaseid":"ruder-anoverviewofgradientdescentoptimizationalgorithms-2017","author_short":["Ruder, S."],"bibdata":{"bibtype":"article","type":"article","title":"An overview of gradient descent optimization algorithms","url":"http://arxiv.org/abs/1609.04747","abstract":"Gradient descent optimization algorithms, while increasingly popular, are often used as black-box optimizers, as practical explanations of their strengths and weaknesses are hard to come by. This article aims to provide the reader with intuitions with regard to the behaviour of different algorithms that will allow her to put them to use. In the course of this overview, we look at different variants of gradient descent, summarize challenges, introduce the most common optimization algorithms, review architectures in a parallel and distributed setting, and investigate additional strategies for optimizing gradient descent.","language":"en","urldate":"2020-05-15","journal":"arXiv:1609.04747 [cs]","author":[{"propositions":[],"lastnames":["Ruder"],"firstnames":["Sebastian"],"suffixes":[]}],"month":"June","year":"2017","note":"arXiv: 1609.04747","keywords":"Computer Science - Machine Learning, ⛔ No DOI found","bibtex":"@article{ruder_overview_2017,\n\ttitle = {An overview of gradient descent optimization algorithms},\n\turl = {http://arxiv.org/abs/1609.04747},\n\tabstract = {Gradient descent optimization algorithms, while increasingly popular, are often used as black-box optimizers, as practical explanations of their strengths and weaknesses are hard to come by. This article aims to provide the reader with intuitions with regard to the behaviour of different algorithms that will allow her to put them to use. 
In the course of this overview, we look at different variants of gradient descent, summarize challenges, introduce the most common optimization algorithms, review architectures in a parallel and distributed setting, and investigate additional strategies for optimizing gradient descent.},\n\tlanguage = {en},\n\turldate = {2020-05-15},\n\tjournal = {arXiv:1609.04747 [cs]},\n\tauthor = {Ruder, Sebastian},\n\tmonth = jun,\n\tyear = {2017},\n\tnote = {arXiv: 1609.04747},\n\tkeywords = {Computer Science - Machine Learning, ⛔ No DOI found},\n}\n\n\n\n","author_short":["Ruder, S."],"key":"ruder_overview_2017","id":"ruder_overview_2017","bibbaseid":"ruder-anoverviewofgradientdescentoptimizationalgorithms-2017","role":"author","urls":{"Paper":"http://arxiv.org/abs/1609.04747"},"keyword":["Computer Science - Machine Learning","⛔ No DOI found"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"article","biburl":"https://bibbase.org/zotero/apolea","dataSources":["CmHEoydhafhbkXXt5","f9QNc6eirHM3cmhSh"],"keywords":["computer science - machine learning","⛔ no doi found"],"search_terms":["overview","gradient","descent","optimization","algorithms","ruder"],"title":"An overview of gradient descent optimization algorithms","year":2017}