Automatic differentiation in machine learning: a survey. Baydin, A. G., Pearlmutter, B. A., Radul, A. A., & Siskind, J. M. 2015. arXiv:1502.05767. Comment: 43 pages, 5 figures. [Paper] [Abstract] [BibTeX] Abstract: Derivatives, mostly in the form of gradients and Hessians, are ubiquitous in machine learning. Automatic differentiation (AD), also called algorithmic differentiation or simply "autodiff", is a family of techniques similar to but more general than backpropagation for efficiently and accurately evaluating derivatives of numeric functions expressed as computer programs. AD is a small but established field with applications in areas including computational fluid dynamics, atmospheric sciences, and engineering design optimization. Until very recently, the fields of machine learning and AD have largely been unaware of each other and, in some cases, have independently discovered each other's results. Despite its relevance, general-purpose AD has been missing from the machine learning toolbox, a situation slowly changing with its ongoing adoption under the names "dynamic computational graphs" and "differentiable programming". We survey the intersection of AD and machine learning, cover applications where AD has direct relevance, and address the main implementation techniques. By precisely defining the main differentiation techniques and their interrelationships, we aim to bring clarity to the usage of the terms "autodiff", "automatic differentiation", and "symbolic differentiation" as these are encountered more and more in machine learning settings.
@misc{baydin2015automatic,
  author        = {Baydin, Atilim Gunes and Pearlmutter, Barak A. and Radul, Alexey Andreyevich and Siskind, Jeffrey Mark},
  title         = {Automatic Differentiation in Machine Learning: A Survey},
  year          = {2015},
  eprint        = {1502.05767},
  archiveprefix = {arXiv},
  primaryclass  = {cs.SC},
  url           = {http://arxiv.org/abs/1502.05767},
  note          = {43 pages, 5 figures},
  keywords      = {deep-learning paper 2015 pytorch symbolic-computation arxiv calculus},
  abstract      = {Derivatives, mostly in the form of gradients and Hessians, are ubiquitous in
machine learning. Automatic differentiation (AD), also called algorithmic
differentiation or simply ``autodiff'', is a family of techniques similar to but
more general than backpropagation for efficiently and accurately evaluating
derivatives of numeric functions expressed as computer programs. AD is a small
but established field with applications in areas including computational fluid
dynamics, atmospheric sciences, and engineering design optimization. Until very
recently, the fields of machine learning and AD have largely been unaware of
each other and, in some cases, have independently discovered each other's
results. Despite its relevance, general-purpose AD has been missing from the
machine learning toolbox, a situation slowly changing with its ongoing adoption
under the names ``dynamic computational graphs'' and ``differentiable
programming''. We survey the intersection of AD and machine learning, cover
applications where AD has direct relevance, and address the main implementation
techniques. By precisely defining the main differentiation techniques and their
interrelationships, we aim to bring clarity to the usage of the terms
``autodiff'', ``automatic differentiation'', and ``symbolic differentiation'' as
these are encountered more and more in machine learning settings.},
}
Downloads: 0
{"_id":"v9Ryui9FQKmTY3m9a","bibbaseid":"baydin-pearlmutter-radul-siskind-automaticdifferentiationinmachinelearningasurvey-2015","author_short":["Baydin, A. G.","Pearlmutter, B. A.","Radul, A. A.","Siskind, J. M."],"bibdata":{"bibtype":"misc","type":"misc","abstract":"Derivatives, mostly in the form of gradients and Hessians, are ubiquitous in machine learning. Automatic differentiation (AD), also called algorithmic differentiation or simply \"autodiff\", is a family of techniques similar to but more general than backpropagation for efficiently and accurately evaluating derivatives of numeric functions expressed as computer programs. AD is a small but established field with applications in areas including computational fluid dynamics, atmospheric sciences, and engineering design optimization. Until very recently, the fields of machine learning and AD have largely been unaware of each other and, in some cases, have independently discovered each other's results. Despite its relevance, general-purpose AD has been missing from the machine learning toolbox, a situation slowly changing with its ongoing adoption under the names \"dynamic computational graphs\" and \"differentiable programming\". We survey the intersection of AD and machine learning, cover applications where AD has direct relevance, and address the main implementation techniques. 
By precisely defining the main differentiation techniques and their interrelationships, we aim to bring clarity to the usage of the terms \"autodiff\", \"automatic differentiation\", and \"symbolic differentiation\" as these are encountered more and more in machine learning settings.","added-at":"2018-06-26T08:30:34.000+0200","author":[{"propositions":[],"lastnames":["Baydin"],"firstnames":["Atilim","Gunes"],"suffixes":[]},{"propositions":[],"lastnames":["Pearlmutter"],"firstnames":["Barak","A."],"suffixes":[]},{"propositions":[],"lastnames":["Radul"],"firstnames":["Alexey","Andreyevich"],"suffixes":[]},{"propositions":[],"lastnames":["Siskind"],"firstnames":["Jeffrey","Mark"],"suffixes":[]}],"biburl":"https://www.bibsonomy.org/bibtex/2edc58f693ea8957b7976b4c1fb6bb331/achakraborty","description":"[1502.05767] Automatic differentiation in machine learning: a survey","interhash":"6622f93bca54363f6b17d3dfd83da195","intrahash":"edc58f693ea8957b7976b4c1fb6bb331","keywords":"deep-learning paper 2015 pytorch symbolic-computation arxiv calculus","note":"cite arxiv:1502.05767Comment: 43 pages, 5 figures","timestamp":"2018-06-26T08:30:34.000+0200","title":"Automatic differentiation in machine learning: a survey","url":"http://arxiv.org/abs/1502.05767","year":"2015","bibtex":"@misc{baydin2015automatic,\n abstract = {Derivatives, mostly in the form of gradients and Hessians, are ubiquitous in\r\nmachine learning. Automatic differentiation (AD), also called algorithmic\r\ndifferentiation or simply \"autodiff\", is a family of techniques similar to but\r\nmore general than backpropagation for efficiently and accurately evaluating\r\nderivatives of numeric functions expressed as computer programs. AD is a small\r\nbut established field with applications in areas including computational fluid\r\ndynamics, atmospheric sciences, and engineering design optimization. 
Until very\r\nrecently, the fields of machine learning and AD have largely been unaware of\r\neach other and, in some cases, have independently discovered each other's\r\nresults. Despite its relevance, general-purpose AD has been missing from the\r\nmachine learning toolbox, a situation slowly changing with its ongoing adoption\r\nunder the names \"dynamic computational graphs\" and \"differentiable\r\nprogramming\". We survey the intersection of AD and machine learning, cover\r\napplications where AD has direct relevance, and address the main implementation\r\ntechniques. By precisely defining the main differentiation techniques and their\r\ninterrelationships, we aim to bring clarity to the usage of the terms\r\n\"autodiff\", \"automatic differentiation\", and \"symbolic differentiation\" as\r\nthese are encountered more and more in machine learning settings.},\n added-at = {2018-06-26T08:30:34.000+0200},\n author = {Baydin, Atilim Gunes and Pearlmutter, Barak A. and Radul, Alexey Andreyevich and Siskind, Jeffrey Mark},\n biburl = {https://www.bibsonomy.org/bibtex/2edc58f693ea8957b7976b4c1fb6bb331/achakraborty},\n description = {[1502.05767] Automatic differentiation in machine learning: a survey},\n interhash = {6622f93bca54363f6b17d3dfd83da195},\n intrahash = {edc58f693ea8957b7976b4c1fb6bb331},\n keywords = {deep-learning paper 2015 pytorch symbolic-computation arxiv calculus},\n note = {cite arxiv:1502.05767Comment: 43 pages, 5 figures},\n timestamp = {2018-06-26T08:30:34.000+0200},\n title = {Automatic differentiation in machine learning: a survey},\n url = {http://arxiv.org/abs/1502.05767},\n year = 2015\n}\n\n","author_short":["Baydin, A. G.","Pearlmutter, B. A.","Radul, A. A.","Siskind, J. 
M."],"key":"baydin2015automatic-1","id":"baydin2015automatic-1","bibbaseid":"baydin-pearlmutter-radul-siskind-automaticdifferentiationinmachinelearningasurvey-2015","role":"author","urls":{"Paper":"http://arxiv.org/abs/1502.05767"},"keyword":["deep-learning paper 2015 pytorch symbolic-computation arxiv calculus"],"metadata":{"authorlinks":{}}},"bibtype":"misc","biburl":"http://www.bibsonomy.org/bib/author/barak ?items=1000","dataSources":["sNDcqxpX5cBQ5Pu7G"],"keywords":["deep-learning paper 2015 pytorch symbolic-computation arxiv calculus"],"search_terms":["automatic","differentiation","machine","learning","survey","baydin","pearlmutter","radul","siskind"],"title":"Automatic differentiation in machine learning: a survey","year":2015}