Adaptive Subgradient Methods for Online Learning and Stochastic Optimization. Duchi, J., Hazan, E., & Singer, Y. The Journal of Machine Learning Research, 12:2121-2159, July 2011.
Abstract: We present a new family of subgradient methods that dynamically incorporate knowledge of the geometry of the data observed in earlier iterations to perform more informative gradient-based learning. Metaphorically, the adaptation allows us to find needles in haystacks in the form of very predictive but rarely seen features. Our paradigm stems from recent advances in stochastic optimization and online learning which employ proximal functions to control the gradient steps of the algorithm. We describe and analyze an apparatus for adaptively modifying the proximal function, which significantly simplifies setting a learning rate and results in regret guarantees that are provably as good as the best proximal function that can be chosen in hindsight. We give several efficient algorithms for empirical risk minimization problems with common and important regularization functions and domain constraints. We experimentally study our theoretical analysis and show that adaptive subgradient methods outperform state-of-the-art, yet non-adaptive, subgradient algorithms.
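For orientation, the sketch below shows the per-coordinate (diagonal) adaptive step the abstract alludes to, in the simple unconstrained case with no proximal/composite regularization term. The step size eta, the damping constant eps, and the toy least-squares objective are illustrative assumptions, not the paper's exact algorithm or constants.

```python
import numpy as np

def adagrad_update(x, g, accum, eta=0.1, eps=1e-8):
    """One diagonal adaptive-subgradient step (sketch, unconstrained case).

    x     : current parameter vector
    g     : (sub)gradient at x
    accum : running sum of squared gradient coordinates
    """
    accum = accum + g ** 2                        # per-coordinate squared-gradient history
    x = x - eta * g / (np.sqrt(accum) + eps)      # rarely updated coordinates get larger steps
    return x, accum

# Toy usage on a least-squares objective (hypothetical data, full gradients for simplicity)
rng = np.random.default_rng(0)
A, b = rng.normal(size=(100, 5)), rng.normal(size=100)
x, accum = np.zeros(5), np.zeros(5)
for t in range(200):
    g = 2.0 * A.T @ (A @ x - b) / len(b)
    x, accum = adagrad_update(x, g, accum)
```

Coordinates with a small accumulated squared-gradient history take larger steps, which is the "needles in haystacks" effect the abstract describes for rarely seen but highly predictive features.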
@article{duchiAdaptiveSubgradientMethods2011,
title = {Adaptive Subgradient Methods for Online Learning and Stochastic Optimization},
type = {article},
year = {2011},
pages = {2121-2159},
volume = {12},
month = {7},
abstract = {We present a new family of subgradient methods that dynamically incorporate knowledge of the geometry of the data observed in earlier iterations to perform more informative gradient-based learning. Metaphorically, the adaptation allows us to find needles in haystacks in the form of very predictive but rarely seen features. Our paradigm stems from recent advances in stochastic optimization and online learning which employ proximal functions to control the gradient steps of the algorithm. We describe and analyze an apparatus for adaptively modifying the proximal function, which significantly simplifies setting a learning rate and results in regret guarantees that are provably as good as the best proximal function that can be chosen in hindsight. We give several efficient algorithms for empirical risk minimization problems with common and important regularization functions and domain constraints. We experimentally study our theoretical analysis and show that adaptive subgradient methods outperform state-of-the-art, yet non-adaptive, subgradient algorithms.},
bibtype = {article},
author = {Duchi, John and Hazan, Elad and Singer, Yoram},
journal = {The Journal of Machine Learning Research}
}
{"_id":"oCsiLL8hQc6CMvHcw","bibbaseid":"duchi-hazan-singer-adaptivesubgradientmethodsforonlinelearningandstochasticoptimization-2011","downloads":0,"creationDate":"2017-11-27T01:11:35.889Z","title":"Adaptive Subgradient Methods for Online Learning and Stochastic Optimization","author_short":["Duchi, J.","Hazan, E.","Singer, Y."],"year":2011,"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibdata":{"title":"Adaptive Subgradient Methods for Online Learning and Stochastic Optimization","type":"article","year":"2011","pages":"2121-2159","volume":"12","month":"7","id":"4e10f41a-7dd6-3b12-9f35-f31cc2767a04","created":"2022-03-28T09:45:03.734Z","file_attached":"true","profile_id":"235249c2-3ed4-314a-b309-b1ea0330f5d9","group_id":"1ff583c0-be37-34fa-9c04-73c69437d354","last_modified":"2022-03-29T08:06:10.997Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"citation_key":"duchiAdaptiveSubgradientMethods2011","source_type":"article","private_publication":false,"abstract":"We present a new family of subgradient methods that dynamically incorporate knowledge of the geometry of the data observed in earlier iterations to perform more informative gradient-based learning. Metaphorically, the adaptation allows us to find needles in haystacks in the form of very predictive but rarely seen features. Our paradigm stems from recent advances in stochastic optimization and online learning which employ proximal functions to control the gradient steps of the algorithm. We describe and analyze an apparatus for adaptively modifying the proximal function, which significantly simplifies setting a learning rate and results in regret guarantees that are provably as good as the best proximal function that can be chosen in hindsight. We give several efficient algorithms for empirical risk minimization problems with common and important regularization functions and domain constraints. We experimentally study our theoretical analysis and show that adaptive subgradient methods outperform state-of-the-art, yet non-adaptive, subgradient algorithms.","bibtype":"article","author":"Duchi, John and Hazan, Elad and Singer, Yoram","journal":"The Journal of Machine Learning Research","number":"null","bibtex":"@article{\n title = {Adaptive Subgradient Methods for Online Learning and Stochastic Optimization},\n type = {article},\n year = {2011},\n pages = {2121-2159},\n volume = {12},\n month = {7},\n id = {4e10f41a-7dd6-3b12-9f35-f31cc2767a04},\n created = {2022-03-28T09:45:03.734Z},\n file_attached = {true},\n profile_id = {235249c2-3ed4-314a-b309-b1ea0330f5d9},\n group_id = {1ff583c0-be37-34fa-9c04-73c69437d354},\n last_modified = {2022-03-29T08:06:10.997Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n citation_key = {duchiAdaptiveSubgradientMethods2011},\n source_type = {article},\n private_publication = {false},\n abstract = {We present a new family of subgradient methods that dynamically incorporate knowledge of the geometry of the data observed in earlier iterations to perform more informative gradient-based learning. Metaphorically, the adaptation allows us to find needles in haystacks in the form of very predictive but rarely seen features. Our paradigm stems from recent advances in stochastic optimization and online learning which employ proximal functions to control the gradient steps of the algorithm. 
We describe and analyze an apparatus for adaptively modifying the proximal function, which significantly simplifies setting a learning rate and results in regret guarantees that are provably as good as the best proximal function that can be chosen in hindsight. We give several efficient algorithms for empirical risk minimization problems with common and important regularization functions and domain constraints. We experimentally study our theoretical analysis and show that adaptive subgradient methods outperform state-of-the-art, yet non-adaptive, subgradient algorithms.},\n bibtype = {article},\n author = {Duchi, John and Hazan, Elad and Singer, Yoram},\n journal = {The Journal of Machine Learning Research},\n number = {null}\n}","author_short":["Duchi, J.","Hazan, E.","Singer, Y."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/5b8f7eb7-4e5e-6eaf-3893-ec19e6ba103e/Duchi_et_al___2011___Adaptive_Subgradient_Methods_for_Online_Learning_a.pdf.pdf"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"duchi-hazan-singer-adaptivesubgradientmethodsforonlinelearningandstochasticoptimization-2011","role":"author","metadata":{"authorlinks":{}},"downloads":0},"search_terms":["adaptive","subgradient","methods","online","learning","stochastic","optimization","duchi","hazan","singer"],"keywords":[],"authorIDs":[],"dataSources":["C5FtkvWWggFfMJTFX","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}