GRADIENT CONVERGENCE IN GRADIENT METHODS. Bertsekas, D. P & Tsitsiklis, J. N. abstract bibtex For the classical gradient method x_{t+1} = x_t − γ_t ∇f(x_t) and several deterministic and stochastic variants, we discuss the issue of convergence of the gradient sequence ∇f(x_t) and the attendant issue of stationarity of limit points of x_t. We assume that ∇f is Lipschitz continuous, and that the stepsize γ_t diminishes to 0 and satisfies standard stochastic approximation conditions. We show that either f(x_t) → −∞ or else f(x_t) converges to a finite value and ∇f(x_t) → 0 (with probability 1 in the stochastic case). Existing results assume various boundedness conditions such as boundedness from below of f, or boundedness of ∇f(x_t), or boundedness of x_t.
@article{bertsekas_gradient_nodate,
title = {{GRADIENT} {CONVERGENCE} {IN} {GRADIENT} {METHODS}},
	abstract = {For the classical gradient method $x_{t+1} = x_t - \gamma_t \nabla f(x_t)$ and several deterministic and stochastic variants, we discuss the issue of convergence of the gradient sequence $\nabla f(x_t)$ and the attendant issue of stationarity of limit points of $x_t$. We assume that $\nabla f$ is Lipschitz continuous, and that the stepsize $\gamma_t$ diminishes to 0 and satisfies standard stochastic approximation conditions. We show that either $f(x_t) \to -\infty$ or else $f(x_t)$ converges to a finite value and $\nabla f(x_t) \to 0$ (with probability 1 in the stochastic case). Existing results assume various boundedness conditions such as boundedness from below of $f$, or boundedness of $\nabla f(x_t)$, or boundedness of $x_t$.},
language = {en},
journal = {. Introduction},
author = {Bertsekas, Dimitri P and Tsitsiklis, John N},
pages = {24}
}
Downloads: 0
{"_id":"Y8cdLZRyCiHSFAvHP","bibbaseid":"bertsekas-tsitsiklis-gradientconvergenceingradientmethods","authorIDs":[],"author_short":["Bertsekas, D. P","Tsitsiklis, J. N"],"bibdata":{"bibtype":"article","type":"article","title":"GRADIENT CONVERGENCE IN GRADIENT METHODS","abstract":"For the classical gradient method Xt+l = xt - -ytVf(xt) and several deterministic and stochastic variants, we discuss the issue of convergence of the gradient sequence Vf(xt) and the attendant issue of stationarity of limit points of xt. W;\"e assume that Vf is Lipschitz continuous, and that the stepsize at diminishes to 0 and satisfies standard stochastic approximation conditions. We show that either f(xt) - -oo or else f(xt) converges to a finite value and Vf(.t) – 0 (with probability 1 in the stochastic case). Existing results assume various boundedness conditions such as boundedness from below of f, or boundedness of Vf(xt), or boundedness of Xt.","language":"en","journal":". Introduction","author":[{"propositions":[],"lastnames":["Bertsekas"],"firstnames":["Dimitri","P"],"suffixes":[]},{"propositions":[],"lastnames":["Tsitsiklis"],"firstnames":["John","N"],"suffixes":[]}],"pages":"24","bibtex":"@article{bertsekas_gradient_nodate,\n\ttitle = {{GRADIENT} {CONVERGENCE} {IN} {GRADIENT} {METHODS}},\n\tabstract = {For the classical gradient method Xt+l = xt - -ytVf(xt) and several deterministic and stochastic variants, we discuss the issue of convergence of the gradient sequence Vf(xt) and the attendant issue of stationarity of limit points of xt. W;\"e assume that Vf is Lipschitz continuous, and that the stepsize at diminishes to 0 and satisfies standard stochastic approximation conditions. We show that either f(xt) - -oo or else f(xt) converges to a finite value and Vf(.t) -- 0 (with probability 1 in the stochastic case). 
Existing results assume various boundedness conditions such as boundedness from below of f, or boundedness of Vf(xt), or boundedness of Xt.},\n\tlanguage = {en},\n\tjournal = {. Introduction},\n\tauthor = {Bertsekas, Dimitri P and Tsitsiklis, John N},\n\tpages = {24}\n}\n\n","author_short":["Bertsekas, D. P","Tsitsiklis, J. N"],"key":"bertsekas_gradient_nodate","id":"bertsekas_gradient_nodate","bibbaseid":"bertsekas-tsitsiklis-gradientconvergenceingradientmethods","role":"author","urls":{},"downloads":0,"html":""},"bibtype":"article","biburl":"https://bibbase.org/zotero/asneha213","creationDate":"2019-06-06T20:57:45.714Z","downloads":0,"keywords":[],"search_terms":["gradient","convergence","gradient","methods","bertsekas","tsitsiklis"],"title":"GRADIENT CONVERGENCE IN GRADIENT METHODS","year":null,"dataSources":["fjacg9txEnNSDwee6"]}