Fixed-form variational posterior approximation through stochastic linear regression. Salimans, T. & Knowles, D. A. *Bayesian Analysis*, 8(4):837–882, International Society for Bayesian Analysis, 2013. Winner of the Lindley Prize! Abstract: We propose a general algorithm for approximating nonstandard Bayesian posterior distributions. The algorithm minimizes the Kullback-Leibler divergence of an approximating distribution to the intractable posterior distribution. Our method can be used to approximate any posterior distribution, provided that it is given in closed form up to the proportionality constant. The approximation can be any distribution in the exponential family or any mixture of such distributions, which means that it can be made arbitrarily precise. Several examples illustrate the speed and accuracy of our approximation method in practice.

Cleaned auto-export: removed bogus `isbn` (value 1401.2135 is an arXiv-ID pattern, not an ISBN) and redundant `arxivid` (duplicate of `eprint`); un-double-braced the title so bibliography styles can apply casing; hyphenated the ISSN; normalized field names to lowercase and aligned them.
@article{salimans2013,
  author        = {Salimans, Tim and Knowles, David A.},
  title         = {Fixed-Form Variational Posterior Approximation through Stochastic Linear Regression},
  journal       = {Bayesian Analysis},
  volume        = {8},
  number        = {4},
  pages         = {837--882},
  year          = {2013},
  publisher     = {International Society for Bayesian Analysis},
  doi           = {10.1214/13-BA858},
  issn          = {1936-0975},
  url           = {http://projecteuclid.org/euclid.ba/1386166315},
  eprint        = {1206.6679},
  archiveprefix = {arXiv},
  keywords      = {Machine Learning/Statistics},
  bibbase_note  = {<span style="color: green">Winner of the Lindley Prize!</span>},
  abstract      = {We propose a general algorithm for approximating nonstandard Bayesian posterior distributions. The algorithm minimizes the Kullback-Leibler divergence of an approximating distribution to the intractable posterior distribution. Our method can be used to approximate any posterior distribution, provided that it is given in closed form up to the proportionality constant. The approximation can be any distribution in the exponential family or any mixture of such distributions, which means that it can be made arbitrarily precise. Several examples illustrate the speed and accuracy of our approximation method in practice.},
}

Downloads: 0

{"_id":"3CBWj4vrjJQngGeTp","bibbaseid":"salimans-knowles-fixedformvariationalposteriorapproximationthroughstochasticlinearregression-2013","downloads":0,"creationDate":"2017-05-18T02:00:25.667Z","title":"Fixed-form variational posterior approximation through stochastic linear regression","author_short":["Salimans, T.","Knowles, D. A."],"year":2013,"bibtype":"article","biburl":"http://cs.stanford.edu/people/davidknowles/reordered.bib","bibdata":{"bibtype":"article","type":"article","abstract":"We propose a general algorithm for approximating nonstandard Bayesian posterior distributions. The algorithm minimizes the Kullback-Leibler divergence of an approximating distribution to the intractable posterior distribution. Our method can be used to approximate any posterior distribution, provided that it is given in closed form up to the proportionality constant. The approximation can be any distribution in the exponential family or any mixture of such distributions, which means that it can be made arbitrarily precise. Several examples illustrate the speed and accuracy of our approximation method in practice.","archiveprefix":"arXiv","arxivid":"1206.6679","author":[{"propositions":[],"lastnames":["Salimans"],"firstnames":["Tim"],"suffixes":[]},{"propositions":[],"lastnames":["Knowles"],"firstnames":["David","A."],"suffixes":[]}],"bibbase_note":"<span style=\"color: green\">Winner of the Lindley Prize!</span>","doi":"10.1214/13-BA858","eprint":"1206.6679","isbn":"1401.2135","issn":"19360975","journal":"Bayesian Analysis","keywords":"Machine Learning/Statistics","number":"4","pages":"837–882","publisher":"International Society for Bayesian Analysis","title":"Fixed-form variational posterior approximation through stochastic linear regression","url":"http://projecteuclid.org/euclid.ba/1386166315","volume":"8","year":"2013","bibtex":"@article{salimans2013,\n\tAbstract = {We propose a general algorithm for approximating nonstandard Bayesian posterior distributions. 
The algorithm minimizes the Kullback-Leibler divergence of an approximating distribution to the intractable posterior distribution. Our method can be used to approximate any posterior distribution, provided that it is given in closed form up to the proportionality constant. The approximation can be any distribution in the exponential family or any mixture of such distributions, which means that it can be made arbitrarily precise. Several examples illustrate the speed and accuracy of our approximation method in practice.},\n\tArchiveprefix = {arXiv},\n\tArxivid = {1206.6679},\n\tAuthor = {Salimans, Tim and Knowles, David A.},\n\tBibbase_Note = {<span style=\"color: green\">Winner of the Lindley Prize!</span>},\n\tDoi = {10.1214/13-BA858},\n\tEprint = {1206.6679},\n\tIsbn = {1401.2135},\n\tIssn = {19360975},\n\tJournal = {Bayesian Analysis},\n\tKeywords = {Machine Learning/Statistics},\n\tNumber = {4},\n\tPages = {837--882},\n\tPublisher = {International Society for Bayesian Analysis},\n\tTitle = {{Fixed-form variational posterior approximation through stochastic linear regression}},\n\tUrl = {http://projecteuclid.org/euclid.ba/1386166315},\n\tVolume = {8},\n\tYear = {2013}}\n\n","author_short":["Salimans, T.","Knowles, D. A."],"key":"salimans2013","id":"salimans2013","bibbaseid":"salimans-knowles-fixedformvariationalposteriorapproximationthroughstochasticlinearregression-2013","role":"author","urls":{"Paper":"http://projecteuclid.org/euclid.ba/1386166315"},"keyword":["Machine Learning/Statistics"],"downloads":0},"search_terms":["fixed","form","variational","posterior","approximation","through","stochastic","linear","regression","salimans","knowles"],"keywords":["approximate inference","stochastic approximation","variational bayes","approximate inference","stochastic approximation","variational bayes","machine learning","statistics","biology","genetics","machine learning/statistics"],"authorIDs":[],"dataSources":["E5kTWRuqMhy8QxJbW"]}