Sparse Variational Inference: Bayesian Coresets from Scratch. Campbell, T. & Beronov, B. In Conference on Neural Information Processing Systems (NeurIPS), pages 11457–11468, 2019.

1st prize, Student poster competition, AICan (Annual Meeting, Pan-Canadian AI Strategy, Canadian Institute for Advanced Research), Vancouver, Canada, Dec. 9, 2019.

Link: http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch
Paper: http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch.pdf
Poster: https://github.com/plai-group/bibliography/raw/master/presentations_posters/CAM-19.pdf

Abstract: The proliferation of automated inference algorithms in Bayesian statistics has provided practitioners newfound access to fast, reproducible data analysis and powerful statistical models. Designing automated methods that are also both computationally scalable and theoretically sound, however, remains a significant challenge. Recent work on Bayesian coresets takes the approach of compressing the dataset before running a standard inference algorithm, providing both scalability and guarantees on posterior approximation error. But the automation of past coreset methods is limited because they depend on the availability of a reasonable coarse posterior approximation, which is difficult to specify in practice. In the present work we remove this requirement by formulating coreset construction as sparsity-constrained variational inference within an exponential family. This perspective leads to a novel construction via greedy optimization, and also provides a unifying information-geometric view of present and past methods. The proposed Riemannian coreset construction algorithm is fully automated, requiring no problem-specific inputs aside from the probabilistic model and dataset. In addition to being significantly easier to use than past methods, experiments demonstrate that past coreset constructions are fundamentally limited by the fixed coarse posterior approximation; in contrast, the proposed algorithm is able to continually improve the coreset, providing state-of-the-art Bayesian dataset summarization with orders-of-magnitude reduction in KL divergence to the exact posterior.
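As a reading aid, the sparsity-constrained variational formulation described in the abstract can be sketched as follows. This is our gloss rather than a quotation from the paper: the symbols f_n (per-datapoint log-likelihood potentials), w (nonnegative coreset weights), and M (coreset size budget) are assumed notation.

```latex
\documentclass{article}
\usepackage{amsmath,amssymb}
\begin{document}
% Coreset posterior: the prior reweighted by a sparse, nonnegative
% combination of per-datapoint log-likelihood potentials f_n.
\[
  \pi_w(\theta) \;\propto\; \pi_0(\theta)\,
  \exp\Big(\textstyle\sum_{n=1}^{N} w_n f_n(\theta)\Big),
  \qquad f_n(\theta) = \log p(x_n \mid \theta).
\]
% Coreset construction as sparsity-constrained variational inference:
% minimize KL to the exact posterior (w = 1) over at most M active weights.
\[
  w^\star \in \operatorname*{arg\,min}_{w \ge 0}\;
  \mathrm{D}_{\mathrm{KL}}\!\left(\pi_w \,\middle\|\, \pi_{\mathbf{1}}\right)
  \quad \text{s.t.} \quad \lVert w \rVert_0 \le M.
\]
\end{document}
```

Under this formulation the exact posterior is recovered at w = 1, and every coreset posterior pi_w lies in an exponential family spanned by the potentials, which is what makes the KL objective and its information geometry tractable. A code-level sketch of the greedy construction follows the BibTeX entry below.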
@inproceedings{CAM-19,
title={Sparse Variational Inference: Bayesian Coresets from Scratch},
author={Campbell, Trevor and Beronov, Boyan},
booktitle={Conference on Neural Information Processing Systems (NeurIPS)},
pages={11457--11468},
year={2019},
eid={arXiv:1906.03329},
archivePrefix={arXiv},
eprint={1906.03329},
support={D3M},
url_Link={http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch},
url_Paper={http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch.pdf},
url_Poster={https://github.com/plai-group/bibliography/raw/master/presentations_posters/CAM-19.pdf},
bibbase_note={1st prize, Student poster competition, AICan (Annual Meeting, Pan-Canadian AI Strategy, Canadian Institute for Advanced Research). Vancouver, Canada, Dec. 9, 2019},
abstract={The proliferation of automated inference algorithms in Bayesian statistics has provided practitioners newfound access to fast, reproducible data analysis and powerful statistical models. Designing automated methods that are also both computationally scalable and theoretically sound, however, remains a significant challenge. Recent work on Bayesian coresets takes the approach of compressing the dataset before running a standard inference algorithm, providing both scalability and guarantees on posterior approximation error. But the automation of past coreset methods is limited because they depend on the availability of a reasonable coarse posterior approximation, which is difficult to specify in practice. In the present work we remove this requirement by formulating coreset construction as sparsity-constrained variational inference within an exponential family. This perspective leads to a novel construction via greedy optimization, and also provides a unifying information-geometric view of present and past methods. The proposed Riemannian coreset construction algorithm is fully automated, requiring no problem-specific inputs aside from the probabilistic model and dataset. In addition to being significantly easier to use than past methods, experiments demonstrate that past coreset constructions are fundamentally limited by the fixed coarse posterior approximation; in contrast, the proposed algorithm is able to continually improve the coreset, providing state-of-the-art Bayesian dataset summarization with orders-of-magnitude reduction in KL divergence to the exact posterior.}
}
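To make the greedy optimization mentioned in the abstract concrete, here is a minimal, hypothetical Python sketch of sparsity-constrained variational coreset construction. It is not the authors' reference implementation (consult their open-source `bayesian-coresets` package for that); the helper functions `log_likes` and `sample_coreset_posterior`, the normalization in the selection rule, and all step-size defaults are assumptions for illustration.

```python
import numpy as np

def greedy_sparse_vi(log_likes, sample_coreset_posterior, N, M=50,
                     n_steps=100, lr=0.5, n_samples=256):
    """Hypothetical sketch of greedy, sparsity-constrained variational
    coreset construction (not the paper's reference implementation).

    log_likes(thetas) -> (S, N) array with entry [s, n] = f_n(theta_s)
    sample_coreset_posterior(w, S) -> S approximate samples from pi_w
    """
    w = np.zeros(N)
    one = np.ones(N)
    for _ in range(M):
        # Monte Carlo estimate of Cov[f_n, residual] under the current pi_w.
        F = log_likes(sample_coreset_posterior(w, n_samples))
        Fc = F - F.mean(axis=0)
        resid = Fc @ (one - w)  # centered residual: sum_n (1 - w_n) f_n(theta)
        cov = (Fc * resid[:, None]).mean(axis=0)
        # Greedy step: activate the point whose (normalized) potential best
        # explains what the current coreset posterior is missing.
        n_star = int(np.argmax(np.abs(cov) / (Fc.std(axis=0) + 1e-12)))
        w[n_star] = max(w[n_star], 1e-3)
        # Refine weights on the active support by projected stochastic
        # gradient descent on KL(pi_w || pi_1); grad_n = -Cov[f_n, residual].
        active = w > 0
        for _ in range(n_steps):
            F = log_likes(sample_coreset_posterior(w, n_samples))
            Fc = F - F.mean(axis=0)
            resid = Fc @ (one - w)
            grad = -(Fc * resid[:, None]).mean(axis=0)
            w[active] = np.maximum(w[active] - lr * grad[active], 0.0)
    return w
```

The design point this sketch tries to capture is the one the abstract emphasizes: the Monte Carlo covariance estimates are taken under the current coreset posterior pi_w itself rather than under a fixed coarse approximation, which is why the construction can keep improving the coreset as weights are added and refined.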
{"_id":"YsvXsLuxMmLson2tY","bibbaseid":"campbell-beronov-sparsevariationalinferencebayesiancoresetsfromscratch-2019","authorIDs":[],"author_short":["Campbell, T.","Beronov, B."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"Sparse Variational Inference: Bayesian Coresets from Scratch","author":[{"propositions":[],"lastnames":["Campbell"],"firstnames":["Trevor"],"suffixes":[]},{"propositions":[],"lastnames":["Beronov"],"firstnames":["Boyan"],"suffixes":[]}],"booktitle":"Conference on Neural Information Processing Systems (NeurIPS)","pages":"11457–11468","year":"2019","eid":"arXiv:1906.03329","archiveprefix":"arXiv","eprint":"1906.03329","support":"D3M","url_link":"http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch","url_paper":"http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch.pdf","url_poster":"https://github.com/plai-group/bibliography/raw/master/presentations_posters/CAM-19.pdf","bibbase_note":"1st prize, Student poster competition, AICan (Annual Meeting, Pan-Canadian AI Strategy, Canadian Institute for Advanced Research). Vancouver, Canada, Dec. 9, 2019","abstract":"The proliferation of automated inference algorithms in Bayesian statistics has provided practitioners newfound access to fast, reproducible data analysis and powerful statistical models. Designing automated methods that are also both computationally scalable and theoretically sound, however, remains a significant challenge. Recent work on Bayesian coresets takes the approach of compressing the dataset before running a standard inference algorithm, providing both scalability and guarantees on posterior approximation error. But the automation of past coreset methods is limited because they depend on the availability of a reasonable coarse posterior approximation, which is difficult to specify in practice. In the present work we remove this requirement by formulating coreset construction as sparsity-constrained variational inference within an exponential family. This perspective leads to a novel construction via greedy optimization, and also provides a unifying information-geometric view of present and past methods. The proposed Riemannian coreset construction algorithm is fully automated, requiring no problem-specific inputs aside from the probabilistic model and dataset. 
In addition to being significantly easier to use than past methods, experiments demonstrate that past coreset constructions are fundamentally limited by the fixed coarse posterior approximation; in contrast, the proposed algorithm is able to continually improve the coreset, providing state-of-the-art Bayesian dataset summarization with orders-of-magnitude reduction in KL divergence to the exact posterior.","bibtex":"@inproceedings{CAM-19,\n title={Sparse Variational Inference: Bayesian Coresets from Scratch},\n author={Campbell, Trevor and Beronov, Boyan},\n booktitle={Conference on Neural Information Processing Systems (NeurIPS)},\n pages={11457--11468},\n year={2019},\n eid = {arXiv:1906.03329},\n archivePrefix = {arXiv},\n eprint = {1906.03329},\n support = {D3M},\n url_Link={http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch},\n url_Paper={http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch.pdf},\n url_Poster={https://github.com/plai-group/bibliography/raw/master/presentations_posters/CAM-19.pdf},\n bibbase_note={1st prize, Student poster competition, AICan (Annual Meeting, Pan-Canadian AI Strategy, Canadian Institute for Advanced Research). Vancouver, Canada, Dec. 9, 2019},\n abstract={The proliferation of automated inference algorithms in Bayesian statistics has provided practitioners newfound access to fast, reproducible data analysis and powerful statistical models. Designing automated methods that are also both computationally scalable and theoretically sound, however, remains a significant challenge. Recent work on Bayesian coresets takes the approach of compressing the dataset before running a standard inference algorithm, providing both scalability and guarantees on posterior approximation error. But the automation of past coreset methods is limited because they depend on the availability of a reasonable coarse posterior approximation, which is difficult to specify in practice. In the present work we remove this requirement by formulating coreset construction as sparsity-constrained variational inference within an exponential family. This perspective leads to a novel construction via greedy optimization, and also provides a unifying information-geometric view of present and past methods. The proposed Riemannian coreset construction algorithm is fully automated, requiring no problem-specific inputs aside from the probabilistic model and dataset. 
In addition to being significantly easier to use than past methods, experiments demonstrate that past coreset constructions are fundamentally limited by the fixed coarse posterior approximation; in contrast, the proposed algorithm is able to continually improve the coreset, providing state-of-the-art Bayesian dataset summarization with orders-of-magnitude reduction in KL divergence to the exact posterior.}\n}\n\n","author_short":["Campbell, T.","Beronov, B."],"key":"CAM-19","id":"CAM-19","bibbaseid":"campbell-beronov-sparsevariationalinferencebayesiancoresetsfromscratch-2019","role":"author","urls":{" link":"http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch"," paper":"http://papers.nips.cc/paper/9322-sparse-variational-inference-bayesian-coresets-from-scratch.pdf"," poster":"https://github.com/plai-group/bibliography/raw/master/presentations_posters/CAM-19.pdf"},"metadata":{"authorlinks":{}},"downloads":2},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/plai-group/bibliography/master/group_publications.bib","creationDate":"2020-02-28T19:23:48.698Z","downloads":2,"keywords":[],"search_terms":["sparse","variational","inference","bayesian","coresets","scratch","campbell","beronov"],"title":"Sparse Variational Inference: Bayesian Coresets from Scratch","year":2019,"dataSources":["7avRLRrz2ifJGMKcD","BKH7YtW7K7WNMA3cj","wyN5DxtoT6AQuiXnm"]}