Structured Conditional Continuous Normalizing Flows for Efficient Amortized Inference in Graphical Models. Weilbach, C., Beronov, B., Wood, F., & Harvey, W. In Proceedings of the Twenty-Third International Conference on Artificial Intelligence and Statistics (AISTATS), PMLR 108:4441–4451, 2020.
Abstract: We exploit minimally faithful inversion of graphical model structures to specify sparse continuous normalizing flows (CNFs) for amortized inference. We find that the sparsity of this factorization can be exploited to reduce the numbers of parameters in the neural network, adaptive integration steps of the flow, and consequently FLOPs at both training and inference time without decreasing performance in comparison to unconstrained flows. By expressing the structure inversion as a compilation pass in a probabilistic programming language, we are able to apply it in a novel way to models as complex as convolutional neural networks. Furthermore, we extend the training objective for CNFs in the context of inference amortization to the symmetric Kullback-Leibler divergence, and demonstrate its theoretical and practical advantages.
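The sparse CNF construction in the abstract can be pictured with a small sketch: the adjacency of a minimally faithful inverse of the graphical model is used as a weight mask on the network that parameterizes the flow dynamics, so each latent's time derivative only reads the inputs it is permitted to depend on. The sketch below is illustrative only and assumes PyTorch; the two-latent structure, the MaskedLinear class, and the particular mask are hypothetical examples, not taken from the paper's code.

import torch
import torch.nn as nn

class MaskedLinear(nn.Linear):
    """Linear layer whose weight is elementwise-masked, so output i
    depends only on inputs j with mask[i, j] == 1."""
    def __init__(self, in_features, out_features, mask):
        super().__init__(in_features, out_features)
        self.register_buffer("mask", mask)

    def forward(self, x):
        return nn.functional.linear(x, self.weight * self.mask, self.bias)

# Hypothetical inverted-structure adjacency for two latents and one
# observation: rows index outputs (dz/dt per latent), columns index
# inputs [z1, z2, x].
mask = torch.tensor([[1., 0., 1.],   # dz1/dt may read z1 and x
                     [1., 1., 1.]])  # dz2/dt may read z1, z2 and x

dynamics = MaskedLinear(in_features=3, out_features=2, mask=mask)
z_and_x = torch.randn(5, 3)   # batch of [z1, z2, x] inputs (time omitted)
dz_dt = dynamics(z_and_x)     # shape (5, 2): masked time derivatives

In a deeper dynamics network the per-layer masks would have to be composed (as in MADE-style masking) so that the end-to-end dependency pattern still matches the inverted structure.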
@inproceedings{WEI-20,
title={Structured Conditional Continuous Normalizing Flows for Efficient Amortized Inference in Graphical Models},
author={Weilbach, Christian and Beronov, Boyan and Wood, Frank and Harvey, William},
booktitle={Proceedings of the Twenty Third International Conference on Artificial Intelligence and Statistics (AISTATS)},
pages={4441--4451},
year={2020},
url_Link={http://proceedings.mlr.press/v108/weilbach20a.html},
url_Paper={http://proceedings.mlr.press/v108/weilbach20a/weilbach20a.pdf},
url_Poster={https://github.com/plai-group/bibliography/blob/master/presentations_posters/PROBPROG2020_WEI.pdf},
support = {D3M},
bibbase_note = {PMLR 108:4441-4451},
abstract = {We exploit minimally faithful inversion of graphical model structures to specify sparse continuous normalizing flows (CNFs) for amortized inference. We find that the sparsity of this factorization can be exploited to reduce the numbers of parameters in the neural network, adaptive integration steps of the flow, and consequently FLOPs at both training and inference time without decreasing performance in comparison to unconstrained flows. By expressing the structure inversion as a compilation pass in a probabilistic programming language, we are able to apply it in a novel way to models as complex as convolutional neural networks. Furthermore, we extend the training objective for CNFs in the context of inference amortization to the symmetric Kullback-Leibler divergence, and demonstrate its theoretical and practical advantages.}
}
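As a rough illustration of the symmetric Kullback-Leibler objective mentioned in the abstract, the sketch below combines the usual inclusive (forward) amortization term, estimated from joint samples (x, z) drawn from the generative model, with an exclusive (reverse) term estimated from samples of the conditional flow q(z | x). The function name and the flow interface (log_prob, sample_and_log_prob) are assumptions made for the example, not the paper's API.

def symmetric_kl_objective(model_log_joint, flow, x, z_from_prior):
    """Monte Carlo estimate of KL(p||q) + KL(q||p), up to additive
    constants that do not depend on the flow parameters.

    model_log_joint(x, z):        log p(x, z) under the generative model.
    flow.log_prob(z, x):          log q(z | x) under the conditional CNF.
    flow.sample_and_log_prob(x):  a reparameterized sample z ~ q(z | x)
                                  together with its log density.
    (These method names are illustrative assumptions.)
    """
    # Inclusive/forward term: E_{p(x,z)}[-log q(z | x)], the standard
    # amortized-inference objective when joint samples are available.
    forward_term = -flow.log_prob(z_from_prior, x).mean()

    # Exclusive/reverse term: E_{q(z|x)}[log q(z | x) - log p(x, z)].
    z_q, log_q = flow.sample_and_log_prob(x)
    reverse_term = (log_q - model_log_joint(x, z_q)).mean()

    return forward_term + reverse_term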