Approximating the Kullback Leibler divergence between Gaussian mixture models. Hershey, J. R. & Olsen, P. A. In ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings, 2007. doi: 10.1109/ICASSP.2007.366913.
Abstract: The Kullback Leibler (KL) Divergence is a widely used tool in statistics and pattern recognition. The KL divergence between two Gaussian Mixture Models (GMMs) is frequently needed in the fields of speech and image recognition. Unfortunately the KL divergence between two GMMs is not analytically tractable, nor does any efficient computational algorithm exist. Some techniques cope with this problem by replacing the KL divergence with other functions that can be computed efficiently. We introduce two new methods, the variational approximation and the variational upper bound, and compare them to existing methods. We discuss seven different techniques in total and weigh the benefits of each one against the others. To conclude we evaluate the performance of each one through numerical experiments. © 2007 IEEE.
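As the abstract notes, the KL divergence between two GMMs has no closed form, and the usual reference point is a Monte Carlo estimate. The sketch below is a minimal illustration of that baseline, not code from the paper; it uses one-dimensional mixtures, and the weights, means, and standard deviations are hypothetical parameters chosen only for demonstration.

import numpy as np
from scipy.stats import norm

def gmm_logpdf(x, weights, means, stds):
    """Log-density of a 1-D Gaussian mixture evaluated at points x."""
    comp = np.stack([w * norm.pdf(x, m, s) for w, m, s in zip(weights, means, stds)])
    return np.log(comp.sum(axis=0))

def gmm_sample(n, weights, means, stds, rng):
    """Draw n samples from a 1-D Gaussian mixture."""
    idx = rng.choice(len(weights), size=n, p=weights)
    return rng.normal(np.asarray(means)[idx], np.asarray(stds)[idx])

def kl_monte_carlo(f, g, n=100_000, seed=0):
    """Monte Carlo estimate of D(f || g): average of log f(x) - log g(x) with x ~ f."""
    rng = np.random.default_rng(seed)
    x = gmm_sample(n, *f, rng)
    return np.mean(gmm_logpdf(x, *f) - gmm_logpdf(x, *g))

# Hypothetical example mixtures, given as (weights, means, stds).
f = ([0.3, 0.7], [-1.0, 2.0], [0.5, 1.0])
g = ([0.5, 0.5], [0.0, 1.5], [1.0, 0.8])
print(kl_monte_carlo(f, g))

The Monte Carlo estimate converges to the true divergence as the sample size grows, which is why the paper uses sampling-based values as the yardstick for the cheaper approximations it studies.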
@article{hershey2007approximating,
title = {Approximating the Kullback Leibler divergence between Gaussian mixture models},
year = {2007},
keywords = {Gaussian mixture models,Kullback Leibler divergence,Unscented transformation,Variational methods},
volume = {4},
abstract = {The Kullback Leibler (KL) Divergence is a widely used tool in statistics and pattern recognition. The KL divergence between two Gaussian Mixture Models (GMMs) is frequently needed in the fields of speech and image recognition. Unfortunately the KL divergence between two GMMs is not analytically tractable, nor does any efficient computational algorithm exist. Some techniques cope with this problem by replacing the KL divergence with other functions that can be computed efficiently. We introduce two new methods, the variational approximation and the variational upper bound, and compare them to existing methods. We discuss seven different techniques in total and weigh the benefits of each one against the others. To conclude we evaluate the performance of each one through numerical experiments. © 2007 IEEE.},
author = {Hershey, John R. and Olsen, Peder A.},
doi = {10.1109/ICASSP.2007.366913},
journal = {ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings},
url = {https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/314f7368-99cf-ca4f-00f6-558fc0a9fe52/full_text.pdf.pdf}
}
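For comparison with the Monte Carlo baseline above, the following sketch implements a variational-style approximation in the spirit of the method the abstract introduces, combining closed-form KL divergences between individual Gaussian components. The exact expression and notation in the paper may differ; this is an illustrative 1-D version only, reusing the hypothetical mixtures from the earlier sketch.

import numpy as np

def kl_gauss(m1, s1, m2, s2):
    """Closed-form KL divergence between two univariate Gaussians N(m1, s1^2) and N(m2, s2^2)."""
    return np.log(s2 / s1) + (s1**2 + (m1 - m2)**2) / (2 * s2**2) - 0.5

def kl_variational(f, g):
    """Variational-style approximation to D(f || g) for 1-D Gaussian mixtures.

    D_var(f || g) = sum_a pi_a * log( sum_a' pi_a' exp(-KL(f_a, f_a'))
                                      / sum_b w_b exp(-KL(f_a, g_b)) )
    """
    fw, fm, fs = map(np.asarray, f)
    gw, gm, gs = map(np.asarray, g)
    d = 0.0
    for a in range(len(fw)):
        # Weighted exponentiated KL to f's own components (numerator) ...
        num = np.sum(fw * np.exp(-kl_gauss(fm[a], fs[a], fm, fs)))
        # ... and to g's components (denominator).
        den = np.sum(gw * np.exp(-kl_gauss(fm[a], fs[a], gm, gs)))
        d += fw[a] * np.log(num / den)
    return d

f = ([0.3, 0.7], [-1.0, 2.0], [0.5, 1.0])   # same illustrative mixtures as above
g = ([0.5, 0.5], [0.0, 1.5], [1.0, 0.8])
print(kl_variational(f, g))

The appeal of this family of approximations is cost: it needs only pairwise closed-form Gaussian KL terms rather than sampling, so it scales with the product of the two mixtures' component counts.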
{"_id":"7ze5MokHh39Fizc4K","bibbaseid":"hershey-olsen-approximatingthekullbackleiblerdivergencebetweengaussianmixturemodels-2007","author_short":["Hershey, J., R.","Olsen, P., A."],"bibdata":{"title":"Approximating the Kullback Leibler divergence between Gaussian mixture models","type":"article","year":"2007","keywords":"Gaussian mixture models,Kullback Leibler divergence,Unscented transformation,Variational methods","volume":"4","id":"18738d45-942f-3890-834a-3d740e9fba92","created":"2024-10-11T16:18:25.875Z","accessed":"2024-10-11","file_attached":"true","profile_id":"f1f70cad-e32d-3de2-a3c0-be1736cb88be","group_id":"5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1","last_modified":"2024-12-19T10:08:39.172Z","read":"true","starred":false,"authored":false,"confirmed":false,"hidden":false,"folder_uuids":"df28411a-ed7f-4991-8358-d39685eb4bf0","private_publication":false,"abstract":"The Kullback Leibler (KL) Divergence is a widely used tool in statistics and pattern recognition. The KL divergence between two Gaussian Mixture Models (GMMs) is frequently needed in the fields of speech and image recognition. Unfortunately the KL divergence between two GMMs is not analytically tractable, nor does any efficient computational algorithm exist. Some techniques cope with this problem by replacing the KL divergence with other functions that can be computed efficiently. We introduce two new methods, the variational approximation and the variational upper bound, and compare them to existing methods. We discuss seven different techniques in total and weigh the benefits of each one against the others. To conclude we evaluate the performance of each one through numerical experiments. © 2007 IEEE.","bibtype":"article","author":"Hershey, John R. and Olsen, Peder A.","doi":"10.1109/ICASSP.2007.366913","journal":"ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings","bibtex":"@article{\n title = {Approximating the Kullback Leibler divergence between Gaussian mixture models},\n type = {article},\n year = {2007},\n keywords = {Gaussian mixture models,Kullback Leibler divergence,Unscented transformation,Variational methods},\n volume = {4},\n id = {18738d45-942f-3890-834a-3d740e9fba92},\n created = {2024-10-11T16:18:25.875Z},\n accessed = {2024-10-11},\n file_attached = {true},\n profile_id = {f1f70cad-e32d-3de2-a3c0-be1736cb88be},\n group_id = {5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1},\n last_modified = {2024-12-19T10:08:39.172Z},\n read = {true},\n starred = {false},\n authored = {false},\n confirmed = {false},\n hidden = {false},\n folder_uuids = {df28411a-ed7f-4991-8358-d39685eb4bf0},\n private_publication = {false},\n abstract = {The Kullback Leibler (KL) Divergence is a widely used tool in statistics and pattern recognition. The KL divergence between two Gaussian Mixture Models (GMMs) is frequently needed in the fields of speech and image recognition. Unfortunately the KL divergence between two GMMs is not analytically tractable, nor does any efficient computational algorithm exist. Some techniques cope with this problem by replacing the KL divergence with other functions that can be computed efficiently. We introduce two new methods, the variational approximation and the variational upper bound, and compare them to existing methods. We discuss seven different techniques in total and weigh the benefits of each one against the others. To conclude we evaluate the performance of each one through numerical experiments. © 2007 IEEE.},\n bibtype = {article},\n author = {Hershey, John R. 
and Olsen, Peder A.},\n doi = {10.1109/ICASSP.2007.366913},\n journal = {ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings}\n}","author_short":["Hershey, J., R.","Olsen, P., A."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/314f7368-99cf-ca4f-00f6-558fc0a9fe52/full_text.pdf.pdf"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"hershey-olsen-approximatingthekullbackleiblerdivergencebetweengaussianmixturemodels-2007","role":"author","keyword":["Gaussian mixture models","Kullback Leibler divergence","Unscented transformation","Variational methods"],"metadata":{"authorlinks":{}}},"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","dataSources":["7YGNF4MLRQi3TLvha","2252seNhipfTmjEBQ"],"keywords":["gaussian mixture models","kullback leibler divergence","unscented transformation","variational methods"],"search_terms":["approximating","kullback","leibler","divergence","between","gaussian","mixture","models","hershey","olsen"],"title":"Approximating the Kullback Leibler divergence between Gaussian mixture models","year":2007}