The Differential Entropy of Mixtures: New Bounds and Applications. Melbourne, J., Talukdar, S., Bhaban, S., Madiman, M., & Salapaka, M. V. 5, 2018. Paper Website doi abstract bibtex Mixture distributions are extensively used as a modeling tool in diverse areas from machine learning to communications engineering to physics, and obtaining bounds on the entropy of probability distributions is of fundamental importance in many of these applications. This article provides sharp bounds on the entropy concavity deficit, which is the difference between the entropy of the mixture and the weighted sum of entropies of constituent components. Toward establishing lower and upper bounds on the concavity deficit, results that are of importance in their own right are obtained. In order to obtain nontrivial upper bounds, properties of the skew-divergence are developed and notions of "skew" $f$-divergences are introduced; a reverse Pinsker inequality and a bound on Jensen-Shannon divergence are obtained along the way. Complementary lower bounds are derived with special attention paid to the case that corresponds to independent summation of a continuous and a discrete random variable. Several applications of the bounds are delineated, including to mutual information of additive noise channels, thermodynamics of computation, and functional inequalities.
@article{melbourne2018entropy,
  title               = {The Differential Entropy of Mixtures: New Bounds and Applications},
  type                = {article},
  year                = {2018},
  journal             = {IEEE Transactions on Information Theory},
  websites            = {http://arxiv.org/abs/1805.11257,http://dx.doi.org/10.1109/TIT.2022.3140661},
  month               = may,
  day                 = {29},
  id                  = {b138a3d9-8e31-336d-91a7-cbe78611ccdf},
  created             = {2024-12-13T09:36:58.854Z},
  file_attached       = {true},
  profile_id          = {f1f70cad-e32d-3de2-a3c0-be1736cb88be},
  group_id            = {5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1},
  last_modified       = {2025-01-08T08:34:15.971Z},
  read                = {true},
  starred             = {false},
  authored            = {false},
  confirmed           = {false},
  hidden              = {false},
  folder_uuids        = {df28411a-ed7f-4991-8358-d39685eb4bf0},
  private_publication = {false},
  internal-note       = {journal inferred from DOI prefix 10.1109/TIT (IEEE Trans. Inf. Theory); arXiv preprint is dated 2018, journal version 2022 -- verify which version is cited},
  abstract            = {Mixture distributions are extensively used as a modeling tool in diverse areas from machine learning to communications engineering to physics, and obtaining bounds on the entropy of probability distributions is of fundamental importance in many of these applications. This article provides sharp bounds on the entropy concavity deficit, which is the difference between the entropy of the mixture and the weighted sum of entropies of constituent components. Toward establishing lower and upper bounds on the concavity deficit, results that are of importance in their own right are obtained. In order to obtain nontrivial upper bounds, properties of the skew-divergence are developed and notions of "skew" $f$-divergences are introduced; a reverse Pinsker inequality and a bound on Jensen-Shannon divergence are obtained along the way. Complementary lower bounds are derived with special attention paid to the case that corresponds to independent summation of a continuous and a discrete random variable. Several applications of the bounds are delineated, including to mutual information of additive noise channels, thermodynamics of computation, and functional inequalities.},
  bibtype             = {article},
  author              = {Melbourne, James and Talukdar, Saurav and Bhaban, Shreyas and Madiman, Mokshay and Salapaka, Murti V.},
  doi                 = {10.1109/TIT.2022.3140661}
}
Downloads: 0
{"_id":"4xyP5tn779HNB3q2E","bibbaseid":"melbourne-talukdar-bhaban-madiman-salapaka-thedifferentialentropyofmixturesnewboundsandapplications-2018","author_short":["Melbourne, J.","Talukdar, S.","Bhaban, S.","Madiman, M.","Salapaka, M., V."],"bibdata":{"title":"The Differential Entropy of Mixtures: New Bounds and Applications","type":"article","year":"2018","websites":"http://arxiv.org/abs/1805.11257,http://dx.doi.org/10.1109/TIT.2022.3140661","month":"5","day":"29","id":"b138a3d9-8e31-336d-91a7-cbe78611ccdf","created":"2024-12-13T09:36:58.854Z","file_attached":"true","profile_id":"f1f70cad-e32d-3de2-a3c0-be1736cb88be","group_id":"5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1","last_modified":"2025-01-08T08:34:15.971Z","read":"true","starred":false,"authored":false,"confirmed":false,"hidden":false,"folder_uuids":"df28411a-ed7f-4991-8358-d39685eb4bf0","private_publication":false,"abstract":"Mixture distributions are extensively used as a modeling tool in diverse areas from machine learning to communications engineering to physics, and obtaining bounds on the entropy of probability distributions is of fundamental importance in many of these applications. This article provides sharp bounds on the entropy concavity deficit, which is the difference between the entropy of the mixture and the weighted sum of entropies of constituent components. Toward establishing lower and upper bounds on the concavity deficit, results that are of importance in their own right are obtained. In order to obtain nontrivial upper bounds, properties of the skew-divergence are developed and notions of \"skew\" $f$-divergences are introduced; a reverse Pinsker inequality and a bound on Jensen-Shannon divergence are obtained along the way. Complementary lower bounds are derived with special attention paid to the case that corresponds to independent summation of a continuous and a discrete random variable. 
Several applications of the bounds are delineated, including to mutual information of additive noise channels, thermodynamics of computation, and functional inequalities.","bibtype":"article","author":"Melbourne, James and Talukdar, Saurav and Bhaban, Shreyas and Madiman, Mokshay and Salapaka, Murti V.","doi":"10.1109/TIT.2022.3140661","bibtex":"@article{\n title = {The Differential Entropy of Mixtures: New Bounds and Applications},\n type = {article},\n year = {2018},\n websites = {http://arxiv.org/abs/1805.11257,http://dx.doi.org/10.1109/TIT.2022.3140661},\n month = {5},\n day = {29},\n id = {b138a3d9-8e31-336d-91a7-cbe78611ccdf},\n created = {2024-12-13T09:36:58.854Z},\n file_attached = {true},\n profile_id = {f1f70cad-e32d-3de2-a3c0-be1736cb88be},\n group_id = {5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1},\n last_modified = {2025-01-08T08:34:15.971Z},\n read = {true},\n starred = {false},\n authored = {false},\n confirmed = {false},\n hidden = {false},\n folder_uuids = {df28411a-ed7f-4991-8358-d39685eb4bf0},\n private_publication = {false},\n abstract = {Mixture distributions are extensively used as a modeling tool in diverse areas from machine learning to communications engineering to physics, and obtaining bounds on the entropy of probability distributions is of fundamental importance in many of these applications. This article provides sharp bounds on the entropy concavity deficit, which is the difference between the entropy of the mixture and the weighted sum of entropies of constituent components. Toward establishing lower and upper bounds on the concavity deficit, results that are of importance in their own right are obtained. In order to obtain nontrivial upper bounds, properties of the skew-divergence are developed and notions of \"skew\" $f$-divergences are introduced; a reverse Pinsker inequality and a bound on Jensen-Shannon divergence are obtained along the way. 
Complementary lower bounds are derived with special attention paid to the case that corresponds to independent summation of a continuous and a discrete random variable. Several applications of the bounds are delineated, including to mutual information of additive noise channels, thermodynamics of computation, and functional inequalities.},\n bibtype = {article},\n author = {Melbourne, James and Talukdar, Saurav and Bhaban, Shreyas and Madiman, Mokshay and Salapaka, Murti V.},\n doi = {10.1109/TIT.2022.3140661}\n}","author_short":["Melbourne, J.","Talukdar, S.","Bhaban, S.","Madiman, M.","Salapaka, M., V."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/6ccab56c-2688-956f-084d-d3b628122522/180511257v2.pdf.pdf","Website":"http://arxiv.org/abs/1805.11257,http://dx.doi.org/10.1109/TIT.2022.3140661"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"melbourne-talukdar-bhaban-madiman-salapaka-thedifferentialentropyofmixturesnewboundsandapplications-2018","role":"author","metadata":{"authorlinks":{}},"downloads":0},"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","dataSources":["2252seNhipfTmjEBQ"],"keywords":[],"search_terms":["differential","entropy","mixtures","new","bounds","applications","melbourne","talukdar","bhaban","madiman","salapaka"],"title":"The Differential Entropy of Mixtures: New Bounds and Applications","year":2018}