Extending the Beta divergence to complex values. Vaz, C. & Narayanan, S. *Pattern Recognition Letters*, Apr, 2020. Abstract: Various information-theoretic divergences have been proposed for the cost function in tasks such as matrix factorization and clustering. One class of divergence is called the Beta divergence. By varying a real-valued parameter beta, the Beta divergence connects several well-known divergences, such as the Euclidean distance, Kullback-Leibler divergence, and Itakura-Saito divergence. Unfortunately, the Beta divergence is properly defined only for positive real values, hindering its use for measuring distances between complex-valued data points. We define a new divergence, the Complex Beta divergence, that operates on complex values, and show that it coincides with the standard Beta divergence when the data is restricted to be in phase. Moreover, we show that different values of beta place different penalties on errors in magnitude and phase.

@article{VAZ2020,
  author   = {Vaz, Colin and Narayanan, Shrikanth},
  title    = {Extending the {Beta} divergence to complex values},
  journal  = {Pattern Recognition Letters},
  year     = {2020},
  month    = apr,
  issn     = {0167-8655},
  doi      = {10.1016/j.patrec.2020.11.005},
  url      = {http://www.sciencedirect.com/science/article/pii/S0167865520304104},
  link     = {http://sail.usc.edu/publications/files/VazBetaDivergence-PRL.pdf},
  keywords = {Information theory, KL divergence, Objective function, Young's inequality},
  abstract = {Various information-theoretic divergences have been proposed for the cost function in tasks such as matrix factorization and clustering. One class of divergence is called the Beta divergence. By varying a real-valued parameter beta, the Beta divergence connects several well-known divergences, such as the Euclidean distance, Kullback-Leibler divergence, and Itakura-Saito divergence. Unfortunately, the Beta divergence is properly defined only for positive real values, hindering its use for measuring distances between complex-valued data points. We define a new divergence, the Complex Beta divergence, that operates on complex values, and show that it coincides with the standard Beta divergence when the data is restricted to be in phase. Moreover, we show that different values of beta place different penalties on errors in magnitude and phase.},
  internal-note = {Fixed: bare DOI (no resolver prefix), month as unquoted macro, authors in "Last, First" form, braced proper noun in title. Volume/pages not in source record -- TODO confirm from publisher.},
}

Downloads: 0

{"_id":"MrB6v8o8ECuy2zEtC","bibbaseid":"vaz-narayanan-extendingthebetadivergencetocomplexvalues-2020","author_short":["Vaz, C.","Narayanan, S."],"bibdata":{"bibtype":"article","type":"article","abstract":"Various information-theoretic divergences have been proposed for the cost function in tasks such as matrix factorization and clustering. One class of divergence is called the Beta divergence. By varying a real-valued parameter beta, the Beta divergence connects several well-known divergences, such as the Euclidean distance, Kullback-Leibler divergence, and Itakura-Saito divergence. Unfortunately, the Beta divergence is properly defined only for positive real values, hindering its use for measuring distances between complex-valued data points. We define a new divergence, the Complex Beta divergence, that operates on complex values, and show that it coincides with the standard Beta divergence when the data is restricted to be in phase. Moreover, we show that different values of beta place different penalties on errors in magnitude and phase.","author":[{"firstnames":["Colin"],"propositions":[],"lastnames":["Vaz"],"suffixes":[]},{"firstnames":["Shrikanth"],"propositions":[],"lastnames":["Narayanan"],"suffixes":[]}],"doi":"https://doi.org/10.1016/j.patrec.2020.11.005","issn":"0167-8655","journal":"Pattern Recognition Letters","keywords":"Information theory, KL divergence, Objective function, Young's inequality","link":"http://sail.usc.edu/publications/files/VazBetaDivergence-PRL.pdf","title":"Extending the Beta divergence to complex values","url":"http://www.sciencedirect.com/science/article/pii/S0167865520304104","year":"2020","month":"Apr","bibtex":"@article{VAZ2020,\n abstract = {Various information-theoretic divergences have been proposed for the cost function in tasks such as matrix factorization and clustering. One class of divergence is called the Beta divergence. 
By varying a real-valued parameter beta, the Beta divergence connects several well-known divergences, such as the Euclidean distance, Kullback-Leibler divergence, and Itakura-Saito divergence. Unfortunately, the Beta divergence is properly defined only for positive real values, hindering its use for measuring distances between complex-valued data points. We define a new divergence, the Complex Beta divergence, that operates on complex values, and show that it coincides with the standard Beta divergence when the data is restricted to be in phase. Moreover, we show that different values of beta place different penalties on errors in magnitude and phase.},\n author = {Colin Vaz and Shrikanth Narayanan},\n doi = {https://doi.org/10.1016/j.patrec.2020.11.005},\n issn = {0167-8655},\n journal = {Pattern Recognition Letters},\n keywords = {Information theory, KL divergence, Objective function, Young's inequality},\n link = {http://sail.usc.edu/publications/files/VazBetaDivergence-PRL.pdf},\n title = {Extending the Beta divergence to complex values},\n url = {http://www.sciencedirect.com/science/article/pii/S0167865520304104},\n year = {2020},\n month = {Apr}\n}\n\n","author_short":["Vaz, C.","Narayanan, S."],"bibbaseid":"vaz-narayanan-extendingthebetadivergencetocomplexvalues-2020","role":"author","urls":{"Paper":"http://www.sciencedirect.com/science/article/pii/S0167865520304104"},"keyword":["Information theory","KL divergence","Objective function","Young's inequality"],"metadata":{"authorlinks":{}}},"bibtype":"article","biburl":"https://bibbase.org/f/nWhKb4SffvhfreEmj/shri-isi-edu.bib","dataSources":["P3nQrSLkFzGGSmKJQ","Reikhy6EiDXFTcuR9"],"keywords":["information theory","kl divergence","objective function","young's inequality"],"search_terms":["extending","beta","divergence","complex","values","vaz","narayanan"],"title":"Extending the Beta divergence to complex values","year":2020}