Discriminative Joint Vector And Component Reduction For Gaussian Mixture Models. Bar-Yosef, Y. & Bistritz, Y. In 2019 27th European Signal Processing Conference (EUSIPCO), pages 1-5, Sep., 2019. Paper doi abstract bibtex We introduce a discriminative parametric vector dimensionality reduction algorithm for Gaussian mixtures that is performed jointly with mixture component reduction. The reduction algorithm is based on the variational maximum mutual information (VMMI) method, which in contrast to other reduction algorithms, requires only the parameters of existing high order and high dimensional mixture models. The idea behind the proposed approach, called JVC-VMMI (for joint vector and component VMMI), differs significantly from traditional classification approaches that perform separately dimensionality reduction first, and then use the low-dimensional feature vector for training lower order models. The fact that the JVC-VMMI approach is relieved from using the original data samples admits an extremely efficient computation of the reduced models optimized for the classification task. We report experiments in vowel classification in which JVC-VMMI outperformed conventional Linear Discriminant Analysis (LDA) and Neighborhood Component Analysis (NCA) dimensionality reduction methods.
@inproceedings{8903142,
  author    = {Bar-Yosef, Y. and Bistritz, Y.},
  booktitle = {2019 27th European Signal Processing Conference (EUSIPCO)},
  title     = {Discriminative Joint Vector And Component Reduction For {Gaussian} Mixture Models},
  year      = {2019},
  pages     = {1--5},
  abstract  = {We introduce a discriminative parametric vector dimensionality reduction algorithm for Gaussian mixtures that is performed jointly with mixture component reduction. The reduction algorithm is based on the variational maximum mutual information (VMMI) method, which in contrast to other reduction algorithms, requires only the parameters of existing high order and high dimensional mixture models. The idea behind the proposed approach, called JVC-VMMI (for joint vector and component VMMI), differs significantly from traditional classification approaches that perform separately dimensionality reduction first, and then use the low-dimensional feature vector for training lower order models. The fact that the JVC-VMMI approach is relieved from using the original data samples admits an extremely efficient computation of the reduced models optimized for the classification task. We report experiments in vowel classification in which JVC-VMMI outperformed conventional Linear Discriminant Analysis (LDA) and Neighborhood Component Analysis (NCA) dimensionality reduction methods.},
  keywords  = {feature extraction;Gaussian processes;learning (artificial intelligence);linear discriminant analysis;mixture models;optimisation;pattern classification;discriminative parametric vector dimensionality reduction algorithm;Gaussian mixtures;mixture component reduction;variational maximum mutual information method;high dimensional mixture models;low-dimensional feature vector;JVC-VMMI approach;linear discriminant analysis;discriminative joint vector;Gaussian mixture models;neighborhood component analysis dimensionality reduction methods;classification approaches;joint vector-and-component VMMI;Optimization;Computational modeling;Mixture models;Dimensionality reduction;Mutual information;Mathematical model;Training;Dimensionality reduction;Gaussian mixture models;Discriminative learning;Hierarchical clustering},
  doi       = {10.23919/EUSIPCO.2019.8903142},
  issn      = {2076-1465},
  month     = sep,
  url       = {https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529375.pdf},
}
Downloads: 0
{"_id":"oxh7jrewNqw2k2yLD","bibbaseid":"baryosef-bistritz-discriminativejointvectorandcomponentreductionforgaussianmixturemodels-2019","authorIDs":[],"author_short":["Bar-Yosef, Y.","Bistritz, Y."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"firstnames":["Y."],"propositions":[],"lastnames":["Bar-Yosef"],"suffixes":[]},{"firstnames":["Y."],"propositions":[],"lastnames":["Bistritz"],"suffixes":[]}],"booktitle":"2019 27th European Signal Processing Conference (EUSIPCO)","title":"Discriminative Joint Vector And Component Reduction For Gaussian Mixture Models","year":"2019","pages":"1-5","abstract":"We introduce a discriminative parametric vector dimensionality reduction algorithm for Gaussian mixtures that is performed jointly with mixture component reduction. The reduction algorithm is based on the variational maximum mutual information (VMMI) method, which in contrast to other reduction algorithms, requires only the parameters of existing high order and high dimensional mixture models. The idea behind the proposed approach, called JVC-VMMI (for joint vector and component VMMI), differs significantly from traditional classification approaches that perform separately dimensionality reduction first, and then use the low-dimensional feature vector for training lower order models. The fact that the JVC-VMMI approach is relieved from using the original data samples admits an extremely efficient computation of the reduced models optimized for the classification task. 
We report experiments in vowel classification in which JVC-VMMI outperformed conventional Linear Discriminant Analysis (LDA) and Neighborhood Component Analysis (NCA) dimensionality reduction methods.","keywords":"feature extraction;Gaussian processes;learning (artificial intelligence);linear discriminant analysis;mixture models;optimisation;pattern classification;discriminative parametric vector dimensionality reduction algorithm;Gaussian mixtures;mixture component reduction;variational maximum mutual information method;high dimensional mixture models;low-dimensional feature vector;JVC-VMMI approach;linear discriminant analysis;discriminative joint vector;Gaussian mixture models;neighborhood component analysis dimensionality reduction methods;classification approaches;joint vector-and-component VMMI;Optimization;Computational modeling;Mixture models;Dimensionality reduction;Mutual information;Mathematical model;Training;Dimensionality reduction;Gaussian mixture models;Discriminative learning;Hierarchical clustering","doi":"10.23919/EUSIPCO.2019.8903142","issn":"2076-1465","month":"Sep.","url":"https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529375.pdf","bibtex":"@InProceedings{8903142,\n author = {Y. Bar-Yosef and Y. Bistritz},\n booktitle = {2019 27th European Signal Processing Conference (EUSIPCO)},\n title = {Discriminative Joint Vector And Component Reduction For Gaussian Mixture Models},\n year = {2019},\n pages = {1-5},\n abstract = {We introduce a discriminative parametric vector dimensionality reduction algorithm for Gaussian mixtures that is performed jointly with mixture component reduction. The reduction algorithm is based on the variational maximum mutual information (VMMI) method, which in contrast to other reduction algorithms, requires only the parameters of existing high order and high dimensional mixture models. 
The idea behind the proposed approach, called JVC-VMMI (for joint vector and component VMMI), differs significantly from traditional classification approaches that perform separately dimensionality reduction first, and then use the low-dimensional feature vector for training lower order models. The fact that the JVC-VMMI approach is relieved from using the original data samples admits an extremely efficient computation of the reduced models optimized for the classification task. We report experiments in vowel classification in which JVC-VMMI outperformed conventional Linear Discriminant Analysis (LDA) and Neighborhood Component Analysis (NCA) dimensionality reduction methods.},\n keywords = {feature extraction;Gaussian processes;learning (artificial intelligence);linear discriminant analysis;mixture models;optimisation;pattern classification;discriminative parametric vector dimensionality reduction algorithm;Gaussian mixtures;mixture component reduction;variational maximum mutual information method;high dimensional mixture models;low-dimensional feature vector;JVC-VMMI approach;linear discriminant analysis;discriminative joint vector;Gaussian mixture models;neighborhood component analysis dimensionality reduction methods;classification approaches;joint vector-and-component VMMI;Optimization;Computational modeling;Mixture models;Dimensionality reduction;Mutual information;Mathematical model;Training;Dimensionality reduction;Gaussian mixture models;Discriminative learning;Hierarchical clustering},\n doi = {10.23919/EUSIPCO.2019.8903142},\n issn = {2076-1465},\n month = {Sep.},\n url = {https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529375.pdf},\n}\n\n","author_short":["Bar-Yosef, Y.","Bistritz, 
Y."],"key":"8903142","id":"8903142","bibbaseid":"baryosef-bistritz-discriminativejointvectorandcomponentreductionforgaussianmixturemodels-2019","role":"author","urls":{"Paper":"https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529375.pdf"},"keyword":["feature extraction;Gaussian processes;learning (artificial intelligence);linear discriminant analysis;mixture models;optimisation;pattern classification;discriminative parametric vector dimensionality reduction algorithm;Gaussian mixtures;mixture component reduction;variational maximum mutual information method;high dimensional mixture models;low-dimensional feature vector;JVC-VMMI approach;linear discriminant analysis;discriminative joint vector;Gaussian mixture models;neighborhood component analysis dimensionality reduction methods;classification approaches;joint vector-and-component VMMI;Optimization;Computational modeling;Mixture models;Dimensionality reduction;Mutual information;Mathematical model;Training;Dimensionality reduction;Gaussian mixture models;Discriminative learning;Hierarchical clustering"],"metadata":{"authorlinks":{}},"downloads":0},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/Roznn/EUSIPCO/main/eusipco2019url.bib","creationDate":"2021-02-11T19:15:22.165Z","downloads":0,"keywords":["feature extraction;gaussian processes;learning (artificial intelligence);linear discriminant analysis;mixture models;optimisation;pattern classification;discriminative parametric vector dimensionality reduction algorithm;gaussian mixtures;mixture component reduction;variational maximum mutual information method;high dimensional mixture models;low-dimensional feature vector;jvc-vmmi approach;linear discriminant analysis;discriminative joint vector;gaussian mixture models;neighborhood component analysis dimensionality reduction methods;classification approaches;joint vector-and-component vmmi;optimization;computational modeling;mixture models;dimensionality 
reduction;mutual information;mathematical model;training;dimensionality reduction;gaussian mixture models;discriminative learning;hierarchical clustering"],"search_terms":["discriminative","joint","vector","component","reduction","gaussian","mixture","models","bar-yosef","bistritz"],"title":"Discriminative Joint Vector And Component Reduction For Gaussian Mixture Models","year":2019,"dataSources":["NqWTiMfRR56v86wRs","r6oz3cMyC99QfiuHW"]}