Abstract (plain text; the same abstract is embedded in the BibTeX entry below):

We introduce a maximum Lq-likelihood estimation (MLqE) of mixture models using our proposed expectation-maximization (EM) algorithm, namely the EM algorithm with Lq-likelihood (EM-Lq). Properties of the MLqE obtained from the proposed EM-Lq are studied through simulated mixture model data. Compared with the maximum likelihood estimation (MLE), which is obtained from the EM algorithm, the MLqE provides a more robust estimation against outliers for small sample sizes. In particular, we study the performance of the MLqE in the context of the gross error model, where the true model of interest is a mixture of two normal distributions, and the contamination component is a third normal distribution with a large variance. A numerical comparison between the MLqE and the MLE for this gross error model is presented in terms of Kullback–Leibler (KL) distance and relative efficiency.

@article{Qin2013a,
  author    = {Qin, Yichen and Priebe, Carey E.},
  title     = {Maximum {$L_q$}-Likelihood Estimation via the {Expectation-Maximization} Algorithm: A Robust Estimation of Mixture Models},
  journal   = {Journal of the American Statistical Association},
  year      = {2013},
  month     = sep,
  volume    = {108},
  number    = {503},
  pages     = {914--928},
  publisher = {Taylor \& Francis Group},
  doi       = {10.1080/01621459.2013.787933},
  keywords  = {Gross error model, Robustness},
  language  = {en},
  abstract  = {We introduce a maximum Lq-likelihood estimation (MLqE) of mixture models using our proposed expectation-maximization (EM) algorithm, namely the EM algorithm with Lq-likelihood (EM-Lq). Properties of the MLqE obtained from the proposed EM-Lq are studied through simulated mixture model data. Compared with the maximum likelihood estimation (MLE), which is obtained from the EM algorithm, the MLqE provides a more robust estimation against outliers for small sample sizes. In particular, we study the performance of the MLqE in the context of the gross error model, where the true model of interest is a mixture of two normal distributions, and the contamination component is a third normal distribution with a large variance. A numerical comparison between the MLqE and the MLE for this gross error model is presented in terms of Kullback--Leibler (KL) distance and relative efficiency.},
}

Downloads: 0