Efficient multivariate entropy estimation via $k$-nearest neighbour distances. Berrett, T. B., Samworth, R. J., & Yuan, M. The Annals of Statistics, 47(1):288–318, February, 2019. Publisher: Institute of Mathematical Statistics
Efficient multivariate entropy estimation via $k$-nearest neighbour distances [link]Paper  doi  abstract   bibtex   
Many statistical procedures, including goodness-of-fit tests and methods for independent component analysis, rely critically on the estimation of the entropy of a distribution. In this paper, we seek entropy estimators that are efficient and achieve the local asymptotic minimax lower bound with respect to squared error loss. To this end, we study weighted averages of the estimators originally proposed by Kozachenko and Leonenko [Probl. Inform. Transm. 23 (1987), 95–101], based on the $k$-nearest neighbour distances of a sample of $n$ independent and identically distributed random vectors in $\mathbb{R}^d$. A careful choice of weights enables us to obtain an efficient estimator in arbitrary dimensions, given sufficient smoothness, while the original unweighted estimator is typically only efficient when $d \leq 3$. In addition to the new estimator proposed and theoretical understanding provided, our results facilitate the construction of asymptotically valid confidence intervals for the entropy of asymptotically minimal width.
@article{berrett_efficient_2019,
	title     = {Efficient multivariate entropy estimation via {$k$}-nearest neighbour distances},
	author    = {Berrett, Thomas B. and Samworth, Richard J. and Yuan, Ming},
	journal   = {The Annals of Statistics},
	volume    = {47},
	number    = {1},
	pages     = {288--318},
	month     = feb,
	year      = {2019},
	publisher = {Institute of Mathematical Statistics},
	issn      = {0090-5364, 2168-8966},
	doi       = {10.1214/18-AOS1688},
	url       = {https://projecteuclid.org/journals/annals-of-statistics/volume-47/issue-1/Efficient-multivariate-entropy-estimation-via-k-nearest-neighbour-distances/10.1214/18-AOS1688.full},
	urldate   = {2023-03-24},
	abstract  = {Many statistical procedures, including goodness-of-fit tests and methods for independent component analysis, rely critically on the estimation of the entropy of a distribution. In this paper, we seek entropy estimators that are efficient and achieve the local asymptotic minimax lower bound with respect to squared error loss. To this end, we study weighted averages of the estimators originally proposed by Kozachenko and Leonenko [Probl. Inform. Transm. 23 (1987), 95--101], based on the {$k$}-nearest neighbour distances of a sample of {$n$} independent and identically distributed random vectors in {$\mathbb{R}^d$}. A careful choice of weights enables us to obtain an efficient estimator in arbitrary dimensions, given sufficient smoothness, while the original unweighted estimator is typically only efficient when {$d \leq 3$}. In addition to the new estimator proposed and theoretical understanding provided, our results facilitate the construction of asymptotically valid confidence intervals for the entropy of asymptotically minimal width.},
	keywords  = {62G20, 62G05, efficiency, Entropy estimation, Kozachenko--Leonenko estimator, weighted nearest neighbours},
	file      = {Full Text PDF:/Users/soumikp/Zotero/storage/L4QT355T/Berrett et al. - 2019 - Efficient multivariate entropy estimation via \$k\$-.pdf:application/pdf},
}

Downloads: 0