@inproceedings{34dd33590ccb4b7f8c6e635e71eb66c8,
  author        = {Wu, Yihong and Yang, Pengkun},
  title         = {Optimal Entropy Estimation on Large Alphabets via Best Polynomial Approximation},
  booktitle     = {Proceedings - 2015 {IEEE} International Symposium on Information Theory, {ISIT} 2015},
  series        = {{IEEE} International Symposium on Information Theory - Proceedings},
  publisher     = {Institute of Electrical and Electronics Engineers Inc.},
  pages         = {824--828},
  year          = {2015},
  month         = sep,
  day           = {28},
  doi           = {10.1109/ISIT.2015.7282570},
  language      = {English (US)},
  address       = {United States},
  internal-note = {address should be the publisher's city; "United States" is an auto-export artifact -- TODO confirm against the publisher record},
  abstract      = {Consider the problem of estimating the {Shannon} entropy of a distribution on $k$ elements from $n$ independent samples. We show that the minimax mean-square error is within universal multiplicative constant factors of $\frac{k}{n \log k} + \frac{\log^2 k}{n}$. This implies the recent result of Valiant--Valiant [1] that the minimal sample size for consistent entropy estimation scales according to $\Theta(\frac{k}{\log k})$. The apparatus of best polynomial approximation plays a key role in both the minimax lower bound and the construction of optimal estimators.},
  keywords      = {best polynomial approximation, entropy estimation, functional estimation, high-dimensional statistics, large alphabet},
  note          = {Funding Information: This research was supported by the National Science Foundation under Grant IIS-1447879, and CCF-1423088; IEEE International Symposium on Information Theory, ISIT 2015 ; Conference date: 14-06-2015 Through 19-06-2015},
}