@inproceedings{b3d5b57a45314119999976b82686a19a,
  author    = {Huang, Jui Ting and Hasegawa-Johnson, Mark},
  title     = {Semi-supervised training of {Gaussian} mixture models by conditional entropy minimization},
  booktitle = {Proceedings of the 11th Annual Conference of the International Speech Communication Association, {INTERSPEECH} 2010},
  publisher = {International Speech Communication Association},
  pages     = {1353--1356},
  year      = {2010},
  keywords  = {Conditional entropy, Gaussian Mixture Models, Phonetic classification, Semi-supervised learning},
  abstract  = {In this paper, we propose a new semi-supervised training method for Gaussian Mixture Models. We add a conditional entropy minimizer to the maximum mutual information criteria, which enables to incorporate unlabeled data in a discriminative training fashion. The training method is simple but surprisingly effective. The preconditioned conjugate gradient method provides a reasonable convergence rate for parameter update. The phonetic classification experiments on the TIMIT corpus demonstrate significant improvements due to unlabeled data via our training criteria.},
  note      = {This research was supported by NSF 07-03624. Opinions and findings are those of the authors, and are not endorsed by the NSF.},
  language  = {English (US)},
}