@inproceedings{1f6cc67020394453b5534bb4b5952886,
title = "Accuracy vs. Availability heuristic in multimodal affect detection in the wild",
abstract = "This paper discusses multimodal affect detection from a fusion of facial expressions and interaction features derived from students' interactions with an educational game in the noisy real-world context of a computer-enabled classroom. Log data of students' interactions with the game and face videos from 133 students were recorded in a computer-enabled classroom over a two day period. Human observers live annotated learning-centered affective states such as engagement, confusion, and frustration. The face-only detectors were more accurate than interaction-only detectors. Multimodal affect detectors did not show any substantial improvement in accuracy over the face-only detectors. However, the face-only detectors were only applicable to 65% of the cases due to face registration errors caused by excessive movement, occlusion, poor lighting, and other factors. Multimodal fusion techniques were able to improve the applicability of detectors to 98% of cases without sacrificing classification accuracy. Balancing the accuracy vs. applicability tradeoff appears to be an important feature of multimodal affect detection.",
keywords = "Affect, Affect detection, Facial expressions, Interaction, Missing data",
author = "Nigel Bosch and Huili Chen and Ryan Baker and Valerie Shute and Sidney D'mello",
note = "Publisher Copyright: {\textcopyright} 2015 ACM.; ACM International Conference on Multimodal Interaction, ICMI 2015 ; Conference date: 09-11-2015 Through 13-11-2015",
year = "2015",
month = nov,
day = "9",
doi = "10.1145/2818346.2820739",
language = "English (US)",
series = "ICMI 2015 - Proceedings of the 2015 ACM International Conference on Multimodal Interaction",
publisher = "Association for Computing Machinery, Inc",
pages = "267--274",
booktitle = "ICMI 2015 - Proceedings of the 2015 ACM International Conference on Multimodal Interaction",
}