@inproceedings{c7d07b1a46e54abfb9014546d53fd0dd,
title = "Bimodal HCI-related affect recognition",
abstract = "Perhaps the most fundamental application of affective computing would be Human-Computer Interaction (HCI), in which the computer is able to detect and track the user's affective states and provide corresponding feedback. The human multi-sensor affect system defines the expectation of a multimodal affect analyzer. In this paper, we present our efforts toward audio-visual HCI-related affect recognition. With HCI applications in mind, we take into account some special affective states that indicate users' cognitive/motivational states. Because a facial expression is influenced by both an affective state and speech content, we apply a smoothing method to extract affective-state information from facial features. In the fusion stage, a voting method is applied to combine the audio and visual modalities, greatly improving the final affect recognition accuracy. We test our bimodal affect recognition approach on 38 subjects with 11 HCI-related affect states. Extensive experimental results show that the average person-dependent affect recognition accuracy is almost 90% with our bimodal fusion.",
keywords = "Affect recognition, Affective computing, Emotion recognition, HCI, Multimodal human-computer interaction",
author = "Zhihong Zeng and Jilin Tu and Ming Liu and Tong Zhang and Nicholas Rizzolo and Zhenqiu Zhang and Thomas S. Huang and Dan Roth and Stephen Levinson",
year = "2004",
doi = "10.1145/1027933.1027958",
language = "English (US)",
isbn = "1581139543",
publisher = "Association for Computing Machinery",
pages = "137--143",
booktitle = "ICMI'04 - Sixth International Conference on Multimodal Interfaces",
address = "United States",
note = "ICMI'04 - Sixth International Conference on Multimodal Interfaces; Conference dates: 14-10-2004 through 15-10-2004",
}