@inproceedings{453cec7bc2794414b782aed67476291a,
  title     = {Novices who focused or experts who didn't? How effort and expertise cues affect judgments of crowd work},
  abstract  = {Crowd feedback services offer a new method for acquiring feedback during design. A key problem is that the services only return the feedback without any cues about the people who provided it. In this paper, we investigate two cues of a feedback provider - the effort invested in a feedback task and expertise in the domain. First, we tested how positive and negative cues of a provider's effort and expertise affected perceived quality of the feedback. Results showed both cues affected perceived quality, but primarily when the cues were negative. The results also showed that effort cues affected perceived quality as much as expertise. In a second study, we explored the use of behavioral data for modeling effort for feedback tasks. For a binary classification, the models achieved up to 92\% accuracy relative to human raters. This result validates the feasibility of implementing effort cues in crowd services. The contributions of this work will enable increased transparency in crowd feedback services, benefiting both designers and feedback providers.},
  keywords  = {Creativity, Crowdsourcing, Design, Feedback},
  author    = {Wu, {Y. Wayne} and Bailey, {Brian P.}},
  note      = {Publisher Copyright: {\textcopyright} 2016 ACM.; 34th Annual Conference on Human Factors in Computing Systems, CHI 2016 ; Conference date: 07-05-2016 Through 12-05-2016},
  year      = {2016},
  month     = may,
  day       = {7},
  doi       = {10.1145/2858036.2858330},
  language  = {English (US)},
  series    = {Conference on Human Factors in Computing Systems - Proceedings},
  publisher = {Association for Computing Machinery},
  pages     = {4086--4097},
  booktitle = {{CHI} 2016 - Proceedings, 34th Annual {CHI} Conference on Human Factors in Computing Systems},
  address   = {United States},
}