@inproceedings{c5b7594717a6498b932e34a3deb4fa6c,
  title     = "{PFunk-H}: Approximate query processing using perceptual models",
  abstract  = "Interactive visualization tools (e.g., crossfilter) are critical to many data analysts by making the discovery and verification of hypotheses quick and seamless. Increasing data sizes has made the scalability of these tools a necessity. To bridge the gap between data sizes and interactivity, many visualization systems have turned to sampling-based approximate query processing frameworks. However, these systems are currently oblivious to human perceptual visual accuracy. This could either lead to overly aggressive sampling when the approximation accuracy is higher than needed or an incorrect visual rendering when the accuracy is too lax. Thus, for both correctness and efficiency, we propose to use empirical knowledge of human perceptual limitations to automatically bound the error of approximate answers meant for visualization. This paper explores a preliminary model of sampling-based approximate query processing that uses perceptual models (encoded as functions) to construct approximate answers intended for visualization. We present initial results that show that the approximate and non-approximate answers for a given query differ by a perceptually indiscernible amount, as defined by perceptual functions.",
  author    = "Alabi, Daniel and Wu, Eugene",
  note      = "This work is supported in part by NSF 1527765. Presented at the 1st Workshop on Human-in-the-Loop Data Analytics ({HILDA} 2016); conference date: 26 June 2016",
  year      = "2016",
  month     = jun,
  day       = "26",
  doi       = "10.1145/2939502.2939512",
  language  = "English (US)",
  series    = "{HILDA} 2016 - Proceedings of the Workshop on Human-In-the-Loop Data Analytics",
  publisher = "Association for Computing Machinery",
  booktitle = "{HILDA} 2016 - Proceedings of the Workshop on Human-In-the-Loop Data Analytics",
  address   = "New York, NY, United States",
}