@inproceedings{02508bcb5dc94b4fbaa72f177bc8abb8,
title = "Seeing Aural: An Installation Transferring the Materials You Gaze to Sounds You Hear",
abstract = "We present an audio installation that allows participants to create sounds by looking at different parts of surfaces or objects in various colors and textures. By hacking a desktop eye tracker to track gaze points in 3D space, and deploying a webcam to pick up the hue, saturation and brightness of the gazed material, we create an experimental musical instrument that can be played by eye gaze. Users have the freedom to choose different raw materials and explore ways of seeing as a means of self-expression to create a unique audio experience.",
keywords = "eye tracking, sound design, gaze interaction",
author = "Yi-Ching Huang and Kuan-Ying Wu and Mon-Chu Chen",
year = "2014",
doi = "10.1145/2540930.2555198",
language = "English",
isbn = "9781450326353",
series = "TEI '14",
publisher = "Association for Computing Machinery, Inc",
pages = "323–324",
booktitle = "Proceedings of the 8th International Conference on Tangible, Embedded and Embodied Interaction",
address = "United States of America",
}