@inproceedings{181,
  author    = {Perzanowski, Dennis and Brock, Derek P. and Adams, William and Bugajska, Magdalena D. and Schultz, A. C. and Trafton, J. G. and Blisard, Samuel and Skubic, Marjorie},
  title     = {Finding the {FOO}: A Pilot Study for a Multimodal Interface},
  booktitle = {Proceedings of the 2003 {IEEE} International Conference on Systems, Man and Cybernetics},
  year      = {2003},
  month     = oct,
  doi       = {10.1109/ICSMC.2003.1244386},
  keywords  = {dynamic autonomy, gestural inputs, gesture recognition, headphone, human computer interaction, human-robot interaction, intelligent communication, intelligent robots, man-machine systems, mobile robots, multimodal interface, natural language interfaces, object recognition, speech inputs, speech recognition, touch screen, wireless microphone, wizard-of-Oz pilot study},
  abstract  = {In our research on intuitive means for humans and intelligent, mobile robots to collaborate, we use a multimodal interface that supports speech and gestural inputs. As a preliminary step to evaluate our approach and to identify practical areas for future work, we conducted a wizard-of-Oz pilot study with five participants who each collaborated with a robot on a search task in a separate room. The goal was to find a sign in the robot{\textquoteright}s environment with the word "FOO" printed on it. Using a subset of our multimodal interface, participants were told to direct the collaboration. As their subordinate, the robot would understand their utterances and gestures, and recognize objects and structures in the search space. Participants conversed with the robot through a wireless microphone and headphone and, for gestural input, used a touch screen displaying alternative views of the robot{\textquoteright}s environment to indicate locations and objects.},
}