@inproceedings{Feuerstack2007,
  author    = {Sebastian Feuerstack and Marco Blumendorf and Sahin Albayrak},
  title     = {Prototyping of Multimodal Interactions for Smart Environments based on Task Models},
  year      = {2007},
  booktitle = {Constructing Ambient Intelligence: AmI 2007 Workshops, Darmstadt},
  abstract  = {Smart environments offer interconnected sensors, devices, and appliances that can be considered for interaction to substantially extend the potentially available modality mix. This promises a more natural and situation-aware human-computer interaction. Technical challenges and differences in interaction principles for distinct modalities restrict multimodal systems to specialized systems supporting specific situations only. To overcome these limitations and enable an easier integration of new modalities supporting interaction in smart environments, we propose a task-based notation that can be interpreted at runtime. The notation supports evolutionary prototyping of new interaction styles for already existing interactive systems. We eliminate the gap between design time and runtime, since support for additional modalities can be prototyped at runtime into an already existing interactive system.}
}