@article{tatzgern2014exploring,
title = {Exploring real world points of interest: Design and evaluation of object-centric exploration techniques for augmented reality},
journal = {Pervasive and Mobile Computing},
year = {2014},
note = {In press},
abstract = {Augmented reality (AR) enables users to retrieve additional information about real world objects and locations. Exploring such location-based information in AR requires physical movement to different viewpoints, which may be tiring and even infeasible when viewpoints are out of reach. In this paper, we present object-centric exploration techniques for handheld AR that allow users to access information freely using a virtual copy metaphor. We focus on the design of techniques that allow the exploration of large real world objects. We evaluated our interfaces in a series of studies in controlled conditions and compared them to a 3D map interface, which is a more common method for accessing location-based information. Based on our findings, we put forward design recommendations that should be considered by future generations of location-based AR browsers, 3D tourist guides or situated urban planning.},
doi = {10.1016/j.pmcj.2014.08.010},
author = {Tatzgern, Markus and Grasset, Raphael and Veas, Eduardo and Kalkofen, Denis and Seichter, Hartmut and Schmalstieg, Dieter}
}
@article{grubert2014utility,
title = {The Utility of {Magic Lens} Interfaces on Handheld Devices for Touristic Map Navigation},
journal = {Pervasive and Mobile Computing},
year = {2014},
note = {In press},
abstract = {This paper investigates the utility of the Magic Lens metaphor on small screen handheld devices for map navigation given state of the art computer vision tracking. We investigate both performance and user experience aspects. In contrast to previous studies a semi-controlled field experiment (n=18) in a ski resort indicated significant longer task completion times for a Magic Lens compared to a Static Peephole interface in an information browsing task. A follow-up controlled laboratory study (n=21) investigated the impact of the workspace size on the performance and usability of both interfaces. We show that for small workspaces Static Peephole outperforms Magic Lens. As workspace size increases performance gets equivalent and subjective measurements indicate less demand and better usability for Magic Lens. Finally, we discuss the relevance of our findings for the application of Magic Lens interfaces for map interaction in touristic contexts.},
doi = {10.1016/j.pmcj.2014.08.005},
author = {Grubert, Jens and Grasset, Raphael and Pahud, Michel and Schmalstieg, Dieter and Seichter, Hartmut}
}
@misc{grasset2014capture,
title = {Augmented Reality ({AR}) Capture \& Play},
number = {US2014/037932},
year = {2014},
month = may,
note = {US Patent Application Publication No. US 2014/0344762 A1. Assignee: Qualcomm Inc, San Diego, United States of America},
author = {Grasset, Raphael and Seichter, Hartmut}
}
@inproceedings{grubert2014towards,
title = {Towards User Perspective Augmented Reality for Public Displays},
booktitle = {International Symposium on Mixed and Augmented Reality 2014},
year = {2014},
month = sep,
publisher = {IEEE},
organization = {IEEE},
address = {Munich, Germany},
abstract = {We work towards ad-hoc augmentation of public displays on handheld devices, supporting user perspective rendering of display content. Our prototype system only requires access to a screencast of the public display, which can be easily provided through common streaming platforms and is otherwise self-contained. Hence, it easily scales to multiple users.},
doi = {10.1109/ISMAR.2014.6948443},
author = {Grubert, Jens and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{seichter2013designing,
title = {Designing Mobile Augmented Reality},
booktitle = {Proceedings of the 15th International Conference on Human-computer Interaction with Mobile Devices and Services},
year = {2013},
publisher = {ACM},
organization = {ACM},
address = {New York, NY, USA},
keywords = {adaptive user interfaces, Augmented Reality, design, mobile},
isbn = {978-1-4503-2273-7},
doi = {10.1145/2493190.2499472},
url = {http://doi.acm.org/10.1145/2493190.2499472},
author = {Seichter, Hartmut and Grubert, Jens and Langlotz, Tobias}
}
@inproceedings{tatzgern2013exploring,
title = {Exploring Distant Objects with Augmented Reality},
booktitle = {Joint Virtual Reality Conference of EGVE - EuroVR},
year = {2013},
month = dec,
abstract = {Augmented reality (AR) enables users to retrieve additional information about the real world objects and locations. Exploring such location-based information in AR requires physical movement to different viewpoints, which may be tiring and even infeasible when viewpoints are out of reach. In this paper, we present object-centric exploration techniques for handheld AR that allow users to access information freely using a virtual copy metaphor to explore large real world objects. We evaluated our interfaces in controlled conditions and collected first experiences in a real world pilot study. Based on our findings, we put forward design recommendations that should be considered by future generations of location-based AR browsers, 3D tourist guides, or in situated urban planning.},
author = {Tatzgern, Markus and Grasset, Raphael and Veas, Eduardo and Kalkofen, Denis and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{mulloni2012panoramic,
title = {360{\textdegree} Panoramic Overviews for Location-Based Services},
booktitle = {CHI 2012},
year = {2012},
publisher = {ACM},
organization = {ACM},
address = {Austin, Texas, USA},
abstract = {We investigate 360{\textdegree} panoramas as overviews to support users in the task of locating objects in the surrounding environment. Panoramas are typically visualized as rectangular photographs, but this does not provide clear cues for physical directions in the environment. In this paper, we conducted a series of studies with three different shapes: Frontal, Top-Down and Bird{\textquoteright}s Eye; the last two shapes are chosen because they provide a clearer representation of the spatial mapping between panorama and environment. Our results show that good readability of the panorama is most important and that a clear representation of the spatial mapping plays a secondary role. This paper is the first to provide understanding on how users exploit 360{\textdegree} panoramic overviews to locate objects in the surrounding environment and how different design factors can affect user performance.},
author = {Mulloni, Alessandro and Seichter, Hartmut and D{\"u}nser, Andreas and Baudisch, Patrick and Schmalstieg, Dieter}
}
@inproceedings{mulloni2012experiences,
title = {Experiences with the Impact of Tracking Technology in Mobile Augmented Reality Evaluations},
booktitle = {MobiVis Workshop at MobileHCI 2012},
year = {2012},
month = sep,
publisher = {ACM},
organization = {ACM},
address = {San Francisco, CA},
abstract = {In this paper, we discuss the impact of tracking technology on user studies of mobile augmented reality applications. We present findings from several of our previous publications in the field, discussing how tracking technology can impact, influence and compromise experimental results. Lessons learned from our experience show that suitable tracking technology is a key requirement and a fundamental factor in the user experience of the application. Tracking technology should therefore be considered not only during implementation but also as a factor in the design and evaluation phases.},
author = {Mulloni, Alessandro and Grubert, Jens and Seichter, Hartmut and Langlotz, Tobias and Grasset, Raphael and Reitmayr, Gerhard and Schmalstieg, Dieter}
}
@inproceedings{hofmann2012gpgpu,
title = {A {GPGPU} Accelerated Descriptor for Mobile Devices},
booktitle = {ISMAR 2012},
year = {2012},
month = nov,
publisher = {IEEE},
organization = {IEEE},
address = {Atlanta, Georgia, U.S.A.},
abstract = {We present a modified upright SURF feature descriptor for mobile phone GPUs. Our implementation called uSURF-ES is multiple times faster than a comparable CPU variant on the same device. Our results proof the feasibility of modern mobile graphics accelerators for GPGPU tasks especially for the detection phase in natural feature tracking used in Augmented Reality applications.},
keywords = {Augmented Reality, GPGPU, OpenGL ES, SURF},
isbn = {978-1-4673-4661-0},
doi = {10.1109/ISMAR.2012.6402578},
author = {Hofmann, Robert and Seichter, Hartmut and Reitmayr, Gerhard}
}
@inproceedings{mulloni2012indoor,
title = {Indoor Navigation with Mixed Reality {World-in-Miniature} Views and Sparse Localization on Mobile Devices},
booktitle = {AVI 2012},
year = {2012},
month = may,
abstract = {We present the design of an interface that provides continuous navigational support for indoor scenarios where localization is only available at sparse, discrete locations (info points). Our interface combines turn-by-turn instructions with a World-in-Miniature (WIM). In a previous study, we showed that using an Augmented Reality WIM at info points, and turn-by-turn instructions elsewhere, is a valid support for navigation inside an unknown building. In particular, we highlighted that users value the WIM as a tool for monitoring their location in the building. In this work, we focus on using the WIM continuously, not only at info points, to support navigation. We adapt the WIM views to the quality of localization by transitioning within Mixed Reality: we use Augmented Reality to provide an overview of the whole path at info points and Virtual Reality to communicate the next instruction when localization is not available. Our results from a new user study validate our interface design and show that users exploit not only turn-by-turn instructions but also the WIM throughout the path, to navigate with our interface. This paper provides insight on how a low-infrastructure indoor solution can support human navigational abilities effectively.},
author = {Mulloni, Alessandro and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{kainz2012omnikinect,
title = {{OmniKinect}: Real-Time Dense Volumetric Data Acquisition and Applications},
booktitle = {VRST},
year = {2012},
month = dec,
publisher = {ACM},
organization = {ACM},
address = {Vancouver, Canada},
abstract = {Real-time three-dimensional acquisition of real-world scenes has many important applications in computer graphics, computer vision and human-computer interaction. Inexpensive depth sensors such as the Microsoft Kinect allow to leverage the development of such applications. However, this technology is still relatively recent, and no detailed studies on its scalability to dense and view-independent acquisition have been reported. This paper addresses the question of what can be done with a larger number of Kinects used simultaneously. We describe an interference-reducing physical setup, a calibration procedure and an extension to the KinectFusion algorithm, which allows to produce high quality volumetric reconstructions from multiple Kinects whilst overcoming systematic errors in the depth measurements. We also report on enhancing image based visual hull rendering by depth measurements, and compare the results to KinectFusion. Our system provides practical insight into achievable spatial and radial range and into bandwidth requirements for depth data acquisition. Finally, we present a number of practical applications of our system.},
author = {Kainz, Bernhard and Hauswiesner, Stefan and Reitmayr, Gerhard and Steinberger, Markus and Grasset, Raphael and Gruber, Lukas and Veas, Eduardo and Kalkofen, Denis and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{soros2011augmented,
title = {Augmented Visualization with Natural Feature Tracking},
booktitle = {10th ACM International Conference on Mobile and Ubiquitous Multimedia},
year = {2011},
month = dec,
publisher = {ACM},
organization = {ACM},
address = {Beijing, China},
abstract = {Visualization systems often make use of large monitors or projection screens to display complex information. Even very sophisticated systems that exhibit complex user interfaces do usually not exploit advanced input and output devices. The interaction is typically limited to computer mouse and keyboard. One of the reasons for the lack of interaction devices is the high cost of special hardware.},
isbn = {978-1-4503-1096-3},
doi = {10.1145/2107596.2107597},
author = {S{\"o}r{\"o}s, Gabor and Rautek, Peter and Seichter, Hartmut and Gr{\"o}ller, Eduard}
}
@inproceedings{mulloni2011enhancing,
title = {Enhancing Handheld Navigation Systems with Augmented Reality},
booktitle = {Mobile HCI 2011, Workshop on Mobile Augmented Reality},
year = {2011},
abstract = {We investigate the role of augmented reality (AR) as a new kind of handheld interface to enhance navigation. We integrate AR with other more common interfaces into a handheld navigation system, and we conduct an exploratory study to see where and how people exploit the AR interface. Based on previous work on augmented photographs, we hypothesize that AR is more useful as a support for wayfinding at static locations just before road intersections. In partial contradiction with our hypotheses, our results show that AR is used mostly while walking, usually shortly before and after road intersections. Our results help drawing considerations informing both the design of AR interfaces and the development of tracking technologies.},
author = {Mulloni, Alessandro and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{mulloni2011handheld,
title = {Handheld Augmented Reality Indoor Navigation with Activity-Based Instructions},
booktitle = {Mobile HCI},
year = {2011},
month = aug,
abstract = {We present a novel design of an augmented reality interface to support indoor navigation. We combine activity-based instructions with sparse 3D localisation at selected info points in the building. Based on localisation accuracy and the users{\textquoteright} activities, such as walking or standing still, the interface adapts the visualisation by changing the density and quality of information shown. We refine and validate our design through user involvement in pilot studies. We finally present the results of a comparative study conducted to validate the effectiveness of our design and to explore how the presence of info points affects users{\textquoteright} performance on indoor navigation tasks. The results of this study validate our design and show an improvement in task performance when info points are present, which act as confirmation points and provide an overview of the task.},
author = {Mulloni, Alessandro and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{mulloni2011user,
title = {User Experiences with Augmented Reality Aided Navigation on Phones},
booktitle = {ISMAR 2011},
year = {2011},
month = oct,
abstract = {We investigate user experiences when using augmented reality (AR) as a new aid to navigation. We integrate AR with other more common interfaces into a handheld navigation system, and we conduct an exploratory study to see where and how people exploit AR. Based on previous work on augmented photographs, we hypothesize that AR is used more to support wayfinding at static locations when users approach a road intersection. In partial contrast to this hypothesis, our results from a user evaluation hint that users will expect to use the system while walking. Further, our results also show that AR is usually exploited shortly before and after road intersections, suggesting that tracking support will be mostly needed in proximity of road intersections.},
author = {Mulloni, Alessandro and Seichter, Hartmut and Schmalstieg, Dieter}
}
@inproceedings{dunser2010evaluation,
title = {Evaluation of Tangible User Interfaces for Desktop {AR}},
booktitle = {International Symposium on Ubiquitous Virtual Reality},
year = {2010},
pages = {36--39},
publisher = {IEEE Computer Society},
address = {Los Alamitos, CA, USA},
abstract = {In this work we evaluated the usability of tangible user interaction for traditional desktop augmented reality environments. More specifically, we compared physical sliders and tracked paddles, and traditional mouse input for a system control task. While task accuracy was the same for all interfaces, mouse input performed the fastest and input with a tracked paddle the slowest. Performance with the physical sliders fell between those two. We present these results along with various findings from user comments, and discuss how they may influence the design of future desktop AR systems.},
isbn = {978-0-7695-4124-2},
doi = {10.1109/ISUVR.2010.19},
author = {D{\"u}nser, Andreas and Looser, Julian and Grasset, Raphael and Seichter, Hartmut and Billinghurst, Mark}
}
@inproceedings{seichter2009multitouch,
title = {Multitouch Interaction for Tangible User Interfaces},
booktitle = {ISMAR 2009},
year = {2009},
month = oct,
publisher = {IEEE},
organization = {IEEE},
address = {Orlando, Florida},
abstract = {We introduce a novel touch-based interaction technique for Tangible User Interfaces (TUIs) in Augmented Reality (AR) applications. The technique allows for direct access and manipulation of virtual content on a registered tracking target, is robust and lightweight, and can be applied in numerous tracking and interaction scenarios.},
doi = {10.1109/ISMAR.2009.5336455},
author = {Seichter, Hartmut and Grasset, Raphael and Looser, Julian and Billinghurst, Mark}
}
@inproceedings{gilroy2009pad,
title = {{PAD-based} multimodal affective fusion},
booktitle = {Affective Computing and Intelligent Interaction and Workshops, 2009. ACII 2009. 3rd International Conference on},
year = {2009},
abstract = {The study of multimodality is comparatively less developed for Affective interfaces than for their traditional counterparts. However, one condition for the successful development of Affective interface technologies is the development of frameworks for the real-time multimodal fusion. In this paper, we describe an approach to multimodal affective fusion, which relies on a dimensional model, Pleasure-Arousal-Dominance (PAD) to support the fusion of affective modalities, each input modality being represented as a PAD vector. We describe how this model supports both affective content fusion and temporal fusion within a unified approach. We report results from early user studies which confirm the existence of a correlation between measured affective input and user temperament scores.},
keywords = {affective content fusion, affective interface technology, affective modalities, Appraisal, Atherosclerosis, Digital art, dimensional model, Displays, emotion recognition, Humans, image fusion, Merging, multimodal affective fusion, multimodality, pleasure-arousal-dominance, real-time multimodal fusion, Robustness, Speech, temporal fusion, User interfaces, user temperament scores},
doi = {10.1109/ACII.2009.5349552},
author = {Gilroy, Stephen W. and Cavazza, Marc and Niranen, Markus and Andr{\'e}, Elisabeth and Vogt, Thurid and Urbain, Jerome and Benayoun, Maurice and Seichter, Hartmut and Billinghurst, Mark}
}
@inproceedings{obaid2009quadratic,
title = {A Quadratic Deformation Model for Facial Expression Recognition},
booktitle = {DICTA 2009},
year = {2009},
month = dec,
address = {Melbourne, Australia},
abstract = {In this paper we propose a novel approach for recognizing facial expressions based on using an Active Appearance Model facial feature tracking system with the quadratic deformation model representations of facial expressions. Thirty seven Facial Feature points are tracked based on the MPEG-4 Facial Animation Parameters layout. The proposed approach relies on the Euclidian distance measures between the tracked feature points and the reference deformed facial feature points of the six main expressions (smile, sad, fear, disgust, surprise, and anger). An evaluation of 30 model subjects, selected randomly from the Cohn-Kanade Database, was carried out. Results show that the main six facial expressions can successfully be recognized with an overall recognition accuracy of 89\%. The proposed approach yields to promising recognition rates and can be used in real time applications.},
doi = {10.1109/DICTA.2009.51},
author = {Obaid, Mohammad and Mukundan, R. and Goecke, Roland and Billinghurst, Mark and Seichter, Hartmut}
}
@incollection{billinghurst2009tangible,
title = {Tangible interfaces for ambient augmented reality applications},
booktitle = {Human-Centric Interfaces for Ambient Intelligence},
year = {2009},
pages = {281--298},
chapter = {11},
internal-note = {publisher field missing for this book chapter -- verify and add},
author = {Billinghurst, Mark and Grasset, Raphael and Seichter, Hartmut}
}
@incollection{billinghurst2009towards,
title = {Towards Ambient Augmented Reality with Tangible Interfaces},
booktitle = {Human-Computer Interaction. Ambient, Ubiquitous and Intelligent Interaction},
series = {Lecture Notes in Computer Science},
volume = {5612},
year = {2009},
publisher = {Springer Berlin},
organization = {Springer Berlin},
address = {Heidelberg},
abstract = {Ambient Interface research has the goal of embedding technology that disappears into the user{\textquoteright}s surroundings. In many ways Augmented Reality (AR) technology is complimentary to this in that AR interfaces seamlessly enhances the real environment with virtual information overlay. The two merge together in context aware Ambient AR applications, which allow users to easily perceive and interact with Ambient Interfaces by using AR overlay of the real world. In this paper we describe how Tangible Interaction techniques can be used for Ambient AR applications. We will present a conceptual framework for Ambient Tangible AR Interface, a new generation of software and hardware tools for development and methods for evaluating Ambient Tangible AR Interfaces.},
isbn = {978-3-642-02579-2},
doi = {10.1007/978-3-642-02580-8},
author = {Billinghurst, Mark and Grasset, Raphael and Seichter, Hartmut and D{\"u}nser, Andreas}
}
@inproceedings{buchanan2008augmented,
title = {Augmented Reality and Rigid Body Simulation for Edutainment},
booktitle = {International Conference on Advances in Computer Entertainment Technology},
year = {2008},
month = dec,
publisher = {ACM},
organization = {ACM},
address = {Yokohama, Japan},
abstract = {Physics simulation is becoming more common in computing. We have developed a comprehensive toolkit to connect the physical and virtual world within Augmented Reality (AR) using rigid body simulation. Unlike existing techniques of embedding physics simulations into 3D environments, the use of rigid body simulations within AR requires a different approach. To demonstrate our approach we developed an edutainment game based on the concept of chain reactions and physical contraptions. In this paper we elaborate on the constraints introduced by mixing AR and rigid body simulation, and how it subsequently effects the visual richness and perceptual appearance of an AR simulation. We describe our implementation approach and provide an analysis of additional scenarios which would be enriched by physical simulation.},
doi = {10.1145/1501750.1501754},
author = {Buchanan, Philip and Seichter, Hartmut and Billinghurst, Mark and Grasset, Raphael}
}
@inproceedings{fraser2008chinese,
title = {``{Chinese Whispers}{''}: mediating oscillations between physical form and digital space},
booktitle = {International Conference on Computer Graphics and Interactive Techniques},
year = {2008},
month = dec,
publisher = {ACM Press},
organization = {ACM Press},
address = {Singapore},
abstract = {In this paper we investigate the use of next generation remote collaboration tools for supporting design education. "Chinese Whispers" [ref] refers to the concept of mediating between remote studios with new forms of hybrid designing and real time online collaboration. We conducted a sequence of experiments which explore the concept of linkage and slippage, occurring at the boundaries of converging technologies as a means of generating innovative and unexpected design outcomes {\textendash} real and virtual, tangible and intangible. The project is framed in a contemporary context with background research into current concepts and theories centered on learning ecology, and user-generated design into future trends and state of the art technologies. Chinese Whisper involved linking hard- and soft-wares that are not immediately compatible in a remote networked environment, to facilitate an educational design process in both remote and real environments. Through this process students engaged in 3D scanning, downloading, visualizing, analyzing, remote simultaneous modeling in stereo and deciding when to hit {\textquoteright}3D print{\textquoteright} at any given stage to invent a new design methodology.},
isbn = {978-1-60558-388-4},
doi = {10.1145/1507713.1507733},
author = {Fraser, Simon and Lions, Marcia and Looser, Julian and Seichter, Hartmut}
}
@incollection{seichter2008communication,
title = {Communication in Augmented Reality Aided Architectural Design},
booktitle = {Mixed Reality in Architecture, Design and Construction},
year = {2008},
publisher = {Springer},
organization = {Springer},
isbn = {978-1-4020-9087-5},
author = {Seichter, Hartmut and Wang, Xiangyu and Schnabel, Marc Aurel}
}
@inproceedings{seichter2008composar,
title = {{ComposAR}: An Intuitive Tool for Authoring {AR} Applications},
booktitle = {International Symposium of Mixed and Augmented Reality (ISMAR 2008)},
year = {2008},
month = sep,
pages = {177--178},
publisher = {IEEE},
organization = {IEEE},
address = {Cambridge, UK},
abstract = {This paper introduces ComposAR, a tool to allow a wide audience to author AR and MR applications. It is unique in that it supports both visual programming and interpretive scripting, and an immediate mode for runtime testing. ComposAR is written in Python which means the user interface and runtime behavior can be easily customized and third-party modules can be incorporated into the authoring environment. We describe the design philosophy and the resulting user interface, lessons learned and directions for future research.},
author = {Seichter, Hartmut and Looser, Julian and Billinghurst, Mark}
}
@inproceedings{gilroy2008etree,
title = {{E-tree}: emotionally driven augmented reality art},
booktitle = {Proceeding of the 16th ACM international conference on Multimedia},
year = {2008},
pages = {945--948},
publisher = {ACM},
organization = {ACM},
address = {Vancouver, British Columbia, Canada},
abstract = {In this paper, we describe an Augmented Reality Art installation, which reacts to user behaviour using Multimodal analysis of affective signals. The installation features a virtual tree, whose growth is influenced by the perceived emotional response from spectators. The system implements a {\textquoteright}magic mirror{\textquoteright} paradigm (using a large-screen display or projection system) and is based on the ARToolkit with extended representations for scene graphs. The system relies on a PAD dimensional model of affect to support the fusion of different affective modalities, while also supporting the representation of affective responses that relate to aesthetic impressions. The influence of affective input on the visual component is achieved by mapping affective data to an L-System governing virtual tree behaviour. We have performed an early evaluation of the system, both from the technical perspective and in terms of user experience. Post-hoc questionnaires were generally consistent with data from multimodal affective processing, and users rated the overall experience as positive and enjoyable, regardless of how proactive they were in their interaction with the installation.},
isbn = {978-1-60558-303-7},
doi = {10.1145/1459359.1459529},
author = {Gilroy, Stephen W. and Cavazza, Marc and Chaignon, Remi and M{\"a}kel{\"a}, Satu-Marja and Niranen, Markus and Andr{\'e}, Elisabeth and Vogt, Thurid and Urbain, Jerome and Billinghurst, Mark and Seichter, Hartmut and Benayoun, Maurice}
}
@inproceedings{buchanan2008interesting,
title = {The Interesting Mechanism - A {Rube Goldberg} Machine in {AR}},
booktitle = {International Symposium of Mixed and Augmented Reality (ISMAR 2008)},
year = {2008},
month = sep,
publisher = {IEEE},
organization = {IEEE},
address = {Cambridge, UK},
author = {Buchanan, Philip and Seichter, Hartmut and Billinghurst, Mark}
}
@inproceedings{hong2008sensor,
title = {A Sensor-based Interaction for Ubiquitous Virtual Reality Systems},
booktitle = {ISUVR 2008},
year = {2008},
pages = {75--78},
publisher = {IEEE},
organization = {IEEE},
address = {Korea},
abstract = {In this paper, we propose a sensor-based interaction for ubiquitous virtual reality (U-VR) systems that users are able to interact implicitly or explicitly with through a sensor. Due to the advances in sensor technology, we can utilize sensory data as a means of user interactions. To show the feasibility of the proposed method, we extend the ComposAR Augmented Reality (AR) authoring tool to add support for sensor-based interaction. In this way the user can write simple scripts to rapidly prototype interaction with virtual 3D contents through a sensor. We believe that the proposed method provides natural user interactions for U-VR systems.},
keywords = {AR, Authoring, Sensor-based Interaction, U-VR},
isbn = {978-0-7695-3259-2},
doi = {10.1109/ISUVR.2008.19},
author = {Hong, Dongpyo and Looser, Julian and Seichter, Hartmut and Billinghurst, Mark and Woo, Woontak}
}
@inproceedings{schnabel2008touching,
title = {Touching the Untouchables: Virtual-, Augmented- and Reality},
booktitle = {CAADRIA 2008},
series = {CAADRIA},
year = {2008},
month = apr,
internal-note = {original month field read 9/4/2009, inconsistent with year 2008; CAADRIA 2008 was held in April 2008 -- verify},
publisher = {Department of Architecture, University of Chiang Mai, Thailand},
organization = {Department of Architecture, University of Chiang Mai, Thailand},
address = {Chiang Mai, Thailand},
abstract = {This paper brings forth an overview of design and interaction within realms stretching from reality to virtuality. In recent years, architects have been exploring creative technologies and potentials using a variety of interfaces ranging from real and virtual to augmented reality (AR) and mixed reality (MR) media. The process of design and method of communication are becoming increasingly imperative for inter-disciplinary work. This highlights the need for a structured review of successful effective adoptions and settings of those realms and technologies. This paper critically reflects on lessons learned from architectural design research, which employ virtual, augmented, mixed and real environments, and address rising issues in these areas.},
author = {Schnabel, Marc Aurel and Wang, Xiangyu and Seichter, Hartmut and Kvan, Thomas}
}
@inproceedings{dunser2007applying,
title = {Applying {HCI} Principles in {AR} Systems Design},
booktitle = {2nd International Workshop on Mixed Reality User Interfaces: Specification, Authoring, Adaptation (MRUI 2007)},
year = {2007},
month = mar,
author = {D{\"u}nser, Andreas and Grasset, Raphael and Seichter, Hartmut and Billinghurst, Mark}
}
@phdthesis{seichter2007phd,
title = {Augmented Reality Aided Design},
year = {2007},
pages = {187},
school = {The University of Hong Kong},
type = {{PhD} Thesis},
address = {Hong Kong},
abstract = {For generations the working patterns of architects changed progressively towards more abstraction. Augmented Reality (AR) technology provides the prospectus to reintegrate craft and design enhanced with the advantages of simulated representation. Borders between information architecture and real design dissolve. Nevertheless, flexible and low disturbance interfaces for the handling of immersive cooperative design work are missing. The main question for this research was the relationship of the design interface and immediate communication between involved parties. Do properties of the tools define what an object affords for in the design embedded communication? If there are effects it would indicate that design interfaces have properties that either engage or discourage to discuss a problem. In order to investigate these issues a laboratory experiment was created, which built upon methodology adapted for the use in the design process. It combines qualitative and quantitative measures in order to explain phenomena observed in the experiment. It has been found that the overall perceived communication is not affected by the change in design creation interface. However, an in-depth analysis of the actual working pattern unveiled that the design interface has an impact on the frequency of communication exchanges. To date the impact of design interfaces for AR has not been assessed. This study provides an insight into the inner workings of the design process in regard to communication with design in a AR mediated setting.},
doi = {10.5353/th_b3896659},
url = {http://library.hku.hk/record=b3828905},
author = {Seichter, Hartmut}
}
@inproceedings {45,
title = {Augmented Reality and Tangible User Interfaces in Collaborative Urban Design},
booktitle = {CAAD futures},
year = {2007},
month = {11/07/2007},
pages = {3--16},
publisher = {Springer},
organization = {Springer},
address = {Sydney, Australia},
isbn = {978-1-4020-6527-9},
author = {Hartmut Seichter}
}
@inproceedings {58,
title = {An Emotionally Responsive AR Art Installation},
booktitle = {International Symposium of Mixed and Augmented Reality (ISMAR 2007)},
series = {ISMAR},
year = {2007},
month = {13/11/2007},
publisher = {ACM Press},
organization = {ACM Press},
address = {Nara, Japan},
abstract = {In this paper, we describe a novel method of combining emotional input and an Augmented Reality (AR) tracking/display system to produce dynamic interactive art that responds to the perceived emotional content of viewer reactions and interactions. As part of the CALLAS project, our aim is to explore multimodal interaction in an Arts and Entertainment context. The approach we describe has been implemented as part of a prototype {\textquotedblleft}showcase{\textquotedblright} in collaboration with a digital artist designed to demonstrate how affective input from the audience of an interactive art installation can be used to enhance and enrich the aesthetic experience of the artistic work. We propose an affective model for combining emotionally-loaded participant input with aesthetic interpretations of interaction, together with a mapping which controls properties of dynamically generated digital art.},
author = {Stephen W. Gilroy and Marc Cavazza and Remi Chaignon and Satu-Marja M{\"a}kel{\"a} and Markus Niranen and Elisabeth Andr{\'e} and Thurid Vogt and Mark Billinghurst and Hartmut Seichter and Maurice Benayoun}
}
@inproceedings {33,
title = {Evaluation of spatial abilities through tabletop AR},
booktitle = {7th ACM SIGCHI New Zealand Chapter{\textquoteright}s international Conference on Computer-Human interaction: Design Centered HCI},
year = {2007},
month = {02/07/2007},
pages = {17--24},
publisher = {ACM Press, New York},
organization = {ACM Press, New York},
address = {Hamilton, New Zealand},
abstract = {Research has been done into improving the means by which we organise and manage information. The usefulness of 2D versus 3D interfaces and environments has also been debated and evaluated. Human spatial abilities can be used to store more information about particular objects including their position in space. Our hypothesis states that as 3D objects contain more information about themselves and their relative position in space than 2D objects, although users take longer to process this information, they should be more accurate when searching and retrieving 3D objects. The evaluation study conducted compared spatial abilities between a 2D version of a memory game and an Augmented Reality (AR) version. Results showed that participants took significantly longer to complete the AR 3D version of the game than the 2D version, but did so with significantly fewer attempts i.e. they were more accurate. These results are specifically relevant for the design and development process of interfaces for AR applications.},
doi = {10.1145/1278960.1278963},
author = {Moffat Mathews and Madan Challa and Cheng-Tse Chu and Gu Jian and Hartmut Seichter and Raphael Grasset}
}
@inproceedings {50,
title = {From Virtuality to Reality and Back},
booktitle = {Conference on International Association of Societies of Design Research 2007 (IASDR)},
year = {2007},
month = {12/11/2007},
address = {Hong Kong},
abstract = {There has been a growing research interest in investigating techniques to combine real and virtual spaces. A variety of {\textquotedblleft}reality{\textquotedblright} concepts such as Virtual Reality and Augmented Reality and their supporting technologies have emerged in the field of design to adopt the task of replacing or merging our physical world with the virtual world. The different realities can be tailored to enhance comprehension for specific design activities along a design life-cycle. This paper presents state- of-the-art applications of these {\textquotedblleft}reality{\textquotedblright} concepts in design and related areas, and proposes a classification of these realities to address suitability issues for the effective utilization of the concepts and technologies. Their potentials and implications in certain design activities are also discussed.},
author = {Schnabel, Marc Aurel and Wang, Xiangyu and Hartmut Seichter and Thomas Kvan}
}
@inproceedings {55,
title = {The Mixed Reality Book: A New Multimedia Reading Experience},
booktitle = {CHI{\textquoteright}2007 Interactivity},
year = {2007},
author = {Raphael Grasset and Andreas D{\"u}nser and Hartmut Seichter and Mark Billinghurst}
}
@inproceedings {Hampshire2006a,
title = {Augmented Reality Authoring: Generic Context from Programmer to Designer},
booktitle = {OZCHI{\textquoteright}06},
year = {2006},
address = {Sydney, Australia},
abstract = {Developing an Augmented Reality (AR) application is usually a long and non-intuitive task. Few methodologies address this problem and tools implementing these are limited or non-existent. To date there is no efficient and easy development tool tailored to the needs of Mixed Reality (MR). We are presenting an initial taxonomy of MR applications, addressing the different levels of abstraction for defining the relation between real and virtual world. We then demonstrate some development approaches and describe tools and libraries that we implemented in order to illustrate aspects of our authoring taxonomy. Finally, we provide a definition addressing the requirements for new generation of AR rapid application development (RAD) tools based on actual implementations.},
doi = {10.1145/1228175.1228259},
author = {Alastair Hampshire and Hartmut Seichter and Raphael Grasset and Mark Billinghurst}
}
@inproceedings {Looser2006a,
title = {OSGART - A Pragmatic Approach to MR},
booktitle = {International Symposium of Mixed and Augmented Reality (ISMAR 2006)},
series = {ISMAR},
year = {2006},
address = {Santa Barbara, CA, USA},
abstract = {We are presenting a software development framework called OSGART for Rapid Application Developement (RAD) in the domain of Mixed Reality (MR). This toolkit is being developed as an extension to OpenSceneGraph [3]. It implements a hierarchical, scenegraph based approach to marker based AR using the AR Toolkit [4]. Due to its{\textquoteright} tight integration with OpenSceneGraph it provides a valuable test environment for bi-directional transition between immersive Virtual Environments (IVE) and Augmented Reality (AR).},
author = {Julian Looser and Raphael Grasset and Hartmut Seichter and Mark Billinghurst}
}
@inproceedings {Seichter05a,
title = {Assessing Virtual Tangibility - Usability Evaluation Methods for Augmented Reality Urban Design},
booktitle = {CAAD futures},
year = {2005},
pages = {151--159},
address = {Vienna, Austria},
abstract = {Design technology simulates a variety of senses but on the other hand restricts them to audio and visual responses. What happens if technology can accommodate more senses in the creation process and how does it affect the way we approach design? This paper investigates the implication of tangible interfaces in design computing. The focal point is to assess the factors of perception and cooperative working by employing an Augmented Reality (AR) setup with tangible interfaces in a design studio. A concept of usability evaluation is discussed with the focus on core theories and resulting methodology.},
author = {Hartmut Seichter}
}
@booklet {59,
title = {Augmented Reality and Tangible User Interfaces for Urban Design},
howpublished = {Young VR 2005},
year = {2005},
url = {http://www.postech.ac.kr/cse/vr/yvr2005/},
author = {Hartmut Seichter}
}
@inproceedings {60,
title = {Digital and Tangible Sensation: An Augmented Reality Urban Design Studio},
booktitle = {The Tenth Conference on Computer-Aided Architectural Design Research in Asia (CAADRIA 2005)},
year = {2005},
month = {04/2005},
pages = {191--202},
address = {New Delhi, India},
abstract = {Traditionally urban design is perceived, communicated and created using physical and digital media. However, these realms are handled as separate entities, which hinder collaboration, understanding and communication. Collaborative Augmented Reality (AR) systems can integrate these tasks into one media type and allow a different conversation with complex issues. Human Computer Interfaces and Tangible User Interfaces play key role in AR. They allow an engagement with both the real and virtual component of an urban design project. This paper describes an urban design studio that employs AR as medium of collaboration, the theoretical framework of sense of presence, the understanding and the quality of the resulting design.},
author = {Hartmut Seichter and Schnabel, Marc Aurel}
}
@inproceedings {Seichter04a,
title = {Benchworks: Augmented Reality Urban Design},
booktitle = {Computer-Aided Architectural Design Research in Asia (CAADRIA 2004)},
year = {2004},
pages = {937--946},
publisher = {Yonsei University Press},
organization = {Yonsei University Press},
address = {Seoul, Korea},
abstract = {The problems of inspecting urban design proposals are different to that of architecture. The larger context is a crucial aspect in urban design. Generally the issues are not of detailed design but rather understanding space and spatial features. Discussions about proposals use plans and large urban design models. The models are cumbersome and access difficult for collaborative consultation. This paper introduces a prototype for an Augmented Reality system for analyzing and representational design in an urban design scale. The system is designed as a workbench for collaboratively and dynamically exploring in an urban design model.},
author = {Hartmut Seichter}
}
@inproceedings {Seichter04b,
title = {Tangible Interfaces in Design Computing},
booktitle = {Education and Research in Computer Aided Architectural Design in Europe (eCAADe 2004)},
year = {2004},
pages = {159--166},
address = {Copenhagen, Denmark},
abstract = {Sensorial richness is essential in the design process, yet digital design tools do not respond to this need. Tangible interfaces offer an opportunity for interaction with design computing systems to explore means of supporting a wider range of experiences. In this paper we look at implementations of tangible interfaces through a framework based on the concept of affordance. We conclude with a brief introduction to experimental tangible interfaces that have been developed in order to carry out collaborative user evaluations in a design studio setting and evaluate these in the same framework.},
author = {Hartmut Seichter and Thomas Kvan}
}
@article {63,
title = {Augmented Reality Aided Design Investigation},
journal = {International Journal of Architectural Computing},
volume = {1},
year = {2003},
author = {Hartmut Seichter}
}
@inproceedings {42,
title = {Collaborative Augmented Sketching},
booktitle = {2nd International IEEE AR Toolkit Workshop},
year = {2003},
month = {10/2003},
publisher = {IEEE},
organization = {IEEE},
address = {Tokyo, Japan},
abstract = {The aim of this paper is to demonstrate a software prototype using AR Toolkit (Billinghurst and Kato, 1999) for collaborative augmented reality sketching in architectural design. The author introduces a non-intrusive interaction technique developed for this prototype. Additionally, sketching and distribution mechanisms are discussed and illustrated. The prototype uses non-photo-realistic rendering and an adaptive tessellation mechanism in the geometry kernel to provide a visual cue for the conceptual stage of an architectural design.},
isbn = {0-7803-8240-4},
doi = {10.1109/ART.2003.1320425},
author = {Hartmut Seichter}
}
@inproceedings {30,
title = {sketchand+ - a collaborative augmented reality sketching application},
booktitle = {CAADRIA 2003},
year = {2003},
pages = {209--219},
address = {Bangkok, Thailand},
abstract = {The sketch is the embodiment of the architectural discussion. It incorporates rapidness and fuzziness and as this it is an object of interpretation. The interesting thing there is the question, if the usage of VR/AR already in the early phases of a design can have an impact for the quality of a design-process. Examples like VRAM (Regenbrecht et al., 2000) or TAP (Seichter et al., 2000) showed that there is a huge potential for research. The sketch as one of the parts of an early design is tightly coupled with cognitive aspects and communication. Pictured by a sketch is just a snapshot of what have to be discussed in the ensuing design procedure. The intention behind this work is an exploration about a medium which is not yet adapted to the digital world (Myers et al., 2000) and it can be easily be described with the words of Harald Innis: Mankind constantly being caught in his own traps: language and systems, developed and most difficult to break down. (Innis, 1951)},
author = {Hartmut Seichter}
}
@article {66,
title = {TAP - the architectural playground 2},
journal = {International Journal of Architectural Computing},
volume = {1},
year = {2003},
author = {Hartmut Seichter}
}
@inproceedings {67,
title = {TAP - The Architectural Playground - C++ framework for scalable distributed collaborative architectural virtual environments},
booktitle = {20th Conference on Education in Computer Aided Architectural Design in Europe (eCAADe 2002)},
series = {eCAADe},
year = {2002},
month = {04/10/2002},
pages = {422--426},
address = {Warsaw, Poland},
abstract = {Architecture is built information (Schmitt, 1999). Architects have the task of restructuring and translating information into buildable designs. The beginning of the design process where the briefing is transformed into an idea is a crucial phase in the design process. It is where the architect makes decisions which influence the rest of the design development process (Vries et al., 1998). It is at this stage where most information is unstructured but has to be integrated into a broad context. This is where TAP is positioned {\textendash} to support the architect in finding solutions through the creation of spatially structured information sets without impairing thereby the creative development. We want to enrich the inspiration of an architect with a new kind of information design. A further aspect is workflow in a distributed process where the architect{\textquoteright}s work becomes one aspect of a decentralised working patterns. The software supports collaborative work with models, sketches and text messages within an uniform surface. The representations of the various media are connected and combined with each other and the user is free to combine them according to his or her needs.},
author = {Hartmut Seichter and Donath, Dirk and Petzold, Frank}
}
@inproceedings {69,
title = {Augmented Reality Techniques for Design and Revitalisation in Existing Built Environments},
booktitle = {Association of Computer-Aided Design in Architecture (ACADIA 2001)},
year = {2001},
month = {11/10/2001},
address = {Buffalo, NY},
abstract = {Building activity in Germany is moving increasingly toward combined newbuild and renovation projects. Essential for effective computer-aided planning within an existing context is not only the use of on-site computer-aided measurement tools but also an integrative cooperation between the different disciplines involved via an information and communication system. Interdisciplinary cooperation needs to be tailored to the integrative aspects in renovation and revitalisation work. Economic factors determine the viability of an architectural project, and reliable costing information is vital. Existing IT-approaches to this problem are not yet sufficiently exploited. In ongoing research at our university (collaborative research center {\textquotedblright}Materials and Structure in the Revitalisation of Buildings{\textquotedblright}) methods and techniques of revitalisation are being investigated. A special branch of the collaborative research center is investigating possibilities of computer-aided building measurement and communication platforms for professional disciplines (www.uni-weimar.de/sfb). The aim is to develop a general approach to the revitalisation of buildings. This paper discusses possible application areas of AR/VR techniques in the revitalisation of buildings from the point of view of the user and are based on the real project {\textquotedblleft}Cooling factory Gera{\textquotedblright}. Based on the necessities of revitalisation projects, technical requirements are developed. The project is funded by the Deutsche Forschungsgesellschaft (DFG).},
isbn = {1-880250-10-1},
author = {Donath, Dirk and Petzold, Frank and Jakob Beetz and Hartmut Seichter and Klaus Grether}
}
@inproceedings {68,
title = {Cooling Factory - a concrete project to test new architectural applications for Augmented Reality},
booktitle = {International Conference on Augmented, Virtual Environments and Three-Dimensional Imaging 2001},
series = {ICAV3D},
year = {2001},
month = {30/05/2001},
publisher = {Verlag Myconos},
organization = {Verlag Myconos},
address = {Myconos, Greece},
abstract = {This paper will discuss possible fields of applications of AR/VR for revitalization of buildings from a user{\textquoteright}s perspective. Considerations are based on the real-world project "Cooling factory Gera". The project is founded by "Deutsche Forschungsgesellschaft (DFG)".},
author = {Donath, Dirk and Jakob Beetz and Klaus Grether and Ernst Kruijff and Petzold, Frank and Hartmut Seichter}
}
@techreport {2001,
title = {VRAM/G {\textendash} gesture enabled VRAM},
year = {2001},
month = {06/2001},
institution = {Bauhaus Universit{\"a}t Weimar, Professur InfAr},
address = {Weimar, Germany},
abstract = {VRAM wurde 1998 durch Dr. Holger Regenbrecht als Applikation zum Testen und Modellieren von VRML-Welten ins Leben gerufen. Als Eckpunkte für die Entwicklung wurden Portabilität und leichte Bedienbarkeit auch durch Nicht-Informatiker gesetzt. Die Anbindung der Gestenerkennung soll schnelles Skizzieren im virtuellen Raum erlauben.},
author = {Hartmut Seichter}
}
@inproceedings {70,
title = {VRAM - A Virtual Reality Aided Modeller},
booktitle = {Education and Research in Computer Aided Architectural Design in Europe (eCAADe 2000)},
series = {eCAADe},
year = {2000},
month = {22/06/2000},
publisher = {Bauhaus University Press},
organization = {Bauhaus University Press},
address = {Weimar, Germany},
abstract = {This article describes VRAM, short for Virtual Reality Aided Modeler. VRAM is a conceptual design tool supported by Virtual Reality technology and an ongoing testbed for theory and methodology in the field of three dimensional user interfaces (3DUIs). The outcomes from the project should consist of an intuitive and comprehensive immersive surface modeler, next to a set of taxonomies and guidelines for the development of 3DUIs. Based on a modular structure, VRAMs program architecture allows the easy extension of functionality. The application consists of the seamless integration of four main functionality modules, namely system control, viewing \& browsing, editing and modeling. Based on the premise of portability, the software environment runs on both SGI Irix and MS Windows NT platforms. To be relatively independent in developing 3DUI techniques, the VRAM environment supports a multiplicity of input and output devices. Due to the focus on immersive modeling, tracking devices, head mounted displays and stereoprojection devices are the main I/O channels. In this article, we will primary focus on the functionality of the virtual modeller. },
keywords = {Digital Design, Interaction, Modeling, Virtual Reality},
author = {Donath, Dirk and Holger Regenbrecht and Ernst Kruijff and Hartmut Seichter and Jakob Beetz}
}