@inbook{65515,
  abstract      = {This study explores the usability and onboarding process of a Mixed Reality (MR) application called PEARL, designed to prepare students for laboratory work. Originally developed for mobile Augmented Reality (mAR), PEARL was adapted for MR to offer a more immersive and intuitive experience through hand and gesture controls. Since many students lack experience with MR devices, a user-friendly onboarding system is essential. The study aims to redesign PEARL's user interface and onboarding experience, evaluating how intuitive interaction elements impact usability. First, a literature review will identify existing usability guidelines for MR applications, which will guide the redesign of the interface. This new version will then be tested with students through a user study. Feedback will be collected via an online survey to assess the onboarding and user experience, and the findings will be used to refine the design further. The expected outcome is an improved onboarding process and interface, making PEARL accessible even for MR novices, enhancing their ability to interact with 3D objects in a real-world setting. Ultimately, the study aims to provide best practices for developing intuitive MR interfaces and effective onboarding experiences, especially in educational contexts.},
  author        = {Alptekin, Mesut and Münstermann, Daniel and Temmen, Katrin},
  booktitle     = {Lecture Notes in Networks and Systems},
  isbn          = {9783032073181},
  issn          = {2367-3370},
  keywords      = {Meta Quest 3, Augmented Reality, Mixed Reality, PEARL, Electrical Engineering, Laboratory Training, Onboarding, User Experience, User Interface, Heuristics},
  location      = {Santiago, Chile},
  pages         = {199--211},
  publisher     = {Springer Nature Switzerland},
  title         = {Designing and Evaluating the Usability and Onboarding for a Mixed-Reality Application: A Case Study with {PEARL} ({Paderborn} Electrical Engineering {AR} Laboratory) and {Meta Quest 3}},
  doi           = {10.1007/978-3-032-07319-8_19},
  volume        = {2},
  year          = {2026},
  internal-note = {Review: booktitle holds the series name (LNNS) rather than the volume title, and location looks like the conference venue, not the publisher city -- verify against the published volume. Removed scraped "Abstract " prefix from abstract; single-braced author field so the " and " separators parse as three names; un-double-braced title with selective brace protection for acronyms/proper nouns.},
}

@inproceedings{38100,
  abstract      = {Smart Cards are becoming a ubiquitous means for securing a wide range of interactive applications. However in many cases its use is limited for authentication purposes only. In this paper we extend the use of smart cards for carrying abstract user interface descriptions which can be rendered on different and potentially remote clients that in addition can support different interaction modalities. This adds on the one hand to the security since a backend application cannot be used without the UI description, and on the other hand to the versatility of applications, since all devices with a respective renderer for certain modalities can be utilized.},
  author        = {Schäfer, Robbie and Müller, Wolfgang and Marin-López, Andrés and Díaz-Sánchez, Daniel},
  booktitle     = {Proceedings of the 9th International Conference on Human Computer Interaction with Mobile Devices and Services ({MobileHCI2007})},
  keywords      = {Smart Card, Abstract User Interface, Device Independence},
  title         = {Device Independent User Interfaces for {Smart Cards}},
  year          = {2007},
  internal-note = {Review: single-braced author field so the " and " separators parse as four names; un-double-braced title with brace protection for "Smart Cards"; joined hard-wrapped abstract lines.},
}

@inproceedings{38543,
  abstract      = {Today a large variety of mobile interaction devices such as PDAs and mobile phones enforce the development of a wide range of user interfaces for each platform. The complexity even grows, when multiple interaction devices are used to perform the same task and when different modalities have to be supported. We introduce a new dialog model for the abstraction of concrete user interfaces with a separate advanced control layer for the integration of different modalities. In this context, we present the Dialog and Interface Specification Language (DISL), which comes with a proof-of-concept implementation.},
  author        = {Schäfer, Robbie and Bleul, Steffen and Müller, Wolfgang},
  booktitle     = {Proceedings of the 5th International Workshop on Task Models and Diagrams for User Interface Design ({TAMODIA}'2006)},
  isbn          = {978-3-540-70815-5},
  keywords      = {User Interface, Interaction Manager, Output Device, Multimodal Interface, Interaction Object},
  title         = {Dialog Modelling for Multiple Devices and Multiple Interaction Modalities},
  doi           = {10.1007/978-3-540-70816-2_4},
  year          = {2006},
  internal-note = {Review: single-braced author field so the " and " separators parse as three names; un-double-braced title; keywords were space-run-separated (publisher auto-export junk), normalized to comma-separated.},
}

@inproceedings{39350,
  abstract      = {Variation in different mobile devices with different capabilities and interaction modalities as well as changing user context in nomadic applications, poses huge challenges to the design of user interfaces. To avoid multiple designs for each device or modality, it is almost a must to employ a model-based approach. In this short paper, we present a new dialog model for multimodal interaction together with an advanced control model, which can either be used for direct modeling by an interface designer or in conjunction with higher level models.},
  author        = {Schäfer, Robbie and Bleul, Steffen and Müller, Wolfgang},
  booktitle     = {Proceedings of {EHCI-DSVIS} 2005},
  keywords      = {Multimodal User Interface, High Level Model, Multimodal User, High Level Approach, Dialog Model},
  title         = {A Novel Dialog Model for the Design of Multimodal User Interfaces},
  year          = {2004},
  internal-note = {Review: year (2004) conflicts with the booktitle ("EHCI-DSVIS 2005") -- verify against the published proceedings before citing. Single-braced author field so the " and " separators parse as three names; un-double-braced title; keywords were space-run-separated, normalized to comma-separated.},
}

