@inproceedings{53821,
  author       = {Yigitbas, Enes and Kaltschmidt, Christian},
  booktitle    = {Proceedings of the 8th International Conference on Artificial Intelligence and Virtual Reality (AIVR'24)},
  publisher    = {Springer},
  title        = {Effects of Human Avatar Representation in {Virtual Reality} on Inter-Brain Connections},
  year         = {2024},
}

@inproceedings{53818,
  author       = {Krings, Sarah Claudia and Biermeier, Kai and Yigitbas, Enes},
  booktitle    = {Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE'24)},
  title        = {Interaction Techniques for Remote Maintenance in an {AR} Shared Environment},
  year         = {2024},
}

@inproceedings{53820,
  author       = {Leichtwei{\ss}, Justus and Yigitbas, Enes},
  booktitle    = {Proceedings of the 12th International Conference on Serious Games and Applications for Health (SeGAH'24)},
  title        = {An Exploratory Study of Fear-Inducing Factors in {Virtual Reality} Experiences},
  year         = {2024},
}

@inproceedings{53817,
  author       = {Krois, Sebastian and Yigitbas, Enes},
  booktitle    = {Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE'24)},
  title        = {Prototyping Cross-Reality Escape Rooms},
  year         = {2024},
}

@inproceedings{53819,
  author       = {Krings, Sarah Claudia and Yigitbas, Enes and Sauer, Stefan},
  booktitle    = {Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE'24)},
  title        = {Developing a {VR} Factory Walkthrough for Use in Schools},
  year         = {2024},
}

@inproceedings{53816,
  abstract     = {Augmented (AR) and Virtual Reality (VR) technologies have been applied very broadly in the recent past. While prior work emphasizes the potential of these technologies in various application domains, the process of visual attention in and across the contexts of AR/VR environments is not exhaustively explored yet. By now, visual attention in AR/VR environments has majorly been studied by means of overt attention (i.e. saccadic eye movements), self-report, and process-related visual attention proxies (like reaction time). In this work, we analyze covert visual attention based on the (psychological) Theory of Visual Attention (TVA), which allows us to quantify theory-based interpretable properties of the visual attention process. For example, the TVA allows us to measure the overall processing speed. We instantiate this TVA-based framework with a 30-participant explorative within-subjects study. The results show a decisive difference in visual attention between Reality (i.e. the neutral condition) and Virtual Reality and a weak difference between Reality and Augmented Reality. We discuss the consequences of our findings and provide ideas for future studies.},
  author       = {Biermeier, Kai and Scharlau, Ingrid and Yigitbas, Enes},
  booktitle    = {Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)},
  doi          = {10.1145/3652037.3652050},
  keywords     = {Visual Attention, TVA, Cognitive Modelling, Bayesian Modelling, AR, VR},
  publisher    = {ACM},
  title        = {Measuring Visual Attention Capacity Across {xReality}},
  year         = {2024},
}

@inproceedings{55895,
  author       = {Yigitbas, Enes and Dell'Aquila, Alessio},
  booktitle    = {Proceedings of the 8th International Conference on Computer-Human Interaction Research and Applications (CHIRA'24)},
  publisher    = {Springer},
  title        = {An Examination of Pre-School Children's Usage Behavior of Augmented Reality: Traditional vs. {AR}-Assisted {LEGO} Building},
  year         = {2024},
}

@inproceedings{55896,
  author       = {Neumayr, Thomas and Yigitbas, Enes and Augstein, Mirjam and Herder, Eelco and Stojko, Laura and Strecker, Jannis},
  booktitle    = {Proceedings of the Mensch \& Computer (2024)},
  title        = {{ABIS} 2024 -- 28th International Workshop on Personalization and Recommendation},
  year         = {2024},
}

@inproceedings{55897,
  author       = {Mazur, Janet and Yigitbas, Enes},
  booktitle    = {Proceedings of the 28th International Workshop on Personalization and Recommendation},
  title        = {{Augmented Reality}-Assisted Multi-Robot Programming with Collision Warning},
  year         = {2024},
}

@book{55898,
  editor       = {Fazal-Baqaie, Masud and Linssen, Oliver and Volland, Alexander and Yigitbas, Enes and Engstler, Martin and Bertram, Martin and Hanser, Eckhart},
  title        = {Projektmanagement Und Vorgehensmodelle 2024 -- Neues Arbeiten in Projekten -- Teamarbeit neu interpretiert},
  year         = {2024},
}

@article{54277,
  author       = {Herder, Eelco and Stojko, Laura and Strecker, Jannis and Neumayr, Thomas and Yigitbas, Enes and Augstein, Mirjam},
  journal      = {i-com -- Journal of Interactive Media, Special Issue on ``The Future of Human-Computer Interaction''},
  title        = {Towards New Realities: Implications of Personalized Online Layers in Our Daily Lives},
  year         = {2024},
}

@book{47547,
  editor       = {Kalenborn, Axel and Fazal-Baqaie, Masud and Linssen, Oliver and Volland, Alexander and Yigitbas, Enes and Engstler, Martin and Bertram, Martin},
  publisher    = {Gesellschaft f{\"u}r Informatik e.V.},
  title        = {Projektmanagement Und Vorgehensmodelle 2023 -- Nachhaltige {IT}-Projekte},
  volume       = {P340},
  year         = {2023},
}

@inproceedings{47049,
  abstract     = {As technology advances, Unmanned Aerial Vehicles (UAVs) have emerged as an innovative solution to a variety of problems in many fields. Automated control of UAVs is most common in large area operations, but they may also increase the versatility of smart home compositions by acting as a physical helper. For example, a voice-controlled UAV could act as an intelligent aerial assistant that can be seamlessly integrated into smart home systems. In this paper, we present a novel Augmented Reality (AR)-based UAV control that provides high-level control over a UAV by automating common UAV missions. In our work, we enable users to operate a small UAV hands-free using only a small set of voice commands. To help users identify the targets, and to understand the UAV's intentions, targets within the user's field of vision are highlighted in an AR interface. We evaluate our approach in a user study (n=26) regarding usability, physical and mental demand, as well as a focus on the users' preferences. Our study showed that the use of the proposed control was not only accepted, but some users stated that they would use such a system at home to help with some tasks at home.},
  author       = {Helmert, Robin and Hardes, Tobias and Yigitbas, Enes},
  booktitle    = {Proceedings of the ACM Symposium on Spatial User Interaction (SUI 2023)},
  location     = {Sydney, Australia},
  publisher    = {ACM},
  title        = {Design and Evaluation of an {AR} Voice-based Indoor {UAV} Assistant for Smart Home Scenarios},
  year         = {2023},
}

@article{34402,
  author       = {Yigitbas, Enes and Klauke, Jonas and Gottschalk, Sebastian and Engels, Gregor},
  journal      = {Journal on Computer Languages (COLA)},
  publisher    = {Elsevier},
  title        = {End-User Development of Interactive Web-Based {Virtual Reality} Scenes},
  year         = {2023},
}

@inproceedings{33511,
  author       = {Yigitbas, Enes and Engels, Gregor},
  booktitle    = {56th Hawaii International Conference on System Science (HICSS 2023)},
  publisher    = {ScholarSpace},
  title        = {Enhancing Robot Programming through {Digital Twin} and {Augmented Reality}},
  year         = {2023},
}

@inproceedings{34401,
  author       = {Yigitbas, Enes and Krois, Sebastian and Gottschalk, Sebastian and Engels, Gregor},
  booktitle    = {Proceedings of the 7th International Conference on Human Computer Interaction Theory and Applications (HUCAPP'23)},
  title        = {Towards Enhanced Guiding Mechanisms in {VR} Training through Process Mining},
  year         = {2023},
}

@inproceedings{43424,
  author       = {Yigitbas, Enes and Nowosad, Alexander and Engels, Gregor},
  booktitle    = {Proceedings of the 19th IFIP TC13 International Conference on Human-Computer Interaction (INTERACT 2023)},
  publisher    = {Springer},
  title        = {Supporting Construction and Architectural Visualization through {BIM} and {AR}/{VR}: A Systematic Literature Review},
  year         = {2023},
}

@inproceedings{47050,
  author       = {Wecker, Daniel and Yigitbas, Enes},
  booktitle    = {Proceedings of the ACM Symposium on Spatial User Interaction (SUI 2023)},
  publisher    = {ACM},
  title        = {Minimizing Eye Movements and Distractions in Head-Mounted {Augmented Reality} through Eye-Gaze Adaptiveness},
  year         = {2023},
}

@article{47051,
  author       = {Yigitbas, Enes and Schmidt, Maximilian and Bucchiarone, Antonio and Gottschalk, Sebastian and Engels, Gregor},
  journal      = {Science of Computer Programming},
  publisher    = {Elsevier},
  title        = {{GaMoVR}: Gamification-Based {UML} Learning Environment in {Virtual Reality}},
  year         = {2023},
}

@inproceedings{47057,
  author       = {Schmidt, Leonard and Yigitbas, Enes},
  booktitle    = {Proceedings of the 27th International Workshop on Personalization and Recommendation},
  publisher    = {GI DL},
  title        = {Transitional Cross Reality Interfaces for Spatially Demanding Search and Collect Tasks},
  year         = {2023},
}

