[{"publication_identifier":{"issn":["1664-1078"]},"publication_status":"published","intvolume":"        16","citation":{"ieee":"T. M. Peters, K. Biermeier, and I. Scharlau, “Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention,” <i>Frontiers in Psychology</i>, vol. 16, Art. no. 1694367, 2026, doi: <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>.","chicago":"Peters, Tobias Martin, Kai Biermeier, and Ingrid Scharlau. “Assessing Healthy Distrust in Human-AI Interaction: Interpreting Changes in Visual Attention.” <i>Frontiers in Psychology</i> 16 (2026). <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">https://doi.org/10.3389/fpsyg.2025.1694367</a>.","ama":"Peters TM, Biermeier K, Scharlau I. Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention. <i>Frontiers in Psychology</i>. 2026;16. doi:<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>","short":"T.M. Peters, K. Biermeier, I. Scharlau, Frontiers in Psychology 16 (2026).","mla":"Peters, Tobias Martin, et al. “Assessing Healthy Distrust in Human-AI Interaction: Interpreting Changes in Visual Attention.” <i>Frontiers in Psychology</i>, vol. 16, 1694367, Frontiers Media SA, 2026, doi:<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>.","bibtex":"@article{Peters_Biermeier_Scharlau_2026, title={Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention}, volume={16}, DOI={<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>}, number={1694367}, journal={Frontiers in Psychology}, publisher={Frontiers Media SA}, author={Peters, Tobias Martin and Biermeier, Kai and Scharlau, Ingrid}, year={2026} }","apa":"Peters, T. M., Biermeier, K., &#38; Scharlau, I. (2026). Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention. 
<i>Frontiers in Psychology</i>, <i>16</i>, Article 1694367. <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">https://doi.org/10.3389/fpsyg.2025.1694367</a>"},"year":"2026","volume":16,"date_created":"2026-01-14T14:21:59Z","author":[{"full_name":"Peters, Tobias Martin","id":"92810","orcid":"0009-0008-5193-6243","last_name":"Peters","first_name":"Tobias Martin"},{"full_name":"Biermeier, Kai","id":"55908","last_name":"Biermeier","orcid":"0000-0002-2879-2359","first_name":"Kai"},{"first_name":"Ingrid","id":"451","full_name":"Scharlau, Ingrid","orcid":"0000-0003-2364-9489","last_name":"Scharlau"}],"publisher":"Frontiers Media SA","date_updated":"2026-01-14T14:29:03Z","doi":"10.3389/fpsyg.2025.1694367","title":"Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention","publication":"Frontiers in Psychology","type":"journal_article","status":"public","abstract":[{"text":"When humans interact with artificial intelligence (AI), one desideratum is appropriate trust. Typically, appropriate trust encompasses that humans trust AI except for instances in which they either explicitly notice AI errors or are suspicious that errors could be present. So far, appropriate trust or related notions have mainly been investigated by assessing trust and reliance. In this contribution, we argue that these assessments are insufficient to measure the complex aim of appropriate trust and the related notion of healthy distrust. We introduce and test the perspective of covert visual attention as an additional indicator for appropriate trust and draw conceptual connections to the notion of healthy distrust. To test the validity of our conceptualization, we formalize visual attention using the Theory of Visual Attention and measure its properties that are potentially relevant to appropriate trust and healthy distrust in an image classification task. 
Based on temporal-order judgment performance, we estimate participants' attentional capacity and attentional weight toward correct and incorrect mock-up AI classifications. We observe that misclassifications reduce attentional capacity compared to correct classifications. However, our results do not indicate that this reduction is beneficial for a subsequent judgment of the classifications. The attentional weighting is not affected by the classifications' correctness but by the difficulty of categorizing the stimuli themselves. We discuss these results, their implications, and the limited potential for using visual attention as an indicator of appropriate trust and healthy distrust.","lang":"eng"}],"department":[{"_id":"424"},{"_id":"660"}],"user_id":"92810","_id":"63611","project":[{"name":"TRR 318 ; TP C01: Gesundes Misstrauen in Erklärungen","_id":"124"}],"language":[{"iso":"eng"}],"keyword":["appropriate trust","healthy distrust","visual attention","Theory of Visual Attention","human-AI interaction","Bayesian cognitive model","image classification"],"article_type":"original","article_number":"1694367"},{"title":"Interaction Techniques for Remote Maintenance in an AR Shared Environment","date_created":"2024-05-02T10:31:46Z","author":[{"last_name":"Krings","orcid":"0000-0001-8040-7553","full_name":"Krings, Sarah Claudia","id":"64063","first_name":"Sarah Claudia"},{"full_name":"Biermeier, Kai","id":"55908","last_name":"Biermeier","orcid":"0000-0002-2879-2359","first_name":"Kai"},{"full_name":"Yigitbas, Enes","id":"8447","last_name":"Yigitbas","orcid":"0000-0002-5967-833X","first_name":"Enes"}],"date_updated":"2024-05-02T10:31:53Z","citation":{"ieee":"S. C. Krings, K. Biermeier, and E. Yigitbas, “Interaction Techniques for Remote Maintenance in an AR Shared Environment,” 2024.","chicago":"Krings, Sarah Claudia, Kai Biermeier, and Enes Yigitbas. 
“Interaction Techniques for Remote Maintenance in an AR Shared Environment.” In <i>Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24)</i>, 2024.","ama":"Krings SC, Biermeier K, Yigitbas E. Interaction Techniques for Remote Maintenance in an AR Shared Environment. In: <i>Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24)</i>. ; 2024.","mla":"Krings, Sarah Claudia, et al. “Interaction Techniques for Remote Maintenance in an AR Shared Environment.” <i>Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24)</i>, 2024.","bibtex":"@inproceedings{Krings_Biermeier_Yigitbas_2024, title={Interaction Techniques for Remote Maintenance in an AR Shared Environment}, booktitle={Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24)}, author={Krings, Sarah Claudia and Biermeier, Kai and Yigitbas, Enes}, year={2024} }","short":"S.C. Krings, K. Biermeier, E. Yigitbas, in: Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24), 2024.","apa":"Krings, S. C., Biermeier, K., &#38; Yigitbas, E. (2024). Interaction Techniques for Remote Maintenance in an AR Shared Environment. <i>Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE’24)</i>."},"year":"2024","language":[{"iso":"eng"}],"user_id":"8447","department":[{"_id":"66"},{"_id":"534"}],"_id":"53818","status":"public","type":"conference","publication":"Proceedings of the 10th International Working Conference on Human-Centered Software Engineering (HCSE'24)"},{"citation":{"apa":"Biermeier, K., Scharlau, I., &#38; Yigitbas, E. (2024). Measuring Visual Attention Capacity Across xReality. <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. 
<a href=\"https://doi.org/10.1145/3652037.3652050\">https://doi.org/10.1145/3652037.3652050</a>","mla":"Biermeier, Kai, et al. “Measuring Visual Attention Capacity Across XReality.” <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>, ACM, 2024, doi:<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>.","short":"K. Biermeier, I. Scharlau, E. Yigitbas, in: Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024), ACM, 2024.","bibtex":"@inproceedings{Biermeier_Scharlau_Yigitbas_2024, title={Measuring Visual Attention Capacity Across xReality}, DOI={<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>}, booktitle={Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)}, publisher={ACM}, author={Biermeier, Kai and Scharlau, Ingrid and Yigitbas, Enes}, year={2024} }","ieee":"K. Biermeier, I. Scharlau, and E. Yigitbas, “Measuring Visual Attention Capacity Across xReality,” 2024, doi: <a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>.","chicago":"Biermeier, Kai, Ingrid Scharlau, and Enes Yigitbas. “Measuring Visual Attention Capacity Across XReality.” In <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. ACM, 2024. <a href=\"https://doi.org/10.1145/3652037.3652050\">https://doi.org/10.1145/3652037.3652050</a>.","ama":"Biermeier K, Scharlau I, Yigitbas E. Measuring Visual Attention Capacity Across xReality. In: <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. ACM; 2024. 
doi:<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>"},"year":"2024","doi":"10.1145/3652037.3652050","main_file_link":[{"open_access":"1","url":"https://dl.acm.org/doi/pdf/10.1145/3652037.3652050"}],"title":"Measuring Visual Attention Capacity Across xReality","author":[{"first_name":"Kai","orcid":"0000-0002-2879-2359","last_name":"Biermeier","id":"55908","full_name":"Biermeier, Kai"},{"first_name":"Ingrid","last_name":"Scharlau","orcid":"0000-0003-2364-9489","full_name":"Scharlau, Ingrid","id":"451"},{"orcid":"0000-0002-5967-833X","last_name":"Yigitbas","full_name":"Yigitbas, Enes","id":"8447","first_name":"Enes"}],"date_created":"2024-05-02T10:28:03Z","date_updated":"2024-07-08T08:32:21Z","oa":"1","publisher":"ACM","status":"public","abstract":[{"text":"Augmented (AR) and Virtual Reality (VR) technologies have been applied very broadly in the recent past. While prior work emphasizes the potential of these technologies in various application domains, the process of visual attention in and across the contexts of AR/VR environments is not exhaustively explored yet. By now, visual attention in AR/VR environments has majorly been studied by means of overt attention (i.e. saccadic eye movements), self-report, and process-related visual attention proxies (like reaction time). In this work, we analyze covert visual attention based on the (psychological) Theory of Visual Attention (TVA), which allows us to quantify theory-based interpretable properties of the visual attention process. For example, the TVA allows us to measure the overall processing speed. We instantiate this TVA-based framework with a 30-participant explorative within-subjects study. The results show a decisive difference in visual attention between Reality (i.e. the neutral condition) and Virtual Reality and a weak difference between Reality and Augmented Reality. 
We discuss the consequences of our findings and provide ideas for future studies.","lang":"eng"}],"publication":"Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)","type":"conference","language":[{"iso":"eng"}],"keyword":["Visual Attention","TVA","Cognitive Modelling","Bayesian Modelling","AR","VR"],"department":[{"_id":"66"},{"_id":"534"},{"_id":"424"}],"user_id":"55908","_id":"53816"},{"publication":"Interaction Studies","abstract":[{"text":"Everyday explanations are interactive processes with the aim to provide a less knowledgeable person with reasonable information about other people, objects, or events. Because explanations are interactive communicative processes, the topical structure of an explanation may vary dynamically depending on the immediate feedback of the explainee. In this paper, we analyse topical transitions in medical explanations organised by different physicians (explainers) related to different forms of multimodal behaviour of caregivers (explainees) attending an explanation about the procedures of\r\nan upcoming surgery of a child. 
The analyses reveal that explainees’ multimodal behaviour with gaze shifts (and particularly gaze aversion) can predict a transition from an elaborated topic to a new one, whereas explainees’ forms of multimodal behaviour with static gaze cannot be related to changes of the topical structure.","lang":"eng"}],"keyword":["explanations","multimodal behaviour","elaborations","conditional probabilities"],"language":[{"iso":"eng"}],"issue":"3","year":"2024","publisher":"John Benjamins","date_created":"2025-05-14T06:25:13Z","title":"Changes in the topical structure of explanations are related to explainees’ multimodal behaviour","type":"journal_article","status":"public","_id":"59888","project":[{"grant_number":"438445824","name":"TRR 318 - A02: TRR 318 - Verstehensprozess einer Erklärung beobachten und auswerten (Teilprojekt A02)","_id":"112"}],"user_id":"90345","article_type":"original","publication_identifier":{"eissn":["1572-0381"]},"publication_status":"published","intvolume":"        25","page":"257 - 280","citation":{"ama":"Lazarov ST, Biermeier K, Grimminger A. Changes in the topical structure of explanations are related to explainees’ multimodal behaviour. <i>Interaction Studies</i>. 2024;25(3):257-280. doi:<a href=\"https://doi.org/10.1075/is.23033.laz\">10.1075/is.23033.laz</a>","ieee":"S. T. Lazarov, K. Biermeier, and A. Grimminger, “Changes in the topical structure of explanations are related to explainees’ multimodal behaviour,” <i>Interaction Studies</i>, vol. 25, no. 3, pp. 257–280, 2024, doi: <a href=\"https://doi.org/10.1075/is.23033.laz\">10.1075/is.23033.laz</a>.","chicago":"Lazarov, Stefan Teodorov, Kai Biermeier, and Angela Grimminger. “Changes in the Topical Structure of Explanations Are Related to Explainees’ Multimodal Behaviour.” <i>Interaction Studies</i> 25, no. 3 (2024): 257–80. <a href=\"https://doi.org/10.1075/is.23033.laz\">https://doi.org/10.1075/is.23033.laz</a>.","mla":"Lazarov, Stefan Teodorov, et al. 
“Changes in the Topical Structure of Explanations Are Related to Explainees’ Multimodal Behaviour.” <i>Interaction Studies</i>, vol. 25, no. 3, John Benjamins, 2024, pp. 257–80, doi:<a href=\"https://doi.org/10.1075/is.23033.laz\">10.1075/is.23033.laz</a>.","bibtex":"@article{Lazarov_Biermeier_Grimminger_2024, title={Changes in the topical structure of explanations are related to explainees’ multimodal behaviour}, volume={25}, DOI={<a href=\"https://doi.org/10.1075/is.23033.laz\">10.1075/is.23033.laz</a>}, number={3}, journal={Interaction Studies}, publisher={John Benjamins}, author={Lazarov, Stefan Teodorov and Biermeier, Kai and Grimminger, Angela}, year={2024}, pages={257–280} }","short":"S.T. Lazarov, K. Biermeier, A. Grimminger, Interaction Studies 25 (2024) 257–280.","apa":"Lazarov, S. T., Biermeier, K., &#38; Grimminger, A. (2024). Changes in the topical structure of explanations are related to explainees’ multimodal behaviour. <i>Interaction Studies</i>, <i>25</i>(3), 257–280. <a href=\"https://doi.org/10.1075/is.23033.laz\">https://doi.org/10.1075/is.23033.laz</a>"},"oa":"1","date_updated":"2025-06-27T13:57:36Z","volume":25,"author":[{"first_name":"Stefan Teodorov","id":"90345","full_name":"Lazarov, Stefan Teodorov","orcid":"0009-0009-0892-9483","last_name":"Lazarov"},{"orcid":"0000-0002-2879-2359","last_name":"Biermeier","full_name":"Biermeier, Kai","id":"55908","first_name":"Kai"},{"last_name":"Grimminger","full_name":"Grimminger, Angela","id":"57578","first_name":"Angela"}],"doi":"10.1075/is.23033.laz","main_file_link":[{"url":"https://www.jbe-platform.com/content/journals/10.1075/is.23033.laz#metrics_content","open_access":"1"}]},{"type":"conference","publication":"Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)","status":"public","user_id":"8447","department":[{"_id":"66"},{"_id":"534"}],"_id":"30883","language":[{"iso":"eng"}],"citation":{"ama":"Krings SC, Yigitbas E, Biermeier K, Engels G. 
Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies. In: <i>Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)</i>. ; 2022.","ieee":"S. C. Krings, E. Yigitbas, K. Biermeier, and G. Engels, “Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies,” 2022.","chicago":"Krings, Sarah Claudia, Enes Yigitbas, Kai Biermeier, and Gregor Engels. “Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies.” In <i>Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)</i>, 2022.","apa":"Krings, S. C., Yigitbas, E., Biermeier, K., &#38; Engels, G. (2022). Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies. <i>Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)</i>.","mla":"Krings, Sarah Claudia, et al. “Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies.” <i>Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)</i>, 2022.","short":"S.C. Krings, E. Yigitbas, K. Biermeier, G. 
Engels, in: Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022), 2022.","bibtex":"@inproceedings{Krings_Yigitbas_Biermeier_Engels_2022, title={Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies}, booktitle={Proceedings of the 14th ACM SIGCHI Symposium on Engineering Interactive Computing Systems (EICS 2022)}, author={Krings, Sarah Claudia and Yigitbas, Enes and Biermeier, Kai and Engels, Gregor}, year={2022} }"},"year":"2022","author":[{"id":"64063","full_name":"Krings, Sarah Claudia","last_name":"Krings","orcid":"0000-0001-8040-7553","first_name":"Sarah Claudia"},{"orcid":"0000-0002-5967-833X","last_name":"Yigitbas","id":"8447","full_name":"Yigitbas, Enes","first_name":"Enes"},{"first_name":"Kai","full_name":"Biermeier, Kai","id":"55908","orcid":"0000-0002-2879-2359","last_name":"Biermeier"},{"first_name":"Gregor","last_name":"Engels","id":"107","full_name":"Engels, Gregor"}],"date_created":"2022-04-13T08:11:15Z","date_updated":"2023-12-07T10:42:07Z","title":"Design and Evaluation of AR-Assisted End-User Robot Path Planning Strategies"},{"year":"2021","citation":{"apa":"Biermeier, K., Yigitbas, E., Weidmann, N., &#38; Engels, G. (2021). Ensuring User Interface Adaptation Consistency through Triple Graph Grammers. <i>Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use </i>.","short":"K. Biermeier, E. Yigitbas, N. Weidmann, G. Engels, in: Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use , 2021.","mla":"Biermeier, Kai, et al. “Ensuring User Interface Adaptation Consistency through Triple Graph Grammers.” <i>Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use </i>, 2021.","bibtex":"@inproceedings{Biermeier_Yigitbas_Weidmann_Engels_2021, title={Ensuring User Interface Adaptation Consistency through Triple Graph Grammers}, booktitle={Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use }, author={Biermeier, Kai and Yigitbas, Enes and Weidmann, Nils and Engels, Gregor}, year={2021} }","ieee":"K. Biermeier, E. Yigitbas, N. Weidmann, and G. Engels, “Ensuring User Interface Adaptation Consistency through Triple Graph Grammers,” 2021.","chicago":"Biermeier, Kai, Enes Yigitbas, Nils Weidmann, and Gregor Engels. “Ensuring User Interface Adaptation Consistency through Triple Graph Grammers.” In <i>Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use </i>, 2021.","ama":"Biermeier K, Yigitbas E, Weidmann N, Engels G. Ensuring User Interface Adaptation Consistency through Triple Graph Grammers. In: <i>Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use </i>. 
; 2021."},"title":"Ensuring User Interface Adaptation Consistency through Triple Graph Grammers","date_updated":"2024-04-02T09:39:05Z","author":[{"first_name":"Kai","full_name":"Biermeier, Kai","id":"55908","orcid":"0000-0002-2879-2359","last_name":"Biermeier"},{"first_name":"Enes","id":"8447","full_name":"Yigitbas, Enes","last_name":"Yigitbas","orcid":"0000-0002-5967-833X"},{"id":"53103","full_name":"Weidmann, Nils","last_name":"Weidmann","first_name":"Nils"},{"first_name":"Gregor","full_name":"Engels, Gregor","id":"107","last_name":"Engels"}],"date_created":"2021-06-07T07:32:40Z","status":"public","publication":"Proceedings of the International Workshop on Human-Centered Software Engineering for Changing Contexts of Use ","type":"conference","language":[{"iso":"eng"}],"_id":"22285","department":[{"_id":"66"},{"_id":"534"}],"user_id":"8447"},{"year":"2020","citation":{"apa":"Yigitbas, E., Jovanovikj, I., Biermeier, K., Sauer, S., &#38; Engels, G. (2020). Integrated Model-driven Development of Self-adaptive User Interfaces . <i>International Journal on Software and Systems Modeling (SoSyM)</i>.","short":"E. Yigitbas, I. Jovanovikj, K. Biermeier, S. Sauer, G. Engels, International Journal on Software and Systems Modeling (SoSyM) (2020).","mla":"Yigitbas, Enes, et al. “Integrated Model-Driven Development of Self-Adaptive User Interfaces .” <i>International Journal on Software and Systems Modeling (SoSyM)</i>, Springer, 2020.","bibtex":"@article{Yigitbas_Jovanovikj_Biermeier_Sauer_Engels_2020, title={Integrated Model-driven Development of Self-adaptive User Interfaces }, journal={International Journal on Software and Systems Modeling (SoSyM)}, publisher={Springer}, author={Yigitbas, Enes and Jovanovikj, Ivan and Biermeier, Kai and Sauer, Stefan and Engels, Gregor}, year={2020} }","ieee":"E. Yigitbas, I. Jovanovikj, K. Biermeier, S. Sauer, and G. Engels, “Integrated Model-driven Development of Self-adaptive User Interfaces ,” <i>International Journal on Software and Systems Modeling (SoSyM)</i>, 2020.","chicago":"Yigitbas, Enes, Ivan Jovanovikj, Kai Biermeier, Stefan Sauer, and Gregor Engels. “Integrated Model-Driven Development of Self-Adaptive User Interfaces .” <i>International Journal on Software and Systems Modeling (SoSyM)</i>, 2020.","ama":"Yigitbas E, Jovanovikj I, Biermeier K, Sauer S, Engels G. Integrated Model-driven Development of Self-adaptive User Interfaces . <i>International Journal on Software and Systems Modeling (SoSyM)</i>. Published online 2020."},"date_updated":"2025-09-10T11:41:54Z","publisher":"Springer","author":[{"first_name":"Enes","orcid":"0000-0002-5967-833X","last_name":"Yigitbas","full_name":"Yigitbas, Enes","id":"8447"},{"first_name":"Ivan","orcid":"0000-0002-1838-794X","last_name":"Jovanovikj","full_name":"Jovanovikj, Ivan","id":"39187"},{"first_name":"Kai","full_name":"Biermeier, Kai","id":"55908","last_name":"Biermeier","orcid":"0000-0002-2879-2359"},{"first_name":"Stefan","orcid":"0000-0003-3084-0409","last_name":"Sauer","full_name":"Sauer, Stefan","id":"447"},{"first_name":"Gregor","last_name":"Engels","full_name":"Engels, Gregor","id":"107"}],"date_created":"2019-12-10T11:31:55Z","title":"Integrated Model-driven Development of Self-adaptive User Interfaces ","main_file_link":[{"url":"https://doi.org/10.1007/s10270-020-00777-7"}],"type":"journal_article","publication":"International Journal on Software and Systems Modeling (SoSyM)","status":"public","_id":"15266","user_id":"8447","department":[{"_id":"66"},{"_id":"534"}],"language":[{"iso":"eng"}]}]
