[{"language":[{"iso":"eng"}],"keyword":["Understanding","Nonverbal Social Signals","Stress Induction","Explanation","Machine Learning Bias"],"ddc":["150"],"publication":"12th International Conference on  Affective Computing & Intelligent Interaction","file":[{"date_created":"2025-09-15T11:18:01Z","creator":"paletsch","date_updated":"2025-09-15T11:18:01Z","file_name":"ACII2024_Camera_Ready.pdf","file_id":"61274","access_level":"closed","file_size":8807478,"content_type":"application/pdf","relation":"main_file","success":1}],"abstract":[{"text":"In human-machine explanation interactions, such as tutoring systems or customer support chatbots, it is important for the machine explainer to infer the human user's understanding.  Nonverbal signals play an important role for expressing mental states like understanding and confusion in these interactions. However, an individual's expressions may vary depending on other factors. In cases where these factors are unknown, machine learning methods that infer understanding from nonverbal cues become unreliable. Stress for example has been shown to affect human expression, but it is not clear from the current research how stress affects the expression of understanding.\r\nTo address this gap, we design a paradigm that induces understanding and confusion through game rule explanations. During the explanations, self-perceived understanding and confusion are annotated by the participants. A stress condition is also introduced to enable the investigation of changes in the expression of social signals under stress.\r\nWe conducted a study to validate the stress induction and participants reported a statistically significant increase in stress during the stress condition compared to the neutral control condition. \r\nAdditionally, feedback from participants shows that the paradigm is effective in inducing understanding and confusion. 
\r\nThis paradigm paves the way for further studies investigating social signals of understanding to improve human-machine explanation interactions for varying contexts.","lang":"eng"}],"date_created":"2025-09-15T11:24:56Z","publisher":"IEEE","title":"A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress","year":"2024","department":[{"_id":"660"}],"user_id":"98941","_id":"61273","project":[{"_id":"1200","name":"TRR 318 - Teilprojekt A6 - Inklusive Ko-Konstruktion sozialer Signale des Verstehens"}],"file_date_updated":"2025-09-15T11:18:01Z","type":"conference","status":"public","author":[{"first_name":"Jonas","full_name":"Paletschek, Jonas","id":"98941","last_name":"Paletschek"}],"date_updated":"2025-09-16T07:57:53Z","conference":{"start_date":"2024-09-15","name":"12th International Conference on  Affective Computing & Intelligent Interaction","location":"Glasgow","end_date":"2024-09-18"},"doi":"10.1109/ACII63134.2024.00040","has_accepted_license":"1","publication_status":"published","citation":{"apa":"Paletschek, J. (2024). A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress. <i>12th International Conference on  Affective Computing &#38; Intelligent Interaction</i>. 12th International Conference on  Affective Computing &#38; Intelligent Interaction, Glasgow. <a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">https://doi.org/10.1109/ACII63134.2024.00040</a>","bibtex":"@inproceedings{Paletschek_2024, title={A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress}, DOI={<a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">10.1109/ACII63134.2024.00040</a>}, booktitle={12th International Conference on  Affective Computing &#38; Intelligent Interaction}, publisher={IEEE}, author={Paletschek, Jonas}, year={2024} }","short":"J. 
Paletschek, in: 12th International Conference on  Affective Computing &#38; Intelligent Interaction, IEEE, 2024.","mla":"Paletschek, Jonas. “A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress.” <i>12th International Conference on  Affective Computing &#38; Intelligent Interaction</i>, IEEE, 2024, doi:<a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">10.1109/ACII63134.2024.00040</a>.","ieee":"J. Paletschek, “A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress,” presented at the 12th International Conference on  Affective Computing &#38; Intelligent Interaction, Glasgow, 2024, doi: <a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">10.1109/ACII63134.2024.00040</a>.","chicago":"Paletschek, Jonas. “A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress.” In <i>12th International Conference on  Affective Computing &#38; Intelligent Interaction</i>. IEEE, 2024. <a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">https://doi.org/10.1109/ACII63134.2024.00040</a>.","ama":"Paletschek J. A Paradigm to Investigate Social Signals of Understanding and Their Susceptibility to Stress. In: <i>12th International Conference on  Affective Computing &#38; Intelligent Interaction</i>. IEEE; 2024. doi:<a href=\"https://doi.org/10.1109/ACII63134.2024.00040\">10.1109/ACII63134.2024.00040</a>"},{"language":[{"iso":"eng"}],"ddc":["000"],"file":[{"file_size":3252812,"file_name":"Explainable_AI_for_Audio_and_Visual_Affective_Computing_A_Scoping_Review.pdf","file_id":"61291","access_level":"closed","date_updated":"2025-09-16T07:34:27Z","date_created":"2025-09-16T07:34:27Z","creator":"johnson","success":1,"relation":"main_file","content_type":"application/pdf"}],"abstract":[{"text":"Affective computing often relies on audiovisual data to identify affective states from non-verbal signals, such as facial expressions and vocal cues. 
Since automatic affect recognition can be used in sensitive applications, such as healthcare and education, it is crucial to understand how models arrive at their decisions. Interpretability of machine learning models is the goal of the emerging research area of Explainable AI (XAI). This scoping review aims to survey the field of audiovisual affective machine learning to identify how XAI is applied in this domain. We first provide an overview of XAI concepts relevant to affective computing. Next, following the recommended PRISMA guidelines, we perform a literature search in the ACM, IEEE, Web of Science and PubMed databases. After systematically reviewing 1190 articles, a final set of 65 papers is included in our analysis. We quantitatively summarize the scope, methods and evaluation of the XAI techniques used in the identified papers. Our findings show encouraging developments for using XAI to explain models in audiovisual affective computing, yet only a limited set of methods are used in the reviewed works. 
Following a critical discussion, we provide recommendations for incorporating interpretability in future work for affective machine learning.","lang":"eng"}],"publication":"IEEE Transactions on Affective Computing","title":"Explainable AI for Audio and Visual Affective Computing: A Scoping Review","date_created":"2025-09-16T07:24:07Z","publisher":"Institute of Electrical and Electronics Engineers (IEEE)","year":"2024","issue":"2","file_date_updated":"2025-09-16T07:34:27Z","article_type":"review","user_id":"97208","department":[{"_id":"660"}],"project":[{"name":"TRR 318 - Project Area A","_id":"110"},{"_id":"1204","name":"TRR 318 - Teilprojekt IRG BI"},{"_id":"1200","name":"TRR 318 - Teilprojekt A6 - Inklusive Ko-Konstruktion sozialer Signale des Verstehens"}],"_id":"61290","status":"public","type":"journal_article","doi":"10.1109/taffc.2024.3505269","author":[{"last_name":"Johnson","full_name":"Johnson, David","id":"97208","first_name":"David"},{"last_name":"Hakobyan","full_name":"Hakobyan, Olya","first_name":"Olya"},{"first_name":"Jonas","id":"98941","full_name":"Paletschek, Jonas","last_name":"Paletschek"},{"full_name":"Drimalla, Hanna","last_name":"Drimalla","first_name":"Hanna"}],"volume":16,"date_updated":"2025-09-16T08:02:23Z","citation":{"mla":"Johnson, David, et al. “Explainable AI for Audio and Visual Affective Computing: A Scoping Review.” <i>IEEE Transactions on Affective Computing</i>, vol. 16, no. 2, Institute of Electrical and Electronics Engineers (IEEE), 2024, pp. 
518–36, doi:<a href=\"https://doi.org/10.1109/taffc.2024.3505269\">10.1109/taffc.2024.3505269</a>.","bibtex":"@article{Johnson_Hakobyan_Paletschek_Drimalla_2024, title={Explainable AI for Audio and Visual Affective Computing: A Scoping Review}, volume={16}, DOI={<a href=\"https://doi.org/10.1109/taffc.2024.3505269\">10.1109/taffc.2024.3505269</a>}, number={2}, journal={IEEE Transactions on Affective Computing}, publisher={Institute of Electrical and Electronics Engineers (IEEE)}, author={Johnson, David and Hakobyan, Olya and Paletschek, Jonas and Drimalla, Hanna}, year={2024}, pages={518–536} }","short":"D. Johnson, O. Hakobyan, J. Paletschek, H. Drimalla, IEEE Transactions on Affective Computing 16 (2024) 518–536.","apa":"Johnson, D., Hakobyan, O., Paletschek, J., &#38; Drimalla, H. (2024). Explainable AI for Audio and Visual Affective Computing: A Scoping Review. <i>IEEE Transactions on Affective Computing</i>, <i>16</i>(2), 518–536. <a href=\"https://doi.org/10.1109/taffc.2024.3505269\">https://doi.org/10.1109/taffc.2024.3505269</a>","ieee":"D. Johnson, O. Hakobyan, J. Paletschek, and H. Drimalla, “Explainable AI for Audio and Visual Affective Computing: A Scoping Review,” <i>IEEE Transactions on Affective Computing</i>, vol. 16, no. 2, pp. 518–536, 2024, doi: <a href=\"https://doi.org/10.1109/taffc.2024.3505269\">10.1109/taffc.2024.3505269</a>.","chicago":"Johnson, David, Olya Hakobyan, Jonas Paletschek, and Hanna Drimalla. “Explainable AI for Audio and Visual Affective Computing: A Scoping Review.” <i>IEEE Transactions on Affective Computing</i> 16, no. 2 (2024): 518–36. <a href=\"https://doi.org/10.1109/taffc.2024.3505269\">https://doi.org/10.1109/taffc.2024.3505269</a>.","ama":"Johnson D, Hakobyan O, Paletschek J, Drimalla H. Explainable AI for Audio and Visual Affective Computing: A Scoping Review. <i>IEEE Transactions on Affective Computing</i>. 2024;16(2):518-536. 
doi:<a href=\"https://doi.org/10.1109/taffc.2024.3505269\">10.1109/taffc.2024.3505269</a>"},"intvolume":"        16","page":"518-536","publication_status":"published","publication_identifier":{"issn":["1949-3045","2371-9850"]},"has_accepted_license":"1"}]
