[{"publication_identifier":{"issn":["1664-1078"]},"publication_status":"published","year":"2026","intvolume":"        16","citation":{"mla":"Peters, Tobias Martin, et al. “Assessing Healthy Distrust in Human-AI Interaction: Interpreting Changes in Visual Attention.” <i>Frontiers in Psychology</i>, vol. 16, 1694367, Frontiers Media SA, 2026, doi:<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>.","short":"T.M. Peters, K. Biermeier, I. Scharlau, Frontiers in Psychology 16 (2026).","bibtex":"@article{Peters_Biermeier_Scharlau_2026, title={Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention}, volume={16}, DOI={<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>}, number={1694367}, journal={Frontiers in Psychology}, publisher={Frontiers Media SA}, author={Peters, Tobias Martin and Biermeier, Kai and Scharlau, Ingrid}, year={2026} }","apa":"Peters, T. M., Biermeier, K., &#38; Scharlau, I. (2026). Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention. <i>Frontiers in Psychology</i>, <i>16</i>, Article 1694367. <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">https://doi.org/10.3389/fpsyg.2025.1694367</a>","ieee":"T. M. Peters, K. Biermeier, and I. Scharlau, “Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention,” <i>Frontiers in Psychology</i>, vol. 16, Art. no. 1694367, 2026, doi: <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>.","chicago":"Peters, Tobias Martin, Kai Biermeier, and Ingrid Scharlau. “Assessing Healthy Distrust in Human-AI Interaction: Interpreting Changes in Visual Attention.” <i>Frontiers in Psychology</i> 16 (2026). <a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">https://doi.org/10.3389/fpsyg.2025.1694367</a>.","ama":"Peters TM, Biermeier K, Scharlau I. 
Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention. <i>Frontiers in Psychology</i>. 2026;16. doi:<a href=\"https://doi.org/10.3389/fpsyg.2025.1694367\">10.3389/fpsyg.2025.1694367</a>"},"date_updated":"2026-01-14T14:29:03Z","publisher":"Frontiers Media SA","volume":16,"author":[{"orcid":"0009-0008-5193-6243","last_name":"Peters","id":"92810","full_name":"Peters, Tobias Martin","first_name":"Tobias Martin"},{"first_name":"Kai","orcid":"0000-0002-2879-2359","last_name":"Biermeier","full_name":"Biermeier, Kai","id":"55908"},{"full_name":"Scharlau, Ingrid","id":"451","orcid":"0000-0003-2364-9489","last_name":"Scharlau","first_name":"Ingrid"}],"date_created":"2026-01-14T14:21:59Z","title":"Assessing healthy distrust in human-AI interaction: interpreting changes in visual attention","doi":"10.3389/fpsyg.2025.1694367","publication":"Frontiers in Psychology","type":"journal_article","abstract":[{"lang":"eng","text":"When humans interact with artificial intelligence (AI), one desideratum is appropriate trust. Typically, appropriate trust encompasses that humans trust AI except for instances in which they either explicitly notice AI errors or are suspicious that errors could be present. So far, appropriate trust or related notions have mainly been investigated by assessing trust and reliance. In this contribution, we argue that these assessments are insufficient to measure the complex aim of appropriate trust and the related notion of healthy distrust. We introduce and test the perspective of covert visual attention as an additional indicator for appropriate trust and draw conceptual connections to the notion of healthy distrust. To test the validity of our conceptualization, we formalize visual attention using the Theory of Visual Attention and measure its properties that are potentially relevant to appropriate trust and healthy distrust in an image classification task. 
Based on temporal-order judgment performance, we estimate participants' attentional capacity and attentional weight toward correct and incorrect mock-up AI classifications. We observe that misclassifications reduce attentional capacity compared to correct classifications. However, our results do not indicate that this reduction is beneficial for a subsequent judgment of the classifications. The attentional weighting is not affected by the classifications' correctness but by the difficulty of categorizing the stimuli themselves. We discuss these results, their implications, and the limited potential for using visual attention as an indicator of appropriate trust and healthy distrust."}],"status":"public","_id":"63611","project":[{"name":"TRR 318 ; TP C01: Gesundes Misstrauen in Erklärungen","_id":"124"}],"department":[{"_id":"424"},{"_id":"660"}],"user_id":"92810","keyword":["appropriate trust","healthy distrust","visual attention","Theory of Visual Attention","human-AI interaction","Bayesian cognitive model","image classification"],"article_type":"original","article_number":"1694367","language":[{"iso":"eng"}]},{"abstract":[{"text":"Augmented (AR) and Virtual Reality (VR) technologies have been applied very broadly in the recent past. While prior work emphasizes the potential of these technologies in various application domains, the process of visual attention in and across the contexts of AR/VR environments is not exhaustively explored yet. By now, visual attention in AR/VR environments has majorly been studied by means of overt attention (i.e. saccadic eye movements), self-report, and process-related visual attention proxies (like reaction time). In this work, we analyze covert visual attention based on the (psychological) Theory of Visual Attention (TVA), which allows us to quantify theory-based interpretable properties of the visual attention process. For example, the TVA allows us to measure the overall processing speed. 
We instantiate this TVA-based framework with a 30-participant explorative within-subjects study. The results show a decisive difference in visual attention between Reality (i.e. the neutral condition) and Virtual Reality and a weak difference between Reality and Augmented Reality. We discuss the consequences of our findings and provide ideas for future studies.","lang":"eng"}],"status":"public","publication":"Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)","type":"conference","keyword":["Visual Attention","TVA","Cognitive Modelling","Bayesian Modelling","AR","VR"],"language":[{"iso":"eng"}],"_id":"53816","department":[{"_id":"66"},{"_id":"534"},{"_id":"424"}],"user_id":"55908","year":"2024","citation":{"mla":"Biermeier, Kai, et al. “Measuring Visual Attention Capacity Across XReality.” <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>, ACM, 2024, doi:<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>.","short":"K. Biermeier, I. Scharlau, E. Yigitbas, in: Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024), ACM, 2024.","bibtex":"@inproceedings{Biermeier_Scharlau_Yigitbas_2024, title={Measuring Visual Attention Capacity Across xReality}, DOI={<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>}, booktitle={Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)}, publisher={ACM}, author={Biermeier, Kai and Scharlau, Ingrid and Yigitbas, Enes}, year={2024} }","apa":"Biermeier, K., Scharlau, I., &#38; Yigitbas, E. (2024). Measuring Visual Attention Capacity Across xReality. <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. 
<a href=\"https://doi.org/10.1145/3652037.3652050\">https://doi.org/10.1145/3652037.3652050</a>","ieee":"K. Biermeier, I. Scharlau, and E. Yigitbas, “Measuring Visual Attention Capacity Across xReality,” 2024, doi: <a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>.","chicago":"Biermeier, Kai, Ingrid Scharlau, and Enes Yigitbas. “Measuring Visual Attention Capacity Across XReality.” In <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. ACM, 2024. <a href=\"https://doi.org/10.1145/3652037.3652050\">https://doi.org/10.1145/3652037.3652050</a>.","ama":"Biermeier K, Scharlau I, Yigitbas E. Measuring Visual Attention Capacity Across xReality. In: <i>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments (PETRA 2024)</i>. ACM; 2024. doi:<a href=\"https://doi.org/10.1145/3652037.3652050\">10.1145/3652037.3652050</a>"},"title":"Measuring Visual Attention Capacity Across xReality","doi":"10.1145/3652037.3652050","main_file_link":[{"open_access":"1","url":"https://dl.acm.org/doi/pdf/10.1145/3652037.3652050"}],"date_updated":"2024-07-08T08:32:21Z","oa":"1","publisher":"ACM","author":[{"first_name":"Kai","orcid":"0000-0002-2879-2359","last_name":"Biermeier","id":"55908","full_name":"Biermeier, Kai"},{"first_name":"Ingrid","id":"451","full_name":"Scharlau, Ingrid","orcid":"0000-0003-2364-9489","last_name":"Scharlau"},{"first_name":"Enes","full_name":"Yigitbas, Enes","id":"8447","orcid":"0000-0002-5967-833X","last_name":"Yigitbas"}],"date_created":"2024-05-02T10:28:03Z"},{"_id":"57971","user_id":"61071","keyword":["Adult","Brain Mapping","Cerebral Cortex/diagnostic imaging/physiology","Female","Humans","Magnetic Resonance Imaging","Male","Pattern Recognition","Psycholinguistics","Reproducibility of Results","Speech/physiology","Transcranial Magnetic Stimulation","Visual/physiology","Young 
Adult"],"language":[{"iso":"eng"}],"extern":"1","type":"journal_article","publication":"Human brain mapping","abstract":[{"text":"Repetitive TMS (rTMS) with a frequency of 5-10 Hz is widely used for language mapping. However, it may be accompanied by discomfort and is limited in the number and reliability of evoked language errors. We, here, systematically tested the influence of different stimulation frequencies (i.e., 10, 30, and 50 Hz) on tolerability, number, reliability, and cortical distribution of language errors aiming at improved language mapping. 15 right-handed, healthy subjects (m = 8, median age: 29 yrs) were investigated in two sessions, separated by 2-5 days. In each session, 10, 30, and 50 Hz rTMS were applied over the left hemisphere in a randomized order during a picture naming task. Overall, 30 Hz rTMS evoked significantly more errors (20 ± 12%) compared to 50 Hz (12 ± 8%; p < .01), whereas error rates were comparable between 30/50 and 10 Hz (18 ± 11%). Across all conditions, a significantly higher error rate was found in Session 1 (19 ± 13%) compared to Session 2 (13 ± 7%, p < .05). The error rate was poorly reliable between sessions for 10 (intraclass correlation coefficient, ICC = .315) and 30 Hz (ICC = .427), whereas 50 Hz showed a moderate reliability (ICC = .597). Spatial reliability of language errors was low to moderate with a tendency toward increased reliability for higher frequencies, for example, within frontal regions. Compared to 10 Hz, both, 30 and 50 Hz were rated as less painful. 
Taken together, our data favor the use of rTMS-protocols employing higher frequencies for evoking language errors reliably and with reduced discomfort, depending on the region of interest.","lang":"eng"}],"status":"public","date_updated":"2026-04-13T11:37:55Z","date_created":"2025-01-06T12:11:43Z","author":[{"last_name":"Nettekoven","full_name":"Nettekoven, Charlotte","first_name":"Charlotte"},{"first_name":"Julia","full_name":"Pieczewski, Julia","last_name":"Pieczewski"},{"first_name":"Volker","last_name":"Neuschmelting","full_name":"Neuschmelting, Volker"},{"first_name":"Kristina","last_name":"Jonas","orcid":"0000-0002-1067-9139","full_name":"Jonas, Kristina","id":"94540"},{"first_name":"Roland","last_name":"Goldbrunner","full_name":"Goldbrunner, Roland"},{"first_name":"Christian","full_name":"Grefkes, Christian","last_name":"Grefkes"},{"first_name":"Carolin","full_name":"Weiss Lucas, Carolin","last_name":"Weiss Lucas"}],"volume":42,"title":"Improving the efficacy and reliability of rTMS language mapping by increasing the stimulation frequency","doi":"10.1002/hbm.25619","issue":"16","year":"2021","citation":{"apa":"Nettekoven, C., Pieczewski, J., Neuschmelting, V., Jonas, K., Goldbrunner, R., Grefkes, C., &#38; Weiss Lucas, C. (2021). Improving the efficacy and reliability of rTMS language mapping by increasing the stimulation frequency. <i>Human Brain Mapping</i>, <i>42</i>(16), 5309–5321. <a href=\"https://doi.org/10.1002/hbm.25619\">https://doi.org/10.1002/hbm.25619</a>","mla":"Nettekoven, Charlotte, et al. “Improving the Efficacy and Reliability of RTMS Language Mapping by Increasing the Stimulation Frequency.” <i>Human Brain Mapping</i>, vol. 42, no. 16, 2021, pp. 
5309–5321, doi:<a href=\"https://doi.org/10.1002/hbm.25619\">10.1002/hbm.25619</a>.","bibtex":"@article{Nettekoven_Pieczewski_Neuschmelting_Jonas_Goldbrunner_Grefkes_Weiss Lucas_2021, title={Improving the efficacy and reliability of rTMS language mapping by increasing the stimulation frequency}, volume={42}, DOI={<a href=\"https://doi.org/10.1002/hbm.25619\">10.1002/hbm.25619</a>}, number={16}, journal={Human brain mapping}, author={Nettekoven, Charlotte and Pieczewski, Julia and Neuschmelting, Volker and Jonas, Kristina and Goldbrunner, Roland and Grefkes, Christian and Weiss Lucas, Carolin}, year={2021}, pages={5309–5321} }","short":"C. Nettekoven, J. Pieczewski, V. Neuschmelting, K. Jonas, R. Goldbrunner, C. Grefkes, C. Weiss Lucas, Human Brain Mapping 42 (2021) 5309–5321.","ieee":"C. Nettekoven <i>et al.</i>, “Improving the efficacy and reliability of rTMS language mapping by increasing the stimulation frequency,” <i>Human brain mapping</i>, vol. 42, no. 16, pp. 5309–5321, 2021, doi: <a href=\"https://doi.org/10.1002/hbm.25619\">10.1002/hbm.25619</a>.","chicago":"Nettekoven, Charlotte, Julia Pieczewski, Volker Neuschmelting, Kristina Jonas, Roland Goldbrunner, Christian Grefkes, and Carolin Weiss Lucas. “Improving the Efficacy and Reliability of RTMS Language Mapping by Increasing the Stimulation Frequency.” <i>Human Brain Mapping</i> 42, no. 16 (2021): 5309–5321. <a href=\"https://doi.org/10.1002/hbm.25619\">https://doi.org/10.1002/hbm.25619</a>.","ama":"Nettekoven C, Pieczewski J, Neuschmelting V, et al. Improving the efficacy and reliability of rTMS language mapping by increasing the stimulation frequency. <i>Human brain mapping</i>. 2021;42(16):5309–5321. 
doi:<a href=\"https://doi.org/10.1002/hbm.25619\">10.1002/hbm.25619</a>"},"intvolume":"        42","page":"5309–5321"},{"user_id":"3118","department":[{"_id":"78"}],"_id":"10780","language":[{"iso":"eng"}],"keyword":["embedded systems","image sensors","power aware computing","wireless sensor networks","Zynq-based VSN node prototype","computational self-awareness","design approach","platform levels","power consumption","visual sensor networks","visual sensor nodes","Cameras","Hardware","Middleware","Multicore processing","Operating systems","Runtime","Reconfigurable platforms","distributed embedded systems","performance-resource trade-off","self-awareness","visual sensor nodes"],"type":"conference","publication":"12th International Symposium on Reconfigurable Communication-centric Systems-on-Chip (ReCoSoC)","status":"public","author":[{"last_name":"Guettatfi","full_name":"Guettatfi, Zakarya","first_name":"Zakarya"},{"last_name":"Hübner","full_name":"Hübner, Philipp","first_name":"Philipp"},{"first_name":"Marco","last_name":"Platzner","full_name":"Platzner, Marco","id":"398"},{"last_name":"Rinner","full_name":"Rinner, Bernhard","first_name":"Bernhard"}],"date_created":"2019-07-10T12:13:15Z","date_updated":"2022-01-06T06:50:50Z","doi":"10.1109/ReCoSoC.2017.8016147","title":"Computational self-awareness as design approach for visual sensor nodes","citation":{"chicago":"Guettatfi, Zakarya, Philipp Hübner, Marco Platzner, and Bernhard Rinner. “Computational Self-Awareness as Design Approach for Visual Sensor Nodes.” In <i>12th International Symposium on Reconfigurable Communication-Centric Systems-on-Chip (ReCoSoC)</i>, 1–8, 2017. <a href=\"https://doi.org/10.1109/ReCoSoC.2017.8016147\">https://doi.org/10.1109/ReCoSoC.2017.8016147</a>.","ieee":"Z. Guettatfi, P. Hübner, M. Platzner, and B. 
Rinner, “Computational self-awareness as design approach for visual sensor nodes,” in <i>12th International Symposium on Reconfigurable Communication-centric Systems-on-Chip (ReCoSoC)</i>, 2017, pp. 1–8.","ama":"Guettatfi Z, Hübner P, Platzner M, Rinner B. Computational self-awareness as design approach for visual sensor nodes. In: <i>12th International Symposium on Reconfigurable Communication-Centric Systems-on-Chip (ReCoSoC)</i>. ; 2017:1-8. doi:<a href=\"https://doi.org/10.1109/ReCoSoC.2017.8016147\">10.1109/ReCoSoC.2017.8016147</a>","apa":"Guettatfi, Z., Hübner, P., Platzner, M., &#38; Rinner, B. (2017). Computational self-awareness as design approach for visual sensor nodes. In <i>12th International Symposium on Reconfigurable Communication-centric Systems-on-Chip (ReCoSoC)</i> (pp. 1–8). <a href=\"https://doi.org/10.1109/ReCoSoC.2017.8016147\">https://doi.org/10.1109/ReCoSoC.2017.8016147</a>","bibtex":"@inproceedings{Guettatfi_Hübner_Platzner_Rinner_2017, title={Computational self-awareness as design approach for visual sensor nodes}, DOI={<a href=\"https://doi.org/10.1109/ReCoSoC.2017.8016147\">10.1109/ReCoSoC.2017.8016147</a>}, booktitle={12th International Symposium on Reconfigurable Communication-centric Systems-on-Chip (ReCoSoC)}, author={Guettatfi, Zakarya and Hübner, Philipp and Platzner, Marco and Rinner, Bernhard}, year={2017}, pages={1–8} }","short":"Z. Guettatfi, P. Hübner, M. Platzner, B. Rinner, in: 12th International Symposium on Reconfigurable Communication-Centric Systems-on-Chip (ReCoSoC), 2017, pp. 1–8.","mla":"Guettatfi, Zakarya, et al. “Computational Self-Awareness as Design Approach for Visual Sensor Nodes.” <i>12th International Symposium on Reconfigurable Communication-Centric Systems-on-Chip (ReCoSoC)</i>, 2017, pp. 
1–8, doi:<a href=\"https://doi.org/10.1109/ReCoSoC.2017.8016147\">10.1109/ReCoSoC.2017.8016147</a>."},"page":"1-8","year":"2017"},{"doi":"10.3758/s13414-017-1325-6","author":[{"first_name":"Alexander","last_name":"Krüger","full_name":"Krüger, Alexander"},{"last_name":"Tünnermann","full_name":"Tünnermann, Jan","first_name":"Jan"},{"full_name":"Scharlau, Ingrid","id":"451","last_name":"Scharlau","orcid":"0000-0003-2364-9489","first_name":"Ingrid"}],"volume":79,"date_updated":"2022-06-06T14:08:05Z","citation":{"ama":"Krüger A, Tünnermann J, Scharlau I. Measuring and modeling salience with the theory of visual attention. <i>Attention, Perception, &#38; Psychophysics</i>. 2017;79(6):1593-1614. doi:<a href=\"https://doi.org/10.3758/s13414-017-1325-6\">10.3758/s13414-017-1325-6</a>","chicago":"Krüger, Alexander, Jan Tünnermann, and Ingrid Scharlau. “Measuring and Modeling Salience with the Theory of Visual Attention.” <i>Attention, Perception, &#38; Psychophysics</i> 79, no. 6 (2017): 1593–1614. <a href=\"https://doi.org/10.3758/s13414-017-1325-6\">https://doi.org/10.3758/s13414-017-1325-6</a>.","ieee":"A. Krüger, J. Tünnermann, and I. Scharlau, “Measuring and modeling salience with the theory of visual attention.,” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 79, no. 6, pp. 1593–1614, 2017, doi: <a href=\"https://doi.org/10.3758/s13414-017-1325-6\">10.3758/s13414-017-1325-6</a>.","short":"A. Krüger, J. Tünnermann, I. Scharlau, Attention, Perception, &#38; Psychophysics 79 (2017) 1593–1614.","bibtex":"@article{Krüger_Tünnermann_Scharlau_2017, title={Measuring and modeling salience with the theory of visual attention.}, volume={79}, DOI={<a href=\"https://doi.org/10.3758/s13414-017-1325-6\">10.3758/s13414-017-1325-6</a>}, number={6}, journal={Attention, Perception, &#38; Psychophysics}, author={Krüger, Alexander and Tünnermann, Jan and Scharlau, Ingrid}, year={2017}, pages={1593–1614} }","mla":"Krüger, Alexander, et al. 
“Measuring and Modeling Salience with the Theory of Visual Attention.” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 79, no. 6, 2017, pp. 1593–614, doi:<a href=\"https://doi.org/10.3758/s13414-017-1325-6\">10.3758/s13414-017-1325-6</a>.","apa":"Krüger, A., Tünnermann, J., &#38; Scharlau, I. (2017). Measuring and modeling salience with the theory of visual attention. <i>Attention, Perception, &#38; Psychophysics</i>, <i>79</i>(6), 1593–1614. <a href=\"https://doi.org/10.3758/s13414-017-1325-6\">https://doi.org/10.3758/s13414-017-1325-6</a>"},"intvolume":"        79","page":"1593 - 1614","publication_status":"published","publication_identifier":{"issn":["1943-3921"]},"article_type":"original","user_id":"42165","department":[{"_id":"424"}],"_id":"6075","status":"public","type":"journal_article","title":"Measuring and modeling salience with the theory of visual attention.","date_created":"2018-12-10T07:05:04Z","year":"2017","issue":"6","language":[{"iso":"eng"}],"keyword":["Salience","Visual attention","Bayesian inference","Theory of visual attention","Computational modeling","Inference","Object Recognition","Theories","Visual Perception","Visual Attention","Luminance","Perceptual Orientation","Statistical Probability","Stimulus Salience","Computational Modeling"],"abstract":[{"lang":"eng","text":"For almost three decades, the theory of visual attention (TVA) has been successful in mathematically describing and explaining a wide variety of phenomena in visual selection and recognition with high quantitative precision. Interestingly, the influence of feature contrast on attention has been included in TVA only recently, although it has been extensively studied outside the TVA framework. The present approach further develops this extension of TVA’s scope by measuring and modeling salience. An empirical measure of salience is achieved by linking different (orientation and luminance) contrasts to a TVA parameter. 
In the modeling part, the function relating feature contrasts to salience is described mathematically and tested against alternatives by Bayesian model comparison. This model comparison reveals that the power function is an appropriate model of salience growth in the dimensions of orientation and luminance contrast. Furthermore, if contrasts from the two dimensions are comb"}],"publication":"Attention, Perception, & Psychophysics"},{"page":"20 - 38","intvolume":"        12","citation":{"apa":"Krüger, A., Tünnermann, J., &#38; Scharlau, I. (2016). Fast and conspicuous? Quantifying salience with the theory of visual attention. <i>Advances in Cognitive Psychology</i>, <i>12</i>(1), 20–38. <a href=\"https://doi.org/10.5709/acp-0184-1\">https://doi.org/10.5709/acp-0184-1</a>","bibtex":"@article{Krüger_Tünnermann_Scharlau_2016, title={Fast and conspicuous? Quantifying salience with the theory of visual attention.}, volume={12}, DOI={<a href=\"https://doi.org/10.5709/acp-0184-1\">10.5709/acp-0184-1</a>}, number={1}, journal={Advances in Cognitive Psychology}, author={Krüger, Alexander and Tünnermann, Jan and Scharlau, Ingrid}, year={2016}, pages={20–38} }","mla":"Krüger, Alexander, et al. “Fast and Conspicuous? Quantifying Salience with the Theory of Visual Attention.” <i>Advances in Cognitive Psychology</i>, vol. 12, no. 1, 2016, pp. 20–38, doi:<a href=\"https://doi.org/10.5709/acp-0184-1\">10.5709/acp-0184-1</a>.","short":"A. Krüger, J. Tünnermann, I. Scharlau, Advances in Cognitive Psychology 12 (2016) 20–38.","ama":"Krüger A, Tünnermann J, Scharlau I. Fast and conspicuous? Quantifying salience with the theory of visual attention. <i>Advances in Cognitive Psychology</i>. 2016;12(1):20-38. doi:<a href=\"https://doi.org/10.5709/acp-0184-1\">10.5709/acp-0184-1</a>","ieee":"A. Krüger, J. Tünnermann, and I. Scharlau, “Fast and conspicuous? Quantifying salience with the theory of visual attention.,” <i>Advances in Cognitive Psychology</i>, vol. 12, no. 1, pp. 
20–38, 2016, doi: <a href=\"https://doi.org/10.5709/acp-0184-1\">10.5709/acp-0184-1</a>.","chicago":"Krüger, Alexander, Jan Tünnermann, and Ingrid Scharlau. “Fast and Conspicuous? Quantifying Salience with the Theory of Visual Attention.” <i>Advances in Cognitive Psychology</i> 12, no. 1 (2016): 20–38. <a href=\"https://doi.org/10.5709/acp-0184-1\">https://doi.org/10.5709/acp-0184-1</a>."},"publication_identifier":{"issn":["1895-1171"]},"publication_status":"published","doi":"10.5709/acp-0184-1","main_file_link":[{"open_access":"1","url":"http://ac-psych.org/en/download-pdf/volume/12/issue/1/id/185"}],"date_updated":"2022-06-06T16:21:09Z","oa":"1","volume":12,"author":[{"full_name":"Krüger, Alexander","last_name":"Krüger","first_name":"Alexander"},{"first_name":"Jan","last_name":"Tünnermann","full_name":"Tünnermann, Jan"},{"id":"451","full_name":"Scharlau, Ingrid","orcid":"0000-0003-2364-9489","last_name":"Scharlau","first_name":"Ingrid"}],"status":"public","type":"journal_article","funded_apc":"1","_id":"6071","department":[{"_id":"424"}],"user_id":"42165","year":"2016","issue":"1","title":"Fast and conspicuous? Quantifying salience with the theory of visual attention.","date_created":"2018-12-10T07:04:15Z","abstract":[{"text":"Particular differences between an object and its surrounding cause salience, guide attention, and improve performance in various tasks. While much research has been dedicated to identifying which feature dimensions contribute to salience, much less regard has been paid to the quantitative strength of the salience caused by feature differences. Only a few studies systematically related salience effects to a common salience measure, and they are partly outdated in the light of new findings on the time course of salience effects. 
We propose Bundesen’s Theory of Visual Attention (TVA) as a theoretical basis for measuring salience and introduce an empirical and modeling approach to link this theory to data retrieved from temporal-order judgments. With this procedure, TVA becomes applicable to a broad range of salience-related stimulus material. Three experiments with orientation pop-out displays demonstrate the feasibility of the method. A 4th experiment substantiates its applicability t","lang":"eng"}],"publication":"Advances in Cognitive Psychology","keyword":["salience","visual attention","Bayesian inference","theory of visual attention","computational modeling","Visual Attention","Computational Modeling","Inference","Judgment","Statistical Probability"],"language":[{"iso":"eng"}]},{"keyword":["cueing","temporal-order judgements","theory of visual attention (TVA)","peripheral cue","processing speed","stimulus encoding","prior entry","Attention","Cues","Face Perception","Judgment"],"language":[{"iso":"eng"}],"_id":"6080","user_id":"42165","department":[{"_id":"424"}],"abstract":[{"text":"Peripheral visual cues lead to large shifts in psychometric distributions of temporal-order judgments. In one view, such shifts are attributed to attention speeding up processing of the cued stimulus, so-called prior entry. However, sometimes these shifts are so large that it is unlikely that they are caused by attention alone. Here we tested the prevalent alternative explanation that the cue is sometimes confused with the target on a perceptual level, bolstering the shift of the psychometric function. We applied a novel model of cued temporal-order judgments, derived from Bundesen’s Theory of Visual Attention. We found that cue–target confusions indeed contribute to shifting psychometric functions. However, cue-induced changes in the processing rates of the target stimuli play an important role, too. At smaller cueing intervals, the cue increased the processing speed of the target. 
At larger intervals, inhibition of return was predominant. Earlier studies of cued TOJs were insensitive","lang":"eng"}],"status":"public","type":"journal_article","publication":"Frontiers in Psychology","title":"Peripheral visual cues: Their fate in processing and effects on attention and temporal-order perception.","main_file_link":[{"url":"https://www.frontiersin.org/articles/10.3389/fpsyg.2016.01442/full","open_access":"1"}],"doi":"10.3389/fpsyg.2016.01442","date_updated":"2022-06-06T16:29:50Z","oa":"1","date_created":"2018-12-10T07:06:09Z","author":[{"full_name":"Tünnermann, Jan","last_name":"Tünnermann","first_name":"Jan"},{"first_name":"Ingrid","full_name":"Scharlau, Ingrid","id":"451","last_name":"Scharlau","orcid":"0000-0003-2364-9489"}],"volume":7,"year":"2016","citation":{"short":"J. Tünnermann, I. Scharlau, Frontiers in Psychology 7 (2016).","bibtex":"@article{Tünnermann_Scharlau_2016, title={Peripheral visual cues: Their fate in processing and effects on attention and temporal-order perception.}, volume={7}, DOI={<a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">10.3389/fpsyg.2016.01442</a>}, journal={Frontiers in Psychology}, author={Tünnermann, Jan and Scharlau, Ingrid}, year={2016} }","mla":"Tünnermann, Jan, and Ingrid Scharlau. “Peripheral Visual Cues: Their Fate in Processing and Effects on Attention and Temporal-Order Perception.” <i>Frontiers in Psychology</i>, vol. 7, 2016, doi:<a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">10.3389/fpsyg.2016.01442</a>.","apa":"Tünnermann, J., &#38; Scharlau, I. (2016). Peripheral visual cues: Their fate in processing and effects on attention and temporal-order perception. <i>Frontiers in Psychology</i>, <i>7</i>. <a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">https://doi.org/10.3389/fpsyg.2016.01442</a>","ama":"Tünnermann J, Scharlau I. Peripheral visual cues: Their fate in processing and effects on attention and temporal-order perception. <i>Frontiers in Psychology</i>. 2016;7. 
doi:<a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">10.3389/fpsyg.2016.01442</a>","chicago":"Tünnermann, Jan, and Ingrid Scharlau. “Peripheral Visual Cues: Their Fate in Processing and Effects on Attention and Temporal-Order Perception.” <i>Frontiers in Psychology</i> 7 (2016). <a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">https://doi.org/10.3389/fpsyg.2016.01442</a>.","ieee":"J. Tünnermann and I. Scharlau, “Peripheral visual cues: Their fate in processing and effects on attention and temporal-order perception.,” <i>Frontiers in Psychology</i>, vol. 7, 2016, doi: <a href=\"https://doi.org/10.3389/fpsyg.2016.01442\">10.3389/fpsyg.2016.01442</a>."},"intvolume":"         7","publication_status":"published","publication_identifier":{"issn":["1664-1078"]}},{"language":[{"iso":"eng"}],"keyword":["unattended stimuli","attention speed","cognitive processing","Attention","Humans","Judgment","Mental Recall","Visual Perception","Stimulus Parameters","Visual Perception","Visual Attention","Cognitive Processes","Velocity"],"publication":"Journal of Vision","abstract":[{"lang":"eng","text":"Selective visual attention improves performance in many tasks. Among others, it leads to 'prior entry'—earlier perception of an attended compared to an unattended stimulus. Whether this phenomenon is purely based on an increase of the processing rate of the attended stimulus or if a decrease in the processing rate of the unattended stimulus also contributes to the effect is, up to now, unanswered. Here we describe a novel approach to this question based on Bundesen’s Theory of Visual Attention, which we use to overcome the limitations of earlier prior-entry assessment with temporal order judgments (TOJs) that only allow relative statements regarding the processing speed of attended and unattended stimuli. Prevalent models of prior entry in TOJs either indirectly predict a pure acceleration or cannot model the difference between acceleration and deceleration. 
In a paradigm that combines a letter-identification task with TOJs, we show that indeed acceleration of the attended and deceler"}],"date_created":"2018-12-10T07:01:56Z","title":"Does attention speed up processing? Decreases and increases of processing rates in visual prior entry.","issue":"3","year":"2015","department":[{"_id":"424"}],"user_id":"42165","_id":"6066","type":"journal_article","status":"public","volume":15,"author":[{"first_name":"Jan","last_name":"Tünnermann","full_name":"Tünnermann, Jan"},{"last_name":"Petersen","full_name":"Petersen, Anders","first_name":"Anders"},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","full_name":"Scharlau, Ingrid","id":"451","first_name":"Ingrid"}],"date_updated":"2022-06-06T16:31:07Z","oa":"1","doi":"10.1167/15.3.1","main_file_link":[{"url":"https://jov.arvojournals.org/article.aspx?articleid=2213282","open_access":"1"}],"publication_identifier":{"issn":["1534-7362"]},"publication_status":"published","intvolume":"        15","citation":{"ieee":"J. Tünnermann, A. Petersen, and I. Scharlau, “Does attention speed up processing? Decreases and increases of processing rates in visual prior entry.,” <i>Journal of Vision</i>, vol. 15, no. 3, 2015, doi: <a href=\"https://doi.org/10.1167/15.3.1\">10.1167/15.3.1</a>.","chicago":"Tünnermann, Jan, Anders Petersen, and Ingrid Scharlau. “Does Attention Speed up Processing? Decreases and Increases of Processing Rates in Visual Prior Entry.” <i>Journal of Vision</i> 15, no. 3 (2015). <a href=\"https://doi.org/10.1167/15.3.1\">https://doi.org/10.1167/15.3.1</a>.","ama":"Tünnermann J, Petersen A, Scharlau I. Does attention speed up processing? Decreases and increases of processing rates in visual prior entry. <i>Journal of Vision</i>. 2015;15(3). doi:<a href=\"https://doi.org/10.1167/15.3.1\">10.1167/15.3.1</a>","apa":"Tünnermann, J., Petersen, A., &#38; Scharlau, I. (2015). Does attention speed up processing? Decreases and increases of processing rates in visual prior entry. 
<i>Journal of Vision</i>, <i>15</i>(3). <a href=\"https://doi.org/10.1167/15.3.1\">https://doi.org/10.1167/15.3.1</a>","short":"J. Tünnermann, A. Petersen, I. Scharlau, Journal of Vision 15 (2015).","mla":"Tünnermann, Jan, et al. “Does Attention Speed up Processing? Decreases and Increases of Processing Rates in Visual Prior Entry.” <i>Journal of Vision</i>, vol. 15, no. 3, 2015, doi:<a href=\"https://doi.org/10.1167/15.3.1\">10.1167/15.3.1</a>.","bibtex":"@article{Tünnermann_Petersen_Scharlau_2015, title={Does attention speed up processing? Decreases and increases of processing rates in visual prior entry.}, volume={15}, DOI={<a href=\"https://doi.org/10.1167/15.3.1\">10.1167/15.3.1</a>}, number={3}, journal={Journal of Vision}, author={Tünnermann, Jan and Petersen, Anders and Scharlau, Ingrid}, year={2015} }"}},{"date_updated":"2022-06-06T16:35:40Z","date_created":"2018-12-10T07:06:20Z","author":[{"full_name":"Hilkenmeier, Frederic","last_name":"Hilkenmeier","first_name":"Frederic"},{"first_name":"Christian N. L.","last_name":"Olivers","full_name":"Olivers, Christian N. L."},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","id":"451","full_name":"Scharlau, Ingrid","first_name":"Ingrid"}],"volume":38,"title":"Prior entry and temporal attention: Cueing affects order errors in RSVP.","publication_status":"published","publication_identifier":{"issn":["0096-1523"]},"issue":"1","year":"2012","citation":{"ama":"Hilkenmeier F, Olivers CNL, Scharlau I. Prior entry and temporal attention: Cueing affects order errors in RSVP. <i>Journal of Experimental Psychology: Human Perception and Performance</i>. 2012;38(1):180-190.","apa":"Hilkenmeier, F., Olivers, C. N. L., &#38; Scharlau, I. (2012). Prior entry and temporal attention: Cueing affects order errors in RSVP. <i>Journal of Experimental Psychology: Human Perception and Performance</i>, <i>38</i>(1), 180–190.","mla":"Hilkenmeier, Frederic, et al. 
“Prior Entry and Temporal Attention: Cueing Affects Order Errors in RSVP.” <i>Journal of Experimental Psychology: Human Perception and Performance</i>, vol. 38, no. 1, 2012, pp. 180–90.","short":"F. Hilkenmeier, C.N.L. Olivers, I. Scharlau, Journal of Experimental Psychology: Human Perception and Performance 38 (2012) 180–190.","bibtex":"@article{Hilkenmeier_Olivers_Scharlau_2012, title={Prior entry and temporal attention: Cueing affects order errors in RSVP.}, volume={38}, number={1}, journal={Journal of Experimental Psychology: Human Perception and Performance}, author={Hilkenmeier, Frederic and Olivers, Christian N. L. and Scharlau, Ingrid}, year={2012}, pages={180–190} }","chicago":"Hilkenmeier, Frederic, Christian N. L. Olivers, and Ingrid Scharlau. “Prior Entry and Temporal Attention: Cueing Affects Order Errors in RSVP.” <i>Journal of Experimental Psychology: Human Perception and Performance</i> 38, no. 1 (2012): 180–90.","ieee":"F. Hilkenmeier, C. N. L. Olivers, and I. Scharlau, “Prior entry and temporal attention: Cueing affects order errors in RSVP.,” <i>Journal of Experimental Psychology: Human Perception and Performance</i>, vol. 38, no. 1, pp. 
180–190, 2012."},"page":"180 - 190","intvolume":"        38","_id":"6081","user_id":"42165","department":[{"_id":"424"}],"keyword":["attentional blink","attentional enhancement","lag-1 sparing","prior entry","temporal cueing","visual attention","rapid serial presentation","Adolescent","Adult","Attention","Attentional Blink","Color Perception","Cues","Female","Humans","Male","Neuropsychological Tests","Pattern Recognition","Visual","Time Factors","Visual Perception","Young Adult","Cues","Serial Recall","Visual Attention","Eyeblink Reflex"],"language":[{"iso":"eng"}],"funded_apc":"1","type":"journal_article","publication":"Journal of Experimental Psychology: Human Perception and Performance","abstract":[{"lang":"eng","text":"The law of prior entry states that attended objects come to consciousness more quickly than unattended ones. This has been well established in spatial cueing paradigms, where two task-relevant stimuli are presented near-simultaneously at two different locations. Here, we suggest that prior entry also plays a pivotal role in temporal attention paradigms, where stimuli appear at the same location but at distinct moments in time, in rapid serial presentation (RSVP). Specifically, we hypothesize that prior entry can explain temporal order reversals in reporting two targets from RSVP. In support of this, three experiments show that cueing attention toward either of the targets has a strong influence on order errors. We conclude that prior entry provides a viable explanation of the way in which relevant information is prioritized in RSVP. (PsycINFO Database Record (c) 2016 APA, all rights reserved)"}],"status":"public"},{"citation":{"ama":"Weiß K, Scharlau I. At the mercy of prior entry: Prior entry induced by invisible primes is not susceptible to current intentions. <i>Acta Psychologica</i>. 2012;139(1):54-64.","ieee":"K. Weiß and I. 
Scharlau, “At the mercy of prior entry: Prior entry induced by invisible primes is not susceptible to current intentions.,” <i>Acta Psychologica</i>, vol. 139, no. 1, pp. 54–64, 2012.","chicago":"Weiß, Katharina, and Ingrid Scharlau. “At the Mercy of Prior Entry: Prior Entry Induced by Invisible Primes Is Not Susceptible to Current Intentions.” <i>Acta Psychologica</i> 139, no. 1 (2012): 54–64.","short":"K. Weiß, I. Scharlau, Acta Psychologica 139 (2012) 54–64.","bibtex":"@article{Weiß_Scharlau_2012, title={At the mercy of prior entry: Prior entry induced by invisible primes is not susceptible to current intentions.}, volume={139}, number={1}, journal={Acta Psychologica}, author={Weiß, Katharina and Scharlau, Ingrid}, year={2012}, pages={54–64} }","mla":"Weiß, Katharina, and Ingrid Scharlau. “At the Mercy of Prior Entry: Prior Entry Induced by Invisible Primes Is Not Susceptible to Current Intentions.” <i>Acta Psychologica</i>, vol. 139, no. 1, 2012, pp. 54–64.","apa":"Weiß, K., &#38; Scharlau, I. (2012). At the mercy of prior entry: Prior entry induced by invisible primes is not susceptible to current intentions. <i>Acta Psychologica</i>, <i>139</i>(1), 54–64."},"intvolume":"       139","page":"54 - 64","year":"2012","issue":"1","publication_status":"published","publication_identifier":{"issn":["0001-6918"]},"title":"At the mercy of prior entry: Prior entry induced by invisible primes is not susceptible to current intentions.","date_created":"2018-12-10T07:01:19Z","author":[{"first_name":"Katharina","last_name":"Weiß","full_name":"Weiß, Katharina"},{"first_name":"Ingrid","full_name":"Scharlau, Ingrid","id":"451","orcid":"0000-0003-2364-9489","last_name":"Scharlau"}],"volume":139,"date_updated":"2022-06-06T16:41:22Z","status":"public","abstract":[{"lang":"eng","text":"If one of two events is attended to, it will be perceived earlier than a simultaneously occurring unattended event. 
Since 150 years, this effect has been ascribed to the facilitating influence of attention, also known as prior entry. Yet, the attentional origin of prior-entry effects¹ has been repeatedly doubted. One criticism is that prior-entry effects might be due to biased decision processes that would mimic a temporal advantage for attended stimuli. Although most obvious biases have already been excluded experimentally (e.g. judgment criteria, response compatibility) and prior-entry effects have shown to persist (Shore, Spence, & Klein, 2001), many other biases are conceivable, which makes it difficult to put the debate to an end. Thus, we approach this problem the other way around by asking whether prior-entry effects can be biased voluntarily. Observers were informed about prior entry and instructed to reduce it as far as possible. For this aim they received continuous feedback"}],"type":"journal_article","publication":"Acta Psychologica","language":[{"iso":"eng"}],"funded_apc":"1","keyword":["intentions","events","attention","decision processes","Adult","Attention","Choice Behavior","Cues","Female","Humans","Intention","Judgment","Male","Middle Aged","Reaction Time","Time Perception","Visual Perception","Attention","Decision Making","Experiences (Events)","Intention"],"user_id":"42165","department":[{"_id":"424"}],"_id":"6064"},{"title":"Spatial mislocalization as a consequence of sequential coding of stimuli.","date_updated":"2022-06-06T16:38:04Z","volume":74,"author":[{"full_name":"Priess, Heinz-Werner","last_name":"Priess","first_name":"Heinz-Werner"},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","id":"451","full_name":"Scharlau, Ingrid","first_name":"Ingrid"},{"full_name":"Becker, Stefanie I.","last_name":"Becker","first_name":"Stefanie I."},{"first_name":"Ulrich","last_name":"Ansorge","full_name":"Ansorge, Ulrich"}],"date_created":"2018-12-10T07:07:08Z","year":"2012","page":"365 - 378","intvolume":"        74","citation":{"apa":"Priess, H.-W., Scharlau, I., 
Becker, S. I., &#38; Ansorge, U. (2012). Spatial mislocalization as a consequence of sequential coding of stimuli. <i>Attention, Perception, &#38; Psychophysics</i>, <i>74</i>(2), 365–378.","short":"H.-W. Priess, I. Scharlau, S.I. Becker, U. Ansorge, Attention, Perception, &#38; Psychophysics 74 (2012) 365–378.","mla":"Priess, Heinz-Werner, et al. “Spatial Mislocalization as a Consequence of Sequential Coding of Stimuli.” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 74, no. 2, 2012, pp. 365–78.","bibtex":"@article{Priess_Scharlau_Becker_Ansorge_2012, title={Spatial mislocalization as a consequence of sequential coding of stimuli.}, volume={74}, number={2}, journal={Attention, Perception, &#38; Psychophysics}, author={Priess, Heinz-Werner and Scharlau, Ingrid and Becker, Stefanie I. and Ansorge, Ulrich}, year={2012}, pages={365–378} }","ama":"Priess H-W, Scharlau I, Becker SI, Ansorge U. Spatial mislocalization as a consequence of sequential coding of stimuli. <i>Attention, Perception, &#38; Psychophysics</i>. 2012;74(2):365-378.","ieee":"H.-W. Priess, I. Scharlau, S. I. Becker, and U. Ansorge, “Spatial mislocalization as a consequence of sequential coding of stimuli.,” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 74, no. 2, pp. 365–378, 2012.","chicago":"Priess, Heinz-Werner, Ingrid Scharlau, Stefanie I. Becker, and Ulrich Ansorge. “Spatial Mislocalization as a Consequence of Sequential Coding of Stimuli.” <i>Attention, Perception, &#38; Psychophysics</i> 74, no. 
2 (2012): 365–78."},"publication_identifier":{"issn":["1943-3921"]},"publication_status":"published","issue":"2","keyword":["spatial mislocalization","sequential coding","stimulus parameters","Attention","Discrimination (Psychology)","Humans","Judgment","Motion Perception","Optical Illusions","Orientation","Pattern Recognition","Visual","Psychophysics","Space Perception","Cognitive Processes","Motion Perception","Perceptual Localization","Spatial Perception","Stimulus Parameters","Consequence"],"language":[{"iso":"eng"}],"funded_apc":"1","_id":"6085","department":[{"_id":"424"}],"user_id":"42165","abstract":[{"lang":"eng","text":"In three experiments, we tested whether sequentially coding two visual stimuli can create a spatial misperception of a visual moving stimulus. In Experiment 1, we showed that a spatial misperception, the flash-lag effect, is accompanied by a similar temporal misperception of first perceiving the flash and only then a change of the moving stimulus, when in fact the two events were exactly simultaneous. In Experiment 2, we demonstrated that when the spatial misperception of a flash-lag effect is absent, the temporal misperception is also absent. 
In Experiment 3, we extended these findings and showed that if the stimulus conditions require coding first a flash and subsequently a nearby moving stimulus, a spatial flash-lag effect is found, with the position of the moving stimulus being misperceived as shifted in the direction of its motion, whereas this spatial misperception is reversed so that the moving stimulus is misperceived as shifted in a direction opposite to its motion when the c"}],"status":"public","publication":"Attention, Perception, & Psychophysics","type":"journal_article"},{"date_updated":"2022-06-06T16:36:51Z","volume":20,"author":[{"first_name":"Frederic","last_name":"Hilkenmeier","full_name":"Hilkenmeier, Frederic"},{"last_name":"Scharlau","orcid":"0000-0003-2364-9489","id":"451","full_name":"Scharlau, Ingrid","first_name":"Ingrid"},{"first_name":"Katharina","last_name":"Weiß","full_name":"Weiß, Katharina"},{"last_name":"Olivers","full_name":"Olivers, Christian N. L.","first_name":"Christian N. L."}],"date_created":"2018-12-10T07:07:45Z","title":"The dynamics of prior entry in serial visual processing.","publication_identifier":{"issn":["1350-6285"]},"publication_status":"published","issue":"1","year":"2012","intvolume":"        20","page":"48 - 76","citation":{"apa":"Hilkenmeier, F., Scharlau, I., Weiß, K., &#38; Olivers, C. N. L. (2012). The dynamics of prior entry in serial visual processing. <i>Visual Cognition</i>, <i>20</i>(1), 48–76.","short":"F. Hilkenmeier, I. Scharlau, K. Weiß, C.N.L. Olivers, Visual Cognition 20 (2012) 48–76.","mla":"Hilkenmeier, Frederic, et al. “The Dynamics of Prior Entry in Serial Visual Processing.” <i>Visual Cognition</i>, vol. 20, no. 1, 2012, pp. 48–76.","bibtex":"@article{Hilkenmeier_Scharlau_Weiß_Olivers_2012, title={The dynamics of prior entry in serial visual processing.}, volume={20}, number={1}, journal={Visual Cognition}, author={Hilkenmeier, Frederic and Scharlau, Ingrid and Weiß, Katharina and Olivers, Christian N. 
L.}, year={2012}, pages={48–76} }","ama":"Hilkenmeier F, Scharlau I, Weiß K, Olivers CNL. The dynamics of prior entry in serial visual processing. <i>Visual Cognition</i>. 2012;20(1):48-76.","ieee":"F. Hilkenmeier, I. Scharlau, K. Weiß, and C. N. L. Olivers, “The dynamics of prior entry in serial visual processing.,” <i>Visual Cognition</i>, vol. 20, no. 1, pp. 48–76, 2012.","chicago":"Hilkenmeier, Frederic, Ingrid Scharlau, Katharina Weiß, and Christian N. L. Olivers. “The Dynamics of Prior Entry in Serial Visual Processing.” <i>Visual Cognition</i> 20, no. 1 (2012): 48–76."},"_id":"6088","department":[{"_id":"424"}],"user_id":"42165","keyword":["serial visual processing","prior entry dynamics","cueing paradigms","Cues","Visual Perception","Visual Search"],"language":[{"iso":"eng"}],"funded_apc":"1","publication":"Visual Cognition","type":"journal_article","abstract":[{"text":"An attended stimulus reduces the perceptual latency of a later stimulus at the same location, leading to the intriguing finding that the perceived order between the two is often reversed. This prior-entry effect has been well established in a number of different cueing paradigms, mostly involving spatial attentional shifts. Here we assess the time-course of prior entry when all stimuli appear in rapid serial presentation at one location. Our findings indicate that the size of the attentional enhancement is strongly affected by the stimulus onset asynchrony between cue and target, with a rapid early peak, followed by decay. When task-irrelevant cues are used, the cueing effect on prior entry is short-lived and peaks as early as 50 ms. The benefit extends to about 100 ms when task-relevant cues are employed. These results fit with a straightforward computational model of transient attentional enhancement, peaking about 80–100 ms after stimulus detection. 
(PsycINFO Database Record (c) 20","lang":"eng"}],"status":"public"},{"issue":"2","year":"2011","date_created":"2018-12-10T07:08:22Z","title":"Top-down contingent feature-specific orienting with and without awareness of the visual input.","publication":"Advances in Cognitive Psychology","abstract":[{"text":"In the present article, the role of endogenous feature-specific orienting for conscious and unconscious vision is reviewed. We start with an overview of orienting. We proceed with a review of masking research, and the definition of the criteria of experimental protocols that demonstrate endogenous and exogenous orienting, respectively. Against this background of criteria, we assess studies of unconscious orienting and come to the conclusion that so far studies of unconscious orienting demonstrated endogenous feature-specific orienting. The review closes with a discussion of the role of unconscious orienting in action control. (PsycINFO Database Record (c) 2016 APA, all rights reserved)","lang":"eng"}],"language":[{"iso":"eng"}],"keyword":["visual input","awareness","conscious","orientation","visual perception","Awareness","Consciousness States","Perceptual Orientation","Visual Perception","Blindsight"],"publication_identifier":{"issn":["1895-1171"]},"publication_status":"published","page":"108 - 119","intvolume":"         7","citation":{"ieee":"U. Ansorge, G. Horstmann, and I. Scharlau, “Top-down contingent feature-specific orienting with and without awareness of the visual input.,” <i>Advances in Cognitive Psychology</i>, vol. 7, no. 2, pp. 108–119, 2011.","chicago":"Ansorge, Ulrich, Gernot Horstmann, and Ingrid Scharlau. “Top-down Contingent Feature-Specific Orienting with and without Awareness of the Visual Input.” <i>Advances in Cognitive Psychology</i> 7, no. 2 (2011): 108–19.","ama":"Ansorge U, Horstmann G, Scharlau I. Top-down contingent feature-specific orienting with and without awareness of the visual input. <i>Advances in Cognitive Psychology</i>. 
2011;7(2):108-119.","bibtex":"@article{Ansorge_Horstmann_Scharlau_2011, title={Top-down contingent feature-specific orienting with and without awareness of the visual input.}, volume={7}, number={2}, journal={Advances in Cognitive Psychology}, author={Ansorge, Ulrich and Horstmann, Gernot and Scharlau, Ingrid}, year={2011}, pages={108–119} }","short":"U. Ansorge, G. Horstmann, I. Scharlau, Advances in Cognitive Psychology 7 (2011) 108–119.","mla":"Ansorge, Ulrich, et al. “Top-down Contingent Feature-Specific Orienting with and without Awareness of the Visual Input.” <i>Advances in Cognitive Psychology</i>, vol. 7, no. 2, 2011, pp. 108–19.","apa":"Ansorge, U., Horstmann, G., &#38; Scharlau, I. (2011). Top-down contingent feature-specific orienting with and without awareness of the visual input. <i>Advances in Cognitive Psychology</i>, <i>7</i>(2), 108–119."},"volume":7,"author":[{"last_name":"Ansorge","full_name":"Ansorge, Ulrich","first_name":"Ulrich"},{"first_name":"Gernot","last_name":"Horstmann","full_name":"Horstmann, Gernot"},{"first_name":"Ingrid","orcid":"0000-0003-2364-9489","last_name":"Scharlau","id":"451","full_name":"Scharlau, Ingrid"}],"date_updated":"2022-06-07T00:15:30Z","oa":"1","main_file_link":[{"url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/Ansorge_Horstmann_Scharlau_ACP_21072011.pdf","open_access":"1"}],"type":"journal_article","status":"public","department":[{"_id":"424"}],"user_id":"42165","_id":"6091"},{"year":"2011","issue":"1","title":"Prior entry explains order reversals in the attentional blink.","date_created":"2018-12-10T07:06:31Z","abstract":[{"text":"When two targets are presented in rapid succession, the first target (T1) is usually identified, but the second target (T2) is often missed. A remarkable exception to this 'attentional blink' occurs when T2 immediately follows the first T1, at lag 1. 
It is then often spared but reported in the wrong order—that is, before T1. These order reversals have led to the hypothesis that 'lag 1 sparing' occurs because the two targets merge into a single episodic representation. Here, we report evidence consistent with an alternative theory: T2 receives more attention than T1, leading to prior entry into working memory. Two experiments showed that the more T2 performance exceeded that for T1, the more order reversals were made. Furthermore, precuing T1 led to a shift in performance benefits from T2 to T1 and to an equivalent reduction in order reversals. We conclude that it is not necessary to assume episodic integration to explain lag 1 sparing or the accompanying order reversals. (PsycINFO Dat","lang":"eng"}],"publication":"Attention, Perception, & Psychophysics","language":[{"iso":"eng"}],"keyword":["attentional blink","order reversals","prior entry","working memory","visual attention","attentional performance","Adolescent","Adult","Attention","Attentional Blink","Color Perception","Cues","Discrimination (Psychology)","Female","Humans","Male","Memory","Short-Term","Pattern Recognition","Visual","Psychophysics","Reaction Time","Reversal Learning","Sensory Gating","Serial Learning","Young Adult","Eyeblink Reflex","Stimulus Change","Stimulus Parameters","Visual Attention","Attentional Blink","Short Term Memory"],"citation":{"ama":"Olivers CNL, Hilkenmeier F, Scharlau I. Prior entry explains order reversals in the attentional blink. <i>Attention, Perception, &#38; Psychophysics</i>. 2011;73(1):53-67.","ieee":"C. N. L. Olivers, F. Hilkenmeier, and I. Scharlau, “Prior entry explains order reversals in the attentional blink.,” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 73, no. 1, pp. 53–67, 2011.","chicago":"Olivers, Christian N. L., Frederic Hilkenmeier, and Ingrid Scharlau. “Prior Entry Explains Order Reversals in the Attentional Blink.” <i>Attention, Perception, &#38; Psychophysics</i> 73, no. 
1 (2011): 53–67.","mla":"Olivers, Christian N. L., et al. “Prior Entry Explains Order Reversals in the Attentional Blink.” <i>Attention, Perception, &#38; Psychophysics</i>, vol. 73, no. 1, 2011, pp. 53–67.","short":"C.N.L. Olivers, F. Hilkenmeier, I. Scharlau, Attention, Perception, &#38; Psychophysics 73 (2011) 53–67.","bibtex":"@article{Olivers_Hilkenmeier_Scharlau_2011, title={Prior entry explains order reversals in the attentional blink.}, volume={73}, number={1}, journal={Attention, Perception, &#38; Psychophysics}, author={Olivers, Christian N. L. and Hilkenmeier, Frederic and Scharlau, Ingrid}, year={2011}, pages={53–67} }","apa":"Olivers, C. N. L., Hilkenmeier, F., &#38; Scharlau, I. (2011). Prior entry explains order reversals in the attentional blink. <i>Attention, Perception, &#38; Psychophysics</i>, <i>73</i>(1), 53–67."},"page":"53 - 67","intvolume":"        73","publication_status":"published","publication_identifier":{"issn":["1943-3921"]},"main_file_link":[{"open_access":"1","url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/Olivers_etal__2011__AP_PProofs.pdf"}],"author":[{"first_name":"Christian N. L.","last_name":"Olivers","full_name":"Olivers, Christian N. 
L."},{"full_name":"Hilkenmeier, Frederic","last_name":"Hilkenmeier","first_name":"Frederic"},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","id":"451","full_name":"Scharlau, Ingrid","first_name":"Ingrid"}],"volume":73,"oa":"1","date_updated":"2022-06-07T00:16:50Z","status":"public","type":"journal_article","funded_apc":"1","user_id":"42165","department":[{"_id":"424"}],"_id":"6082"},{"main_file_link":[{"open_access":"1","url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/WeissScharlau2010.pdf"}],"author":[{"full_name":"Weiß, Katharina","last_name":"Weiß","first_name":"Katharina"},{"first_name":"Ingrid","orcid":"0000-0003-2364-9489","last_name":"Scharlau","id":"451","full_name":"Scharlau, Ingrid"}],"volume":64,"oa":"1","date_updated":"2022-06-07T00:17:26Z","citation":{"ieee":"K. Weiß and I. Scharlau, “Simultaneity and temporal order perception: Different sides of the same coin? Evidence from a visual prior-entry study.,” <i>The Quarterly Journal of Experimental Psychology</i>, vol. 64, no. 2, pp. 394–416, 2011.","chicago":"Weiß, Katharina, and Ingrid Scharlau. “Simultaneity and Temporal Order Perception: Different Sides of the Same Coin? Evidence from a Visual Prior-Entry Study.” <i>The Quarterly Journal of Experimental Psychology</i> 64, no. 2 (2011): 394–416.","apa":"Weiß, K., &#38; Scharlau, I. (2011). Simultaneity and temporal order perception: Different sides of the same coin? Evidence from a visual prior-entry study. <i>The Quarterly Journal of Experimental Psychology</i>, <i>64</i>(2), 394–416.","ama":"Weiß K, Scharlau I. Simultaneity and temporal order perception: Different sides of the same coin? Evidence from a visual prior-entry study. <i>The Quarterly Journal of Experimental Psychology</i>. 2011;64(2):394-416.","short":"K. Weiß, I. 
Scharlau, The Quarterly Journal of Experimental Psychology 64 (2011) 394–416.","bibtex":"@article{Weiß_Scharlau_2011, title={Simultaneity and temporal order perception: Different sides of the same coin? Evidence from a visual prior-entry study.}, volume={64}, number={2}, journal={The Quarterly Journal of Experimental Psychology}, author={Weiß, Katharina and Scharlau, Ingrid}, year={2011}, pages={394–416} }","mla":"Weiß, Katharina, and Ingrid Scharlau. “Simultaneity and Temporal Order Perception: Different Sides of the Same Coin? Evidence from a Visual Prior-Entry Study.” <i>The Quarterly Journal of Experimental Psychology</i>, vol. 64, no. 2, 2011, pp. 394–416."},"intvolume":"        64","page":"394 - 416","publication_status":"published","publication_identifier":{"issn":["1747-0218"]},"funded_apc":"1","user_id":"42165","department":[{"_id":"424"}],"_id":"6084","status":"public","type":"journal_article","title":"Simultaneity and temporal order perception: Different sides of the same coin? Evidence from a visual prior-entry study.","date_created":"2018-12-10T07:06:56Z","year":"2011","issue":"2","language":[{"iso":"eng"}],"keyword":["temporal order perception","simultaneity","temporal order judgment","attention","visual perception","Adolescent","Adult","Attention","Cues","Discrimination (Psychology)","Female","Humans","Judgment","Male","Models","Psychological","Photic Stimulation","Reaction Time","Time Factors","Uncertainty","Visual Perception","Young Adult","Attention","Judgment","Stimulus Similarity","Time Perception","Visual Discrimination","Temporal Order (Judgment)"],"abstract":[{"text":"Attended stimuli are perceived as occurring earlier than unattended stimuli. This phenomenon of prior entry is usually identified by a shift in the point of subjective simultaneity (PSS) in temporal order judgements (TOJs). According to its traditional psychophysical interpretation, the PSS coincides with the perception of simultaneity. This assumption is, however, questionable. 
Technically, the PSS represents the temporal interval between two stimuli at which the two alternative TOJs are equally likely. Thus it also seems possible that observers perceive not simultaneity, but uncertainty of temporal order. This possibility is supported by prior-entry studies, which find that perception of simultaneity is not very likely at the PSS. The present study tested the percept at the PSS in prior entry, using peripheral cues to orient attention. We found that manipulating attention caused varying temporal perceptions around the PSS. On some occasions observers perceived the two stimuli as sim","lang":"eng"}],"publication":"The Quarterly Journal of Experimental Psychology"},{"status":"public","abstract":[{"text":"Implicit change detection demonstrates how the visual system can benefit from stored information that is not immediately available to conscious awareness. We investigated the role of motor action in this context. In the first two experiments, using a one-shot implicit change detection paradigm, participants responded to unperceived changes either with an action (jabbing the screen at the guessed location of a change) or with words (verbal report), and sat either 60 cm or 300 cm (with a laser pointer) away from the display. Our observers guessed the locations of changes at a reachable distance better with an action than with a verbal judgment. At 300 cm, beyond reach, the motor advantage disappeared. In experiment 3, this advantage was also unavailable when participants sat at a reachable distance but responded with hand-held laser pointers near their bodies. We conclude that a motor system specialized for real-time visually guided behavior has access to additional visual information. 
","lang":"eng"}],"type":"journal_article","publication":"Perception","language":[{"iso":"eng"}],"keyword":["implicit change detection","action perception","visual system","perceptual judgment","verbal fluency","Analysis of Variance","Awareness","Female","Humans","Male","Psychomotor Performance","Random Allocation","Reaction Time","Visual Perception","Visual Memory","Visual Stimulation","Implicit Memory","Judgment","Perceptual Discrimination"],"user_id":"42165","department":[{"_id":"424"}],"_id":"6067","citation":{"chicago":"Tseng, Philip, Jan Tuennermann, Nancy Roker-Knight, Dorina Winter, Ingrid Scharlau, and Bruce Bridgeman. “Enhancing Implicit Change Detection through Action.” <i>Perception</i> 39, no. 10 (2010): 1311–21.","ieee":"P. Tseng, J. Tuennermann, N. Roker-Knight, D. Winter, I. Scharlau, and B. Bridgeman, “Enhancing implicit change detection through action.,” <i>Perception</i>, vol. 39, no. 10, pp. 1311–1321, 2010.","mla":"Tseng, Philip, et al. “Enhancing Implicit Change Detection through Action.” <i>Perception</i>, vol. 39, no. 10, 2010, pp. 1311–21.","short":"P. Tseng, J. Tuennermann, N. Roker-Knight, D. Winter, I. Scharlau, B. Bridgeman, Perception 39 (2010) 1311–1321.","bibtex":"@article{Tseng_Tuennermann_Roker-Knight_Winter_Scharlau_Bridgeman_2010, title={Enhancing implicit change detection through action.}, volume={39}, number={10}, journal={Perception}, author={Tseng, Philip and Tuennermann, Jan and Roker-Knight, Nancy and Winter, Dorina and Scharlau, Ingrid and Bridgeman, Bruce}, year={2010}, pages={1311–1321} }","ama":"Tseng P, Tuennermann J, Roker-Knight N, Winter D, Scharlau I, Bridgeman B. Enhancing implicit change detection through action. <i>Perception</i>. 2010;39(10):1311-1321.","apa":"Tseng, P., Tuennermann, J., Roker-Knight, N., Winter, D., Scharlau, I., &#38; Bridgeman, B. (2010). Enhancing implicit change detection through action. <i>Perception</i>, <i>39</i>(10), 1311–1321."},"page":"1311 - 1321","intvolume":"        39","year":"2010","issue":"10","publication_status":"published","publication_identifier":{"issn":["0301-0066"]},"title":"Enhancing implicit change detection through action.","date_created":"2018-12-10T07:02:19Z","author":[{"full_name":"Tseng, Philip","last_name":"Tseng","first_name":"Philip"},{"last_name":"Tuennermann","full_name":"Tuennermann, Jan","first_name":"Jan"},{"full_name":"Roker-Knight, Nancy","last_name":"Roker-Knight","first_name":"Nancy"},{"last_name":"Winter","full_name":"Winter, Dorina","first_name":"Dorina"},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","full_name":"Scharlau, Ingrid","id":"451","first_name":"Ingrid"},{"first_name":"Bruce","full_name":"Bridgeman, Bruce","last_name":"Bridgeman"}],"volume":39,"date_updated":"2022-06-06T16:47:26Z"},{"department":[{"_id":"424"}],"user_id":"42165","_id":"6090","language":[{"iso":"eng"}],"keyword":["visual selection","attention","information","visual field","brain","Attention","Humans","Models","Psychological","Visual Perception","Volition","Brain","Visual Field","Visual Perception","Visual Attention","Information"],"publication":"Acta Psychologica","type":"journal_article","status":"public","abstract":[{"lang":"eng","text":"Comments on an article by Jan Theeuwes (see record [rid]2010-20897-002[/rid]). Theeuwes summarizes an impressive number of studies demonstrating interference by irrelevant visual singletons in computer experiments with humans. Theeuwes assumes that this salience-driven capture of attention is fast and occurs within 150 ms since singleton onset, during the feed-forward phase of visual processing. In contrast to Theeuwes, we think that top–down contingent capture is the rule and explains initial and fast attention capture effects in the first feed-forward phase of visual processing. During a later phase and under some conditions exogenous capture of attention possibly follows. At the same time, we propose that the evidence presented by Theeuwes fails to support exogenous orienting because it fails to exclude a top–down contingent capture explanation. We present our arguments in two sections. One major source of evidence for top–down controlled attentional capture during the feed-forward"}],"volume":135,"date_created":"2018-12-10T07:08:08Z","author":[{"first_name":"Ulrich","last_name":"Ansorge","full_name":"Ansorge, Ulrich"},{"first_name":"Gernot","last_name":"Horstmann","full_name":"Horstmann, Gernot"},{"orcid":"0000-0003-2364-9489","last_name":"Scharlau","full_name":"Scharlau, Ingrid","id":"451","first_name":"Ingrid"}],"oa":"1","date_updated":"2022-06-07T00:17:51Z","main_file_link":[{"url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/AHSActa2011.pdf","open_access":"1"}],"title":"Top–down contingent attentional capture during feed-forward visual processing.","issue":"2","publication_identifier":{"issn":["0001-6918"]},"publication_status":"published","intvolume":"       135","page":"123 - 126","citation":{"short":"U. Ansorge, G. Horstmann, I. Scharlau, Acta Psychologica 135 (2010) 123–126.","mla":"Ansorge, Ulrich, et al. “Top–down Contingent Attentional Capture during Feed-Forward Visual Processing.” <i>Acta Psychologica</i>, vol. 135, no. 2, 2010, pp. 123–26.","bibtex":"@article{Ansorge_Horstmann_Scharlau_2010, title={Top–down contingent attentional capture during feed-forward visual processing.}, volume={135}, number={2}, journal={Acta Psychologica}, author={Ansorge, Ulrich and Horstmann, Gernot and Scharlau, Ingrid}, year={2010}, pages={123–126} }","ama":"Ansorge U, Horstmann G, Scharlau I. Top–down contingent attentional capture during feed-forward visual processing. <i>Acta Psychologica</i>. 2010;135(2):123-126.","apa":"Ansorge, U., Horstmann, G., &#38; Scharlau, I. (2010). Top–down contingent attentional capture during feed-forward visual processing. <i>Acta Psychologica</i>, <i>135</i>(2), 123–126.","ieee":"U. Ansorge, G. Horstmann, and I. Scharlau, “Top–down contingent attentional capture during feed-forward visual processing.,” <i>Acta Psychologica</i>, vol. 135, no. 2, pp. 123–126, 2010.","chicago":"Ansorge, Ulrich, Gernot Horstmann, and Ingrid Scharlau. “Top–down Contingent Attentional Capture during Feed-Forward Visual Processing.” <i>Acta Psychologica</i> 135, no. 2 (2010): 123–26."},"year":"2010"},{"main_file_link":[{"open_access":"1","url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/HilkenmeierScharlau2010.pdf"}],"oa":"1","date_updated":"2022-06-07T00:18:16Z","volume":22,"author":[{"full_name":"Hilkenmeier, Frederic","last_name":"Hilkenmeier","first_name":"Frederic"},{"full_name":"Scharlau, Ingrid","id":"451","orcid":"0000-0003-2364-9489","last_name":"Scharlau","first_name":"Ingrid"}],"intvolume":"        22","page":"1222 - 1234","citation":{"chicago":"Hilkenmeier, Frederic, and Ingrid Scharlau. “Rapid Allocation of Temporal Attention in the Attentional Blink Paradigm.” <i>European Journal of Cognitive Psychology</i> 22, no. 8 (2010): 1222–34.","ieee":"F. Hilkenmeier and I. Scharlau, “Rapid allocation of temporal attention in the attentional blink paradigm.,” <i>European Journal of Cognitive Psychology</i>, vol. 22, no. 8, pp. 1222–1234, 2010.","ama":"Hilkenmeier F, Scharlau I. Rapid allocation of temporal attention in the attentional blink paradigm. <i>European Journal of Cognitive Psychology</i>. 2010;22(8):1222-1234.","short":"F. Hilkenmeier, I. Scharlau, European Journal of Cognitive Psychology 22 (2010) 1222–1234.","mla":"Hilkenmeier, Frederic, and Ingrid Scharlau. “Rapid Allocation of Temporal Attention in the Attentional Blink Paradigm.” <i>European Journal of Cognitive Psychology</i>, vol. 22, no. 8, 2010, pp. 1222–34.","bibtex":"@article{Hilkenmeier_Scharlau_2010, title={Rapid allocation of temporal attention in the attentional blink paradigm.}, volume={22}, number={8}, journal={European Journal of Cognitive Psychology}, author={Hilkenmeier, Frederic and Scharlau, Ingrid}, year={2010}, pages={1222–1234} }","apa":"Hilkenmeier, F., &#38; Scharlau, I. (2010). Rapid allocation of temporal attention in the attentional blink paradigm. <i>European Journal of Cognitive Psychology</i>, <i>22</i>(8), 1222–1234."},"publication_identifier":{"issn":["0954-1446"]},"publication_status":"published","funded_apc":"1","_id":"6083","department":[{"_id":"424"}],"user_id":"42165","status":"public","type":"journal_article","title":"Rapid allocation of temporal attention in the attentional blink paradigm.","date_created":"2018-12-10T07:06:43Z","year":"2010","issue":"8","keyword":["temporal attention","attentional blink paradigm","first target information","top-down allocation","rapid serial visual presentation","Stimulus Presentation Methods","Visual Stimulation","Visual Attention"],"language":[{"iso":"eng"}],"abstract":[{"lang":"eng","text":"How fast can information of a first target (T1) in a rapid serial visual presentation be used for top-down allocation of attention in time? A valid cue about the temporal position of a second target (T2) was integrated into T1. The data show that 100 ms after T1 onset, T2 was identified better than without cue, raising the conditional T2 performance. T1 apparently triggers a facilitative effect of attention, known from other paradigms such as peripheral cueing. (PsycINFO Database Record (c) 2016 APA, all rights reserved)"}],"publication":"European Journal of Cognitive Psychology"},{"quality_controlled":"1","issue":"5","year":"2010","citation":{"mla":"Schmalenstroeer, Joerg, and Reinhold Haeb-Umbach. “Online Diarization of Streaming Audio-Visual Data for Smart Environments.” <i>IEEE Journal of Selected Topics in Signal Processing</i>, vol. 4, no. 5, 2010, pp. 845–56, doi:<a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">10.1109/JSTSP.2010.2050519</a>.","bibtex":"@article{Schmalenstroeer_Haeb-Umbach_2010, title={Online Diarization of Streaming Audio-Visual Data for Smart Environments}, volume={4}, DOI={<a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">10.1109/JSTSP.2010.2050519</a>}, number={5}, journal={IEEE Journal of Selected Topics in Signal Processing}, author={Schmalenstroeer, Joerg and Haeb-Umbach, Reinhold}, year={2010}, pages={845–856} }","short":"J. Schmalenstroeer, R. Haeb-Umbach, IEEE Journal of Selected Topics in Signal Processing 4 (2010) 845–856.","apa":"Schmalenstroeer, J., &#38; Haeb-Umbach, R. (2010). Online Diarization of Streaming Audio-Visual Data for Smart Environments. <i>IEEE Journal of Selected Topics in Signal Processing</i>, <i>4</i>(5), 845–856. <a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">https://doi.org/10.1109/JSTSP.2010.2050519</a>","ama":"Schmalenstroeer J, Haeb-Umbach R. Online Diarization of Streaming Audio-Visual Data for Smart Environments. <i>IEEE Journal of Selected Topics in Signal Processing</i>. 2010;4(5):845-856. doi:<a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">10.1109/JSTSP.2010.2050519</a>","chicago":"Schmalenstroeer, Joerg, and Reinhold Haeb-Umbach. “Online Diarization of Streaming Audio-Visual Data for Smart Environments.” <i>IEEE Journal of Selected Topics in Signal Processing</i> 4, no. 5 (2010): 845–56. <a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">https://doi.org/10.1109/JSTSP.2010.2050519</a>.","ieee":"J. Schmalenstroeer and R. Haeb-Umbach, “Online Diarization of Streaming Audio-Visual Data for Smart Environments,” <i>IEEE Journal of Selected Topics in Signal Processing</i>, vol. 4, no. 5, pp. 845–856, 2010, doi: <a href=\"https://doi.org/10.1109/JSTSP.2010.2050519\">10.1109/JSTSP.2010.2050519</a>."},"intvolume":"         4","page":"845-856","oa":"1","date_updated":"2023-10-26T08:10:18Z","date_created":"2019-07-12T05:30:16Z","author":[{"first_name":"Joerg","last_name":"Schmalenstroeer","id":"460","full_name":"Schmalenstroeer, Joerg"},{"first_name":"Reinhold","last_name":"Haeb-Umbach","id":"242","full_name":"Haeb-Umbach, Reinhold"}],"volume":4,"title":"Online Diarization of Streaming Audio-Visual Data for Smart Environments","main_file_link":[{"open_access":"1","url":"https://groups.uni-paderborn.de/nt/pubs/2010/ScHa10.pdf"}],"doi":"10.1109/JSTSP.2010.2050519","type":"journal_article","publication":"IEEE Journal of Selected Topics in Signal Processing","abstract":[{"text":"For an environment to be perceived as being smart, contextual information has to be gathered to adapt the system's behavior and its interface towards the user. Being a rich source of context information speech can be acquired unobtrusively by microphone arrays and then processed to extract information about the user and his environment. In this paper, a system for joint temporal segmentation, speaker localization, and identification is presented, which is supported by face identification from video data obtained from a steerable camera. Special attention is paid to latency aspects and online processing capabilities, as they are important for the application under investigation, namely ambient communication. It describes the vision of terminal-less, session-less and multi-modal telecommunication with remote partners, where the user can move freely within his home while the communication follows him. The speaker diarization serves as a context source, which has been integrated in a service-oriented middleware architecture and provided to the application to select the most appropriate I/O device and to steer the camera towards the speaker during ambient communication.","lang":"eng"}],"status":"public","_id":"11892","user_id":"460","department":[{"_id":"54"}],"keyword":["audio streaming","audio visual data streaming","context information speech","face identification","face recognition","image segmentation","middleware","multimodal telecommunication","online diarization","service oriented middleware architecture","sessionless telecommunication","software architecture","speaker identification","speaker localization","speaker recognition","steerable camera","telecommunication computing","temporal segmentation","terminal-less telecommunication","video streaming"],"language":[{"iso":"eng"}]},{"type":"journal_article","publication":"KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.","abstract":[{"lang":"eng","text":"The relevance of top-down information in the deployment of attention has more and more been emphasized in cognitive psychology. We present recent findings about the dynamic of these processes and also demonstrate that task relevance can be adjusted rapidly by incoming bottom-up information. This adjustment substantially increases performance in a subsequent task. Implications for artificial visual models are discussed."}],"status":"public","_id":"28964","user_id":"42165","department":[{"_id":"424"}],"keyword":["visuo-spatial attention","top-down control","task relevance","artificial visual attention","attentional blink"],"language":[{"iso":"eng"}],"funded_apc":"1","publication_status":"published","year":"2009","citation":{"ama":"Hilkenmeier F, Tünnermann J, Scharlau I. Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink. <i>KI 2009: Advances in Artificial Intelligence Proceedings of the 32nd Annual Conference on Artificial Intelligence</i>. Published online 2009.","chicago":"Hilkenmeier, Frederic, Jan Tünnermann, and Ingrid Scharlau. “Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink.” <i>KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.</i>, 2009.","ieee":"F. Hilkenmeier, J. Tünnermann, and I. Scharlau, “Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink,” <i>KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.</i>, 2009.","apa":"Hilkenmeier, F., Tünnermann, J., &#38; Scharlau, I. (2009). Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink. <i>KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.</i>","short":"F. Hilkenmeier, J. Tünnermann, I. Scharlau, KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence. (2009).","mla":"Hilkenmeier, Frederic, et al. “Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink.” <i>KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.</i>, 2009.","bibtex":"@article{Hilkenmeier_Tünnermann_Scharlau_2009, title={Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink}, journal={KI 2009: Advances in Artificial Intelligence. Proceedings of the 32nd Annual Conference on Artificial Intelligence.}, author={Hilkenmeier, Frederic and Tünnermann, Jan and Scharlau, Ingrid}, year={2009} }"},"oa":"1","date_updated":"2022-06-07T00:18:37Z","author":[{"first_name":"Frederic","full_name":"Hilkenmeier, Frederic","last_name":"Hilkenmeier"},{"full_name":"Tünnermann, Jan","last_name":"Tünnermann","first_name":"Jan"},{"full_name":"Scharlau, Ingrid","id":"451","last_name":"Scharlau","orcid":"0000-0003-2364-9489","first_name":"Ingrid"}],"date_created":"2021-12-15T13:09:25Z","title":"Early Top-Down Influences in Control of Attention: Evidence from the Attentional Blink","main_file_link":[{"url":"https://kw.uni-paderborn.de/fileadmin/fakultaet/Institute/psychologie/Kognitive_Psychologie/Publikationen/KI09_Hilkenmeier_TD_AB.pdf","open_access":"1"}]}]
