@article{36481, abstract = {{Recent studies highlight early childhood teachers’ mathematics-related competence. Developing this competence should be a main aspect of early childhood teachers’ education. This is, however, not the case in all countries. Consequently, high-quality professional development courses are needed. Based on research results, we developed a competence-oriented continuous professional development course ("EmMa") and examined the effects of "EmMa" by asking: How does "EmMa" affect the development of early childhood teachers’ i) mathematical content knowledge, ii) mathematical pedagogical content knowledge and iii) beliefs towards mathematics in general? To answer these questions, we conducted a pre-test/post-test study including a control group with 99 in-service early childhood teachers. Results show that the course affected teachers’ mathematical pedagogical content knowledge and static orientation towards mathematics positively. From this we conclude that scaling-up "EmMa" might be a suitable approach to bridge the gap between pre-service education with nearly no mathematics and the challenges of early mathematics education.}}, author = {{Bruns, Julia and Eichen, Lars and Gasteiger, Hedwig}}, journal = {{Mathematics Teacher Education and Development (MTED)}}, keywords = {{Beliefs, Competency Based Teacher Education, Control Groups, Early Childhood Education, Faculty Development, Foreign Countries, Inservice Teacher Education, Intervention, Mathematical Aptitude, Mathematics Skills, Pedagogical Content Knowledge, Preschool Teachers, Pretests Posttests, Professional Continuing Education, Statistical Analysis, Teacher Competency Testing}}, number = {{3}}, pages = {{76--93}}, title = {{{Mathematics-related Competence of Early Childhood Teachers Visiting a Continuous Professional Development Course: An Intervention Study}}}, volume = {{19}}, year = {{2017}}, }

@article{4690, author = {{Gorbacheva, Elena and Stein, Armin and Schmiedel, Theresa and Müller, Oliver}}, issn = {{1867-0202}}, journal = {{Business and Information Systems Engineering}}, keywords = {{BPM workforce, Business process management, Competences, Gender diversity, Latent semantic analysis, Skills, Text mining}}, number = {{3}}, pages = {{213--231}}, title = {{{The Role of Gender in Business Process Management Competence Supply}}}, doi = {{10.1007/s12599-016-0428-2}}, year = {{2016}}, }

@article{4692, author = {{Müller, Oliver and Schmiedel, Theresa and Gorbacheva, Elena and vom Brocke, Jan}}, issn = {{1751-7583}}, journal = {{Enterprise Information Systems}}, keywords = {{abilities, business process management, competences, knowledge, latent semantic analysis, professionals, skills, typology}}, number = {{1}}, pages = {{50--80}}, title = {{{Towards a typology of business process management professionals: identifying patterns of competences through latent semantic analysis}}}, doi = {{10.1080/17517575.2014.923514}}, year = {{2016}}, }

@article{17225, abstract = {{How is communicative gesture behavior in robots perceived by humans? Although gesture is crucial in social interaction, this research question is still largely unexplored in the field of social robotics. Thus, the main objective of the present work is to investigate how gestural machine behaviors can be used to design more natural communication in social robots. The chosen approach is twofold. Firstly, the technical challenges encountered when implementing a speech-gesture generation model on a robotic platform are tackled.
We present a framework that enables the humanoid robot to flexibly produce synthetic speech and co-verbal hand and arm gestures at run-time, while not being limited to a predefined repertoire of motor actions. Secondly, the achieved flexibility in robot gesture is exploited in controlled experiments. To gain a deeper understanding of how communicative robot gesture might impact and shape human perception and evaluation of human-robot interaction, we conducted a between-subjects experimental study using the humanoid robot in a joint task scenario. We manipulated the non-verbal behaviors of the robot in three experimental conditions, so that it would refer to objects by utilizing either (1) unimodal (i.e., speech only) utterances, (2) congruent multimodal (i.e., semantically matching speech and gesture) or (3) incongruent multimodal (i.e., semantically non-matching speech and gesture) utterances. Our findings reveal that the robot is evaluated more positively when non-verbal behaviors such as hand and arm gestures are displayed along with speech, even if they do not semantically match the spoken utterance.}}, author = {{Salem, Maha and Kopp, Stefan and Wachsmuth, Ipke and Rohlfing, Katharina and Joublin, Frank}}, issn = {{1875-4805}}, journal = {{International Journal of Social Robotics, Special Issue on Expectations, Intentions, and Actions}}, keywords = {{Social Human-Robot Interaction, Multimodal Interaction and Conversational Skills, Robot Companions and Social Robots, Non-verbal Cues and Expressiveness}}, number = {{2}}, pages = {{201--217}}, publisher = {{Springer Science + Business Media}}, title = {{{Generation and evaluation of communicative robot gesture}}}, doi = {{10.1007/s12369-011-0124-9}}, volume = {{4}}, year = {{2012}}, }

@inproceedings{17430, abstract = {{Previous work has shown that gestural behaviors affect anthropomorphic inferences about artificial communicators such as virtual agents. In an experiment with a humanoid robot, we investigated to what extent gesture would affect anthropomorphic inferences about the robot. Particularly, we examined the effects of the robot's hand and arm gestures on the attribution of typically human traits, likability of the robot, shared reality, and future contact intentions after interacting with the robot. For this, we manipulated the non-verbal behaviors of the humanoid robot in three experimental conditions: (1) no gesture, (2) congruent gesture, and (3) incongruent gesture. We hypothesized higher ratings on all dependent measures in the two gesture (vs. no gesture) conditions. The results confirm our predictions: when the robot used gestures during interaction, it was anthropomorphized more, participants perceived it as more likable, reported greater shared reality with it, and showed increased future contact intentions than when the robot gave instructions without using gestures. Surprisingly, this effect was particularly pronounced when the robot's gestures were partly incongruent with speech. These findings show that communicative non-verbal behaviors in robotic systems affect both anthropomorphic perceptions and the mental models humans form of a humanoid robot during interaction.}}, author = {{Salem, Maha and Eyssel, Friederike Anne and Rohlfing, Katharina and Kopp, Stefan and Joublin, F.}}, booktitle = {{Social Robotics}}, editor = {{Mutlu, B. and Bartneck, C. and Ham, J. and Evers, V. and Kanda, T.}}, isbn = {{978-3-642-25503-8}}, keywords = {{Multimodal Interaction and Conversational Skills, Anthropomorphism, Non-verbal Cues and Expressiveness}}, pages = {{31--41}}, publisher = {{Springer Science + Business Media}}, title = {{{Effects of gesture on the perception of psychological anthropomorphism: A case study with a humanoid robot}}}, doi = {{10.1007/978-3-642-25504-5_4}}, volume = {{7072}}, year = {{2011}}, }