@inproceedings{21, author = {Pourya Aliasghari and Chrystopher Nehaniv and Moojan Ghafurian and Kerstin Dautenhahn}, title = {Improving Robot Learning Outcomes in Human-Robot Teaching: The Role of Human Teachers’ Awareness of a Robot’s Visual Constraints}, abstract = {
To learn effectively, robots will sometimes need to select more suitable human teachers. For robots that learn through visual observation, we propose an attribute of human teachers, namely their awareness of and attention to the robot’s visual capabilities and constraints, and explore how it affects robot learning outcomes. In an in-person experiment involving 72 participants who taught three physical tasks to an iCub humanoid robot, we manipulated teachers’ awareness of the robot’s visual constraints by offering the visual perspective of the robot in one of the experimental conditions. Participants who were able to see the robot’s vision output paid greater attention to ensuring that task objects were visible to the robot when providing demonstrations of the physical tasks. This increased attention to the robot’s view resulted in better learning outcomes for the robot, as indicated by lower perception error rates and higher learning scores. This study contributes to understanding factors in human teachers that lead to better learning outcomes for robots.
}, year = {2025}, booktitle = {IEEE International Conference on Robot and Human Interactive Communication (RO-MAN)}, month = {11}, publisher = {IEEE}, address = {Eindhoven, Netherlands}, url = {https://ieeexplore.ieee.org/abstract/document/11217582}, }