@article{jevtic2018personalized,
  author            = {Jevtic, Aleksandar and Valle, Andres Flores and Alenya, Guillem and Chance, Greg and Caleb-Solly, Praminda and Dogramadzi, Sanja and Torras, Carme},
  title             = {Personalized Robot Assistant for Support in Dressing},
  journal           = {IEEE Transactions on Cognitive and Developmental Systems},
  year              = {2018},
  publisher         = {Institute of Electrical and Electronics Engineers},
  doi               = {10.1109/TCDS.2018.2817283},
  issn              = {2379-8920},
  eissn             = {2379-8939},
  url               = {https://uwe-repository.worktribe.com/output/874685},
  keywords          = {Robotic Engineering and Computing for Healthcare - FET, assistive robots, robot personalization, multimodal human-robot interaction},
  publicationstatus = {Published},
  abstract          = {Robot-assisted dressing is performed in close physical interaction with users who may have a wide range of physical characteristics and abilities. Design of user adaptive and personalized robots in this context is still indicating limited, or no consideration, of specific user-related issues. This paper describes the development of a multi-modal robotic system for a specific dressing scenario - putting on a shoe, where users' personalized inputs contribute to a much improved task success rate. We have developed: 1) user tracking, gesture recognition and posture recognition algorithms relying on images provided by a depth camera; 2) a shoe recognition algorithm from RGB and depth images; 3) speech recognition and text-to-speech algorithms implemented to allow verbal interaction between the robot and user. The interaction is further enhanced by calibrated recognition of the users' pointing gestures and adjusted robot's shoe delivery position. A series of shoe fitting experiments have been performed on two groups of users, with and without previous robot personalization, to assess how it affects the interaction performance. Our results show that the shoe fitting task with the personalized robot is completed in shorter time, with a smaller number of user commands and reduced workload.},
  internal-note     = {NOTE(review): volume/number/pages missing -- look up final IEEE TCDS issue data via the DOI. Second author name order (Valle, Andres Flores) may have surname/given parts swapped (possibly {Flores Valle, Andres}) -- confirm against the published paper.},
}