@article{Grzejszczak_Tomasz_Gestures_2026,
  author   = {Grzejszczak, Tomasz},
  title    = {Gestures for attraction, gestures for communication: A field study of social robot interaction modalities at an educational fair},
  journal  = {Bulletin of the Polish Academy of Sciences Technical Sciences},
  volume   = {74},
  number   = {3},
  pages    = {e158302},
  year     = {2026},
  doi      = {10.24425/bpasts.2026.158302},
  url      = {http://journals.pan.pl/Content/138761/PDF/BPASTS-05439-EA.pdf},
  keywords = {human-robot interaction, HRI, social robot, gestural interaction, engagement and communication},
  abstract = {This paper investigates human-robot interaction (HRI) within the challenging, high-noise environments of educational fairs, advocating for an HCI-centered approach to evaluate social robots in unpredictable public spaces. Utilizing the advanced social robot Furhat, the study compares the effectiveness of speech-based interaction against gesture-based recognition to overcome the limitations of traditional voice systems in loud settings. Five interaction programs were evaluated: a Large Language Model (LLM) variant relying on voice, and two gesture-controlled games (Rock-Paper-Scissors and Blocks guessing game) tested both with and without passive robot gestures. The results, drawn from field tests, demonstrate that the gesture recognition module is a highly effective alternative to speech recognition in noisy environments. While the voice-based LLM program struggled with a 33\% success rate and high idle times due to environmental noise, gesture-based interactions achieved significantly higher success rates, ranging from 77\% to 96\%. Furthermore, the study confirms that a gesturing social robot is significantly more effective at attracting attention. The inclusion of passive gestures reduced the robot idle time from an average of 141.7--143.7 seconds to 105.6--106.5 seconds, while increasing participant engagement by 16\% to 21\%. These findings underscore the importance of non-verbal communication and multimodal perception in fostering reliable and engaging HRI in dynamic, high-social environments.},
}