@inproceedings{b6de6b61a2e441a7a8ec71fbbae0ebd2,
  title     = {Piggybacking Robots: Human-Robot Overtrust in University Dormitory Security},
  author    = {Booth, Serena and Tompkin, James and Pfister, Hanspeter and Waldo, Jim and Gajos, Krzysztof and Nagpal, Radhika},
  booktitle = {HRI 2017 - Proceedings of the 2017 ACM/IEEE International Conference on Human-Robot Interaction},
  series    = {ACM/IEEE International Conference on Human-Robot Interaction},
  publisher = {IEEE Computer Society},
  address   = {United States},
  year      = {2017},
  month     = mar,
  day       = {6},
  pages     = {426--434},
  doi       = {10.1145/2909824.3020211},
  keywords  = {overtrust, piggybacking, robotics, secure access, tailgating, trust},
  language  = {English (US)},
  abstract  = {Can overtrust in robots compromise physical security? We conducted a series of experiments in which a robot positioned outside a secure-access student dormitory asked passersby to assist it to gain access. We found individual participants were as likely to assist the robot in exiting the dormitory (40% assistance rate, 4/10 individuals) as in entering (19%, 3/16 individuals). Groups of people were more likely than individuals to assist the robot in entering (71%, 10/14 groups). When the robot was disguised as a food delivery agent for the fictional start-up Robot Grub, individuals were more likely to assist the robot in entering (76%, 16/21 individuals). Lastly, we found participants who identified the robot as a bomb threat demonstrated a trend toward assisting the robot (87%, 7/8 individuals, 6/7 groups). Thus, we demonstrate that overtrust---the unfounded belief that the robot does not intend to deceive or carry risk---can represent a significant threat to physical security at a university dormitory.},
  note      = {Publisher Copyright: {\textcopyright} 2017 ACM.; 12th Annual ACM/IEEE International Conference on Human-Robot Interaction, HRI 2017 ; Conference date: 06-03-2017 Through 09-03-2017},
}