BibTeX format
@inproceedings{Noronha:2017:10.1109/ICORR.2017.8009387,
author = {Noronha, B and Dziemian, S and Zito, GA and Konnaris, C and Faisal, AA},
doi = {10.1109/ICORR.2017.8009387},
pages = {1043--1048},
publisher = {IEEE},
title = {"Wink to grasp" – comparing eye, voice \& EMG gesture control of grasp with soft-robotic gloves},
url = {http://dx.doi.org/10.1109/ICORR.2017.8009387},
year = {2017}
}
RIS format (EndNote, RefMan)
TY - CPAPER
AB - The ability of robotic rehabilitation devices to support paralysed end-users is ultimately limited by the degree to which human-machine-interaction is designed to be effective and efficient in translating user intention into robotic action. Specifically, we evaluate the novel possibility of using binocular eye-tracking technology to distinguish voluntary winks from involuntary blinks, establishing winks as a novel low-latency control signal to trigger robotic action. By wearing binocular eye-tracking glasses, users can directly observe their environment or the actuator and trigger movement actions, without having to interact with a visual display unit or user interface. We compare our novel approach to two conventional approaches for controlling robotic devices based on electromyography (EMG) and speech-based human-computer interaction technology. We present an integrated software framework based on ROS that allows transparent integration of these multiple modalities with a robotic system. We use a soft-robotic SEM glove (Bioservo Technologies AB, Sweden) to evaluate how the 3 modalities support the performance and subjective experience of the end-user during assisted movement. All 3 modalities are evaluated in streaming, closed-loop control operation for grasping physical objects. We find that wink control shows the lowest mean error rate and the lowest variability (0.23 ± 0.07, mean ± SEM), followed by speech control (0.35 ± 0.13) and EMG gesture control (using the Myo armband by Thalmic Labs), which has the highest mean and variability (0.46 ± 0.16). We conclude that our novel eye-tracking-based approach to controlling assistive technologies is a well-suited alternative to conventional approaches, especially when combined with 3D eye-tracking-based robotic end-point control.
AU - Noronha, B
AU - Dziemian, S
AU - Zito, GA
AU - Konnaris, C
AU - Faisal, AA
DO - 10.1109/ICORR.2017.8009387
EP - 1048
PB - IEEE
PY - 2017///
SP - 1043
TI - "Wink to grasp" – comparing eye, voice & EMG gesture control of grasp with soft-robotic gloves
UR - http://dx.doi.org/10.1109/ICORR.2017.8009387
UR - http://hdl.handle.net/10044/1/48592
ER -