BibTeX format
@article{Rivera-Rubio:2016:10.1016/j.cviu.2016.02.014,
  author  = {Rivera-Rubio, J. and Arulkumaran, K. and Rishi, H. and Alexiou, I. and Bharath, A. A.},
  title   = {An assistive haptic interface for appearance-based indoor navigation},
  journal = {Computer Vision and Image Understanding},
  volume  = {149},
  pages   = {126--145},
  year    = {2016},
  issn    = {1077-3142},
  doi     = {10.1016/j.cviu.2016.02.014},
  url     = {https://doi.org/10.1016/j.cviu.2016.02.014},
}
RIS format (EndNote, RefMan)
TY - JOUR
AB - Computer vision remains an under-exploited technology for assistive devices. Here, we propose a navigation technique using low-resolution images from wearable or hand-held cameras to identify landmarks that are indicative of a user’s position along crowdsourced paths. We test the components of a system that is able to provide blindfolded users with information about location via tactile feedback. We assess the accuracy of vision-based localisation by making comparisons with estimates of location derived from both a recent SLAM-based algorithm and from indoor surveying equipment. We evaluate the precision and reliability by which location information can be conveyed to human subjects by analysing their ability to infer position from electrostatic feedback in the form of textural (haptic) cues on a tablet device. Finally, we describe a relatively lightweight systems architecture that enables images to be captured and location results to be served back to the haptic device based on journey information from multiple users and devices.
AU - Rivera-Rubio,J
AU - Arulkumaran,K
AU - Rishi,H
AU - Alexiou,I
AU - Bharath,AA
DO - 10.1016/j.cviu.2016.02.014
EP - 145
PY - 2016///
SN - 1077-3142
SP - 126
TI - An assistive haptic interface for appearance-based indoor navigation
T2 - Computer Vision and Image Understanding
UR - http://dx.doi.org/10.1016/j.cviu.2016.02.014
UR - https://www.sciencedirect.com/science/article/pii/S1077314216000680
UR - http://hdl.handle.net/10044/1/38889
VL - 149
ER -