@comment{Back to top}
@comment{Citation}
@inproceedings{rivière2019,
  ids       = {riviere2019},
  author    = {Rivière, Marc-Aurèle and Gay, Simon and Romeo, Katerine and
               Pissaloux, Edwige and Bujacz, Michal and Skulimowski, Piotr and
               Strumillo, Pawel},
  title     = {{NAV-VIR}: An Audio-Tactile Virtual Environment to Assist
               Visually Impaired People},
  booktitle = {Proceedings of the International {IEEE/EMBS} Conference on
               Neural Engineering},
  date      = {2019-05-20},
  pages     = {1038--1041},
  publisher = {IEEE},
  doi       = {10.1109/NER.2019.8717086},
  url       = {https://ieeexplore.ieee.org/document/8717086},
  isbn      = {978-1-5386-7921-0},
  langid    = {en},
  abstract  = {This paper introduces the {NAV-VIR} system, a multimodal
               virtual environment to assist visually impaired people in
               virtually discovering and exploring unknown areas from the
               safety of their home. The originality of {NAV-VIR} resides in
               (1) an optimized representation of the surrounding topography,
               the spatial gist, based on human spatial cognition models and
               the sensorimotor supplementation framework, and (2) a
               multimodal orientation-aware immersive virtual environment
               relying on two synergetic interfaces: an interactive force
               feedback tablet, the {F2T}, and an immersive {HRTF}-based
               {3D} audio simulation relying on binaural recordings of real
               environments. This paper presents {NAV-VIR} functionalities
               and its preliminary evaluation through a simple shape and
               movement perception task.},
}