@inproceedings{44,
  author        = {Nair, V. and Budhai, M. and Olmschenk, G. and Seiple, W. and Zhu, Z.},
  title         = {{ASSIST}: Personalized Indoor Navigation via Multimodal Sensors and High-Level Semantic Information},
  booktitle     = {Computer Vision -- {ECCV} 2018 Workshops},
  editor        = {Leal-Taix{\'e}, L. and Roth, S.},
  series        = {Lecture Notes in Computer Science},
  volume        = {11134},
  pages         = {128--143},
  publisher     = {Springer},
  year          = {2019},
  month         = jan,
  doi           = {10.1007/978-3-030-11024-6_9},
  url           = {https://par.nsf.gov/biblio/10110459},
  abstract      = {Blind \& visually impaired (BVI) individuals and those with Autism Spectrum Disorder (ASD) each face unique challenges in navigating unfamiliar indoor environments. In this paper, we propose an indoor positioning and navigation system that guides a user from point A to point B indoors with high accuracy while augmenting their situational awareness. This system has three major components: location recognition (a hybrid indoor localization app that uses Bluetooth Low Energy beacons and Google Tango to provide high accuracy), object recognition (a body-mounted camera to provide the user momentary situational awareness of objects and people), and semantic recognition (map-based annotations to alert the user of static environmental characteristics). This system also features personalized interfaces built upon the unique experiences that both BVI and ASD individuals have in indoor wayfinding and tailors its multimodal feedback to their needs. Here, the technical approach and implementation of this system are discussed, and the results of human subject tests with both BVI and ASD individuals are presented. In addition, we discuss and show the system's user-centric interface and present points for future work and expansion.},
  internal-note = {Review: original export had journal={Leal-Taix\'e L., Roth S. (eds) ...}, chapter={128}, pages={16}; pages reconstructed as 128--143 from start page 128 + 16-page count -- verify against the Springer chapter record. Full author given names and publisher city not in source; confirm before adding.},
}