<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JRAT</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Rehabil Assist Technol</journal-id>
      <journal-title>JMIR Rehabilitation and Assistive Technologies</journal-title>
      <issn pub-type="epub">2369-2529</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v11i1e55776</article-id>
      <article-id pub-id-type="pmid">39556804</article-id>
      <article-id pub-id-type="doi">10.2196/55776</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Navigation Training for Persons With Visual Disability Through Multisensory Assistive Technology: Mixed Methods Experimental Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Munce</surname>
            <given-names>Sarah</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Baxter</surname>
            <given-names>Clarence</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Pimentel</surname>
            <given-names>Maria da Graca</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Abbatantuono</surname>
            <given-names>Chiara</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Ricci</surname>
            <given-names>Fabiana Sofia</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0002-3818-7395</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Liguori</surname>
            <given-names>Lorenzo</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff2" ref-type="aff">2</xref>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-0352-0736</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Palermo</surname>
            <given-names>Eduardo</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-3213-8261</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Rizzo</surname>
            <given-names>John-Ross</given-names>
          </name>
          <degrees>MD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff4" ref-type="aff">4</xref>
          <xref rid="aff5" ref-type="aff">5</xref>
          <xref rid="aff6" ref-type="aff">6</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4084-0085</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Porfiri</surname>
            <given-names>Maurizio</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <address>
            <institution>Center for Urban Science and Progress</institution>
            <institution>New York University Tandon School of Engineering</institution>
            <addr-line>Brooklyn, New York, NY</addr-line>
            <country>United States</country>
            <phone>1 646 997 3681</phone>
            <email>mporfiri@nyu.edu</email>
          </address>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-1480-3539</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Biomedical Engineering</institution>
        <institution>New York University Tandon School of Engineering</institution>
        <addr-line>Brooklyn, New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Center for Urban Science and Progress</institution>
        <institution>New York University Tandon School of Engineering</institution>
        <addr-line>Brooklyn, New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Mechanical and Aerospace Engineering</institution>
        <institution>Sapienza University of Rome</institution>
        <addr-line>Rome</addr-line>
        <country>Italy</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Department of Mechanical and Aerospace Engineering</institution>
        <institution>New York University Tandon School of Engineering</institution>
        <addr-line>Brooklyn, New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff5">
        <label>5</label>
        <institution>Department of Rehabilitation Medicine</institution>
        <institution>New York University Langone Health</institution>
        <addr-line>New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff6">
        <label>6</label>
        <institution>Department of Neurology</institution>
        <institution>New York University Langone Health</institution>
        <addr-line>New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Maurizio Porfiri <email>mporfiri@nyu.edu</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2024</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>18</day>
        <month>11</month>
        <year>2024</year>
      </pub-date>
      <volume>11</volume>
      <elocation-id>e55776</elocation-id>
      <history>
        <date date-type="received">
          <day>23</day>
          <month>12</month>
          <year>2023</year>
        </date>
        <date date-type="rev-request">
          <day>12</day>
          <month>3</month>
          <year>2024</year>
        </date>
        <date date-type="rev-recd">
          <day>8</day>
          <month>4</month>
          <year>2024</year>
        </date>
        <date date-type="accepted">
          <day>14</day>
          <month>10</month>
          <year>2024</year>
        </date>
      </history>
      <copyright-statement>©Fabiana Sofia Ricci, Lorenzo Liguori, Eduardo Palermo, John-Ross Rizzo, Maurizio Porfiri. Originally published in JMIR Rehabilitation and Assistive Technology (https://rehab.jmir.org), 18.11.2024.</copyright-statement>
      <copyright-year>2024</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Rehabilitation and Assistive Technology, is properly cited. The complete bibliographic information, a link to the original publication on https://rehab.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://rehab.jmir.org/2024/1/e55776" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Visual disability is a growing problem for many middle-aged and older adults. Conventional mobility aids, such as white canes and guide dogs, have notable limitations that have led to increasing interest in electronic travel aids (ETAs). Despite remarkable progress, current ETAs lack empirical evidence and realistic testing environments and often focus on the substitution or augmentation of a single sense.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This study aims to (1) establish a novel virtual reality (VR) environment to test the efficacy of ETAs in complex urban environments for a simulated visual impairment (VI) and (2) evaluate the impact of haptic and audio feedback, individually and combined, on navigation performance, movement behavior, and perception. Through this study, we aim to address gaps to advance the pragmatic development of assistive technologies (ATs) for persons with VI.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>The VR platform was designed to resemble a subway station environment with the most common challenges faced by persons with VI during navigation. This environment was used to test our multisensory, AT-integrated VR platform among 72 healthy participants performing an obstacle avoidance task while experiencing symptoms of VI. Each participant performed the task 4 times: once with haptic feedback, once with audio feedback, once with both feedback types, and once without any feedback. Data analysis encompassed metrics such as completion time, head and body orientation, and trajectory length and smoothness. To evaluate the effectiveness and interaction of the 2 feedback modalities, we conducted a 2-way repeated measures ANOVA on continuous metrics and a Scheirer-Ray-Hare test on discrete ones. We also conducted a descriptive statistical analysis of participants’ answers to a questionnaire, assessing their experience and preference for feedback modalities.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>Results from our study showed that haptic feedback significantly reduced collisions (<italic>P</italic>=.05) and the variability of the pitch angle of the head (<italic>P</italic>=.02). Audio feedback improved trajectory smoothness (<italic>P</italic>=.006) and mitigated the increase in the trajectory length from haptic feedback alone (<italic>P</italic>=.04). Participants reported a high level of engagement during the experiment (52/72, 72%) and found it interesting (42/72, 58%). However, when it came to feedback preferences, less than half of the participants (29/72, 40%) favored combined feedback modalities. This indicates that a majority preferred dedicated single modalities over combined ones.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>AT is crucial for individuals with VI; however, it often lacks user-centered design principles. Research should prioritize consumer-oriented methodologies, testing devices in a staged manner with progression toward more realistic, ecologically valid settings to ensure safety. Our multisensory, AT-integrated VR system takes a holistic approach, offering a first step toward enhancing users’ spatial awareness, promoting safer mobility, and holds potential for applications in medical treatment, training, and rehabilitation. Technological advancements can further refine such devices, significantly improving independence and quality of life for those with VI.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>assistive technology</kwd>
        <kwd>human-computer interaction</kwd>
        <kwd>multisensory feedback</kwd>
        <kwd>virtual reality</kwd>
        <kwd>visual impairment</kwd>
        <kwd>haptic</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <sec>
        <title>Background</title>
        <p>Visual impairment (VI) affects a considerable proportion of middle-aged and older adults [<xref ref-type="bibr" rid="ref1">1</xref>]. In the United States alone, approximately 12 million people aged ≥40 years experience VI, with about a million of them experiencing blindness [<xref ref-type="bibr" rid="ref2">2</xref>]. Globally, the statistics are similar, with reports from the World Health Organization indicating that there are ≥2.2 billion people with eye and vision problems [<xref ref-type="bibr" rid="ref3">3</xref>]. Not only is VI an important contribution to mobility disability, it is also associated with increased risks of stroke, arthritis, diabetes, and cancer [<xref ref-type="bibr" rid="ref4">4</xref>-<xref ref-type="bibr" rid="ref6">6</xref>]. VI is also significantly associated with decreased life satisfaction, unemployment, and social isolation, which may lead to depression and increased risk of suicidal behavior [<xref ref-type="bibr" rid="ref7">7</xref>-<xref ref-type="bibr" rid="ref9">9</xref>]. Considerable economic costs are also associated with VI due to productivity losses, costs to the health system to provide accessible eye care, and other financial implications of vision loss and its comorbid conditions [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref11">11</xref>].</p>
        <p>The autonomy of persons with VI is often jeopardized by the many everyday tasks they need to attend to, including traveling through unknown environments. One of the chief challenges to achieving independence for persons with VI is associated with safe, independent, and efficient navigation, particularly in unfamiliar locations [<xref ref-type="bibr" rid="ref12">12</xref>-<xref ref-type="bibr" rid="ref14">14</xref>]. Conventional navigation aids include white canes and guide dogs [<xref ref-type="bibr" rid="ref15">15</xref>]. Although these aids provide valuable mobility support, they bear important limitations that preclude their widespread adoption. In fact, only an estimated 2% to 8% of persons with VI use white canes or guide dogs in the United States [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>]. The white cane is light, portable, and easily replaceable, but it can only detect objects through physical contact. It is unable to provide any information about sublevel pits or holes, uneven terrain, and obstacles that are not in the range of the stick. Likewise, it is difficult to use for detecting moving objects, such as cars and people [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref19">19</xref>]. Guide dogs may help with more security in new and unfamiliar areas and can improve the safety of their owners. However, guide dogs are expensive, their training period is long, and they are only viable for about 7 years [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref19">19</xref>]. In the last 20 years, several studies have focused on assistive devices to foster independence and facilitate navigation of persons with VI in indoor and outdoor environments. 
These technologies, known as electronic travel aids (ETAs), are devices that collect environmental information using 1 or more sensors and transmit such information to the user through touch and sound [<xref ref-type="bibr" rid="ref20">20</xref>]. The state of the art offers a wide range of ETAs that incorporate functions for obstacle avoidance or route selection [<xref ref-type="bibr" rid="ref21">21</xref>-<xref ref-type="bibr" rid="ref23">23</xref>].</p>
        <p>Development of ETAs with regard to production and commercialization is still hindered by 2 main factors [<xref ref-type="bibr" rid="ref24">24</xref>].</p>
        <p>The first factor is the lack of empirical evidence about the extent to which such devices detect obstacles and improve performance in mobility tasks [<xref ref-type="bibr" rid="ref25">25</xref>]. In fact, most systems developed for persons with VI have concentrated on addressing the deficit of sight through the enhancement of a singular sensory input. Often, the emphasis has been on substituting or augmenting visual information through technologies that cater to touch or sound [<xref ref-type="bibr" rid="ref26">26</xref>-<xref ref-type="bibr" rid="ref28">28</xref>]. While these approaches to sensory substitution have shown promising outcomes, they may miss out on the broader advantages of combining multiple senses. Relying on a single sensory modality could limit the overall appraisal of the environment for individuals with VI [<xref ref-type="bibr" rid="ref29">29</xref>]. A multisensory approach could offer a more nuanced and complete perception of surroundings, paving the way for more effective solutions for persons with VI [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>].</p>
        <p>Second, the state of the art on ETA testing has relied on artificial or noncontrolled settings that limit one’s ability to assess the value of any particular approach before field deployment [<xref ref-type="bibr" rid="ref32">32</xref>-<xref ref-type="bibr" rid="ref35">35</xref>]. In particular, users are guided through these setups using game pads or joysticks, which may inadequately emulate the unpredictable challenges encountered in daily life by persons with VI [<xref ref-type="bibr" rid="ref36">36</xref>]. Experimental validation in these less-than-realistic environments with limited ecological validity might result in an inaccurate estimation of the system’s effectiveness. Real-world conditions introduce a multitude of variables and complexities that are challenging to replicate artificially, emphasizing the need for more comprehensive testing strategies that better reflect the dynamic nature of everyday scenarios.</p>
        <p>In this study, we propose an assistive technology (AT) combining haptic and audio cues to provide comprehensive obstacle avoidance assistance. The haptic feedback was delivered through an improved version of the wearable system previously developed by our group [<xref ref-type="bibr" rid="ref37">37</xref>-<xref ref-type="bibr" rid="ref39">39</xref>], consisting of a belt equipped with an array of actuators positioned around the user’s abdomen. This tactile interface served as an intuitive guide, conveying real-time information about the proximity of obstacles in the user’s surroundings. The proposed ETA features an audio feedback component that uses beep sounds to alert users to potential obstacles.</p>
        <p>We developed a virtual reality (VR) framework to explore the effectiveness of the multisensory AT on healthy participants, before field deployment on persons with VI. VR provides a versatile platform for seamlessly incorporating various haptic feedback modalities and enhancing them with complementary audio effects, thereby facilitating navigation within virtual environments [<xref ref-type="bibr" rid="ref40">40</xref>,<xref ref-type="bibr" rid="ref41">41</xref>]. The precision of VR allows for the accurate simulation of diverse, and even rare, forms of eye pathologies [<xref ref-type="bibr" rid="ref42">42</xref>-<xref ref-type="bibr" rid="ref44">44</xref>]. The ability to simulate VI has broad applications across science, engineering, and medicine. For example, effective VI simulations could enhance public understanding of VIs, potentially aiding in early disease diagnosis [<xref ref-type="bibr" rid="ref45">45</xref>-<xref ref-type="bibr" rid="ref48">48</xref>].</p>
      </sec>
      <sec>
        <title>Study Overview</title>
        <p>Our study involved the creation of a realistic and dynamic subway station environment, where 72 healthy participants performed a virtual obstacle avoidance task while experiencing simulated VI. The experiment comprised 4 conditions: haptic feedback only, audio feedback only, both haptic and audio feedback, and no feedback. For each condition, we gathered data on participants’ navigation performance, including time to complete the task, number of collisions, trajectory length, and smoothness, as well as their movement behavior, encompassing head and body orientation. Through a multifaceted comparison of participants’ movement behavior and navigation performance across conditions, we sought to evaluate the role of haptic and audio feedback, both individually and in combination, on users’ mobility and behavior. We envision this platform as a robust and easily customizable tool for investigating diverse feedback modalities, contributing to a deeper understanding of the needs of individuals with VI, and fostering continuous advancements in the design and development of ATs.</p>
      </sec>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>VR Platform</title>
        <sec>
          <title>Design of the Environment</title>
          <p>We built a VR platform to assess the effect of different types of feedback modalities and their combination on users’ behavior and navigation performance. VR constitutes an ideal framework to test different conditions in highly realistic and dynamic scenarios [<xref ref-type="bibr" rid="ref49">49</xref>-<xref ref-type="bibr" rid="ref51">51</xref>]. We designed a multisensory, AT-integrated VR system comprising audio feedback implemented in VR and a haptic feedback device interfaced with the virtual environment. We conceived an obstacle avoidance task to assess the ability of the 2 feedback modalities (individually or together) to enhance the mobility of persons with VI.</p>
          <p>The application was built and run on a Lenovo Legion 5 15IMH05H gaming laptop. To optimize the gaming stream and ensure the immersiveness of the application, we used a TP-Link Archer GX90 AX6600 Tri-Band Wi-Fi 6 Gaming Router. The Unity game engine (version 2019.4.9f1) was used to develop a VR application for the Meta Quest 2 headset and Touch controllers. Users navigated the virtual environment by physically walking in a first-person perspective. In VR, we designed 2 floors of a subway station whose size matched the dimensions of the physical environment where the experiment took place. The 2 environments included common obstacles and hazards that may be encountered while walking in a subway station, such as broken elevators, construction sites, working tools, garbage, scaffoldings, signage furnishing, and turnstiles (<xref rid="figure1" ref-type="fig">Figure 1</xref>). A food vendor, a street musician, and other pedestrians were included to increase engagement and dynamism of the overall environment (<xref rid="figure1" ref-type="fig">Figure 1</xref>). We also simulated an elevator ride from the first floor to the second floor of the virtual subway station (<xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p>
          <fig id="figure1" position="float">
            <label>Figure 1</label>
            <caption>
              <p>Example of the virtual reality environments implemented in this study: (A) the first floor and (B) second floor of a subway station. We simulated an elevator ride from the first floor to the second floor of the subway station environment.</p>
            </caption>
            <graphic xlink:href="rehab_v11i1e55776_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>To create a realistic VR experience, sound effects related to a subway station environment, including those of animated pedestrians, were added. As shown by prior studies, integrating sounds related to the visual content enhances the sense of presence of participants in a virtual environment [<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref53">53</xref>]. To integrate realistic audio effects in the VR application, we used FMOD, an end-to-end solution for sound that integrates seamlessly with Unity. It simplifies the process of creating sound behaviors, with a comprehensive set of features that allows one to quickly and easily build adaptive audio.</p>
        </sec>
        <sec>
          <title>VI Simulation</title>
          <p>In VR, we simulated different aspects of VI, including peripheral vision loss, reduced contrast sensitivity, altered color perception, and glare [<xref ref-type="bibr" rid="ref54">54</xref>], as shown in <xref rid="figure2" ref-type="fig">Figure 2</xref> (refer to <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref> for more details). Impairment severity was based on the extent of peripheral vision loss and the intensity of the simulated symptoms. Our simulation of peripheral vision loss specifically targeted the severe stage of glaucoma, a prevalent cause of VI among adults in the United States, which is known for its substantial impact on mobility [<xref ref-type="bibr" rid="ref55">55</xref>]. This progressive reduction of the peripheral visual field in glaucoma impedes the clear identification of objects, which is crucial for obtaining wide-field information about the environment [<xref ref-type="bibr" rid="ref56">56</xref>,<xref ref-type="bibr" rid="ref57">57</xref>]. Realistic simulation of such symptoms was accomplished by combining postprocessing effects and C# scripts coded in Unity. Specifically, we combined rendering and graphic tools provided by Unity, such as shader and culling mask. A shader is a mini-program that provides a flexible way of dynamically tweaking the appearance of any components in the scene (such as models and lights). A culling mask is a camera’s property that allows one to selectively render objects in the scene. We used a Gaussian blur shader to reproduce the symptoms of glare and blurriness and a culling mask to create the visual effects of peripheral vision loss, reduced contrast sensitivity, and altered color perception.</p>
          <fig id="figure2" position="float">
            <label>Figure 2</label>
            <caption>
              <p>Effects on vision due to a visual impairment simulated in virtual reality: peripheral vision loss, reduced contrast sensitivity, altered color perception, and glare.</p>
            </caption>
            <graphic xlink:href="rehab_v11i1e55776_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>To ensure the realism and accuracy of our simulations, we sought the expertise of 2 professionals familiar with low-vision conditions. Specifically, a certified orientation and mobility specialist (also a certified low-vision therapist) with ≥30 years of experience in the field and the chief research officer at an American nonprofit organization dedicated to vision rehabilitation and advocacy for the blind, who is also a research professor of ophthalmology at New York University Grossman School of Medicine, provided their expertise.</p>
        </sec>
      </sec>
      <sec>
        <title>Multisensory, AT-Integrated VR System</title>
        <sec>
          <title>Obstacle Detection</title>
          <p>Obstacle detection was implemented using the <italic>UnityEngine.PhysicsModule</italic>. Specifically, the <italic>Spherecast</italic> function was used to project a sphere of a given radius into the scene. The function returns a true Boolean value when an object in the virtual environment is hit by the sphere, and it provides information about the distance between the projection point and the object.</p>
        </sec>
        <sec>
          <title>Haptic Feedback</title>
          <p>The haptic feedback was provided by a wearable device in the form of a belt that improves on our team’s previous effort [<xref ref-type="bibr" rid="ref37">37</xref>-<xref ref-type="bibr" rid="ref39">39</xref>]. The belt was equipped with 10 cylindrical eccentric rotating mass actuators (Precision Microdrives Ltd, model number 307-103) with a diameter of 9 mm and a length of 25 mm. We opted for this type of actuator as it is widely available, simple to use, and inexpensive. The actuators were arranged on 6 distinct modular units that could be added or removed easily based on users’ preference, ability, and experience with the device (<xref rid="figure3" ref-type="fig">Figure 3</xref>). The units were designed in SolidWorks (version 2019) and 3D printed on a Bambu Lab X1C. Precisely, the 4 central modules had 2 actuators each disposed horizontally and separated by a vertical distance of 85 mm. In these central modules, each actuator was enclosed in a parallelepipedal housing of dimensions 35 mm × 42 mm × 10 mm. The housing was made of polylactic acid. To minimize the vibrations inside the modules, each actuator was connected through springs to a flexible element of thermoplastic polyurethane. The 2 modules at the ends of the belt each had a single actuator positioned vertically in the center. In these lateral modules, each actuator was enclosed in a parallelepipedal housing of dimensions 45 mm × 60 mm × 12 mm.</p>
          <fig id="figure3" position="float">
            <label>Figure 3</label>
            <caption>
              <p>Picture of the new prototype of the haptic feedback device tested in this study.</p>
            </caption>
            <graphic xlink:href="rehab_v11i1e55776_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>Once assembled, the modules were evenly inserted on the 2 straps of a commercial waist bag, which was secured above the user’s hips through a buckle. Inside the waist bag, we placed all the electronic components needed to control and power the belt, namely, a custom printed circuit board, an ESP32 microcontroller (Espressif Systems), and a Krisdonia 50,000 mAh power bank. The function of the actuators on the belt was to provide environmental information through vibration feedback on the users’ abdomen. Specifically, the vibration indicated the presence and location of obstacles near the user in the virtual environment. The amplitude and frequency of the vibration were programmed to vary on 3 levels based on the distance from the obstacles; information about the position and location of closer obstacles was conveyed through higher amplitude and frequency. The belt was connected to the laptop via Wi-Fi using the ESP32 microcontroller. The interface between the belt and the VR environment was enabled through a server or client transmission control protocol established in a C# script.</p>
          <p>The user’s field of view in VR, characterized by a horizontal span of 89° and a vertical span of 93° (per the Meta Oculus Quest 2 specifications), was discretized into a grid comprising 10 sectors. This grid layout closely mirrored the configuration of actuators on the haptic feedback belt. Each sector was then associated with a virtual sphere projected from the user’s body. The 10 resulting spheres were positioned to align with the 10 field of view sectors. Anytime an obstacle fell into a sector, it was detected by a specific sphere, and information to activate the actuators was sent through the transmission control protocol to the ESP32 microcontroller. The latter used pulse width modulation to control a metal-oxide-semiconductor field-effect transistor driver (Texas Instruments) placed in the printed circuit board, which fed the actuators. The maximum hit distance of the spheres was set to 2.5 m based on pilot testing of the haptic feedback system. This value determined the range of action of the belt. The frequency of vibration was regulated on 3 levels based on the distance of the object from the user in VR by means of a C++ code.</p>
        </sec>
        <sec>
          <title>Audio Feedback</title>
          <p>The audio feedback was provided through the VR headset, and it consisted of a beep sound added to the VR application using an FMOD sound effect engine. Similar to haptic feedback, audio feedback served the purpose of alerting users of the presence of obstacles in their surroundings via a beep sound. The sound was played at increasingly short intervals as the user approached an obstacle. The VR device was connected to the laptop via Wi-Fi using the Oculus application and Quest Link.</p>
          <p>Obstacle detection through audio feedback was again implemented in a C# script using the Spherecast function. However, in this case, only 1 sphere was designed to be projected from the user’s head in the virtual environment. Anytime an object was in the direction the user was facing, it was detected by the sphere and a beep sound was emitted by the VR headset to alert the user about the presence of an obstacle. Similar to the haptic feedback, the maximum hit distance of the sphere was set to 2.5 m. The rationale behind this audio feedback design was to enhance users’ residual vision while exploring the environment with their head movement via simple and intuitive audio feedback.</p>
          <p>Moving forward, future implementations could explore additional sensory cues to further enrich the user experience in virtual environments. For example, synchronized footstep sounds tailored to users’ movements have been shown to significantly elevate perceived presence in the virtual environment. This heightened presence fosters greater awareness of one’s gait and posture, resulting in more authentic interactions and enhanced movement control [<xref ref-type="bibr" rid="ref58">58</xref>]. The efficacy of echo-acoustic cues in navigating virtual environments has also been previously assessed [<xref ref-type="bibr" rid="ref59">59</xref>]; not only could these cues improve collision avoidance and navigation efficiency, but they may also enhance the perception and evaluation of different routes after training.</p>
        </sec>
      </sec>
      <sec>
        <title>Experimental Methods</title>
        <sec>
          <title>Participants</title>
          <p>A total of 72 healthy participants with a mean age of 25.93 (SD 4.48) years were recruited from New York University Tandon School of Engineering. Of these 72 participants, 26 (36%) self-identified as women and 46 (64%) as men. To reduce the risk of injury or discomfort associated with the use of a VR device, we excluded people who were pregnant; older adults; had preexisting binocular vision abnormalities or psychiatric disorders; had a heart condition, seizures, or other serious medical conditions; and used medical devices. We opted for self-reported visual acuity to exclude persons with preexisting binocular vision abnormalities, as conducting objective screenings for all participants would have required additional resources, including time and personnel. Given the nature of our research and the characteristics of our target population, we felt self-reporting was a practical and feasible approach, allowing us to efficiently gather relevant data without significantly extending the duration of participant recruitment and data collection. Participants with normal or normal corrected vision were included in the study.</p>
        </sec>
        <sec>
          <title>Procedure</title>
          <p>The experimental study took place in a multipurpose production space at New York University’s Media Commons, consisting of 4 bays, each of which was 6 m long and 2 m wide with a total area of 178 m<sup>2</sup>. Other than 4 curtains positioned along the sidewall of the bays, the environment was free from obstructions. Thus, participants were able to walk freely during the experiment (<xref rid="figure4" ref-type="fig">Figure 4</xref>).</p>
          <fig id="figure4" position="float">
            <label>Figure 4</label>
            <caption>
              <p>Multipurpose production space used to conduct the experiments.</p>
            </caption>
            <graphic xlink:href="rehab_v11i1e55776_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>There was no training provided for using the haptic feedback device or the VR platform; participants completed the experiment in a single session.</p>
          <p>Participants performed an obstacle avoidance task on the 2 floors of the virtual subway station environment while experiencing the most common symptoms and signs of a VI. Specifically, participants were asked to physically walk from a starting point until they reached a virtual elevator and then turn 180° and walk back until they reached the train platform. To help participants understand that they had reached the final destination, arrival was signaled through the sound of a turnstile opening and the animation of a train passing by. Immediately after the completion of each condition, participants were asked to fill out a questionnaire concerning their overall experience and the 2 types of feedback (refer to the Questionnaire subsection).</p>
          <p>During the experiment, the belt and the VR headset alerted users about the presence of obstacles in the surrounding environment through vibration feedback on the abdomen and audio feedback, respectively, to minimize the possibility of a collision. The right Oculus Touch controller vibrated any time a user hit an obstacle in the virtual environment to reproduce the sensation of touching an object. The left Oculus Touch controller was attached to the haptic feedback device vertically to track the position of the users during the experiment (refer to the Data Collection subsection). The experiment was aimed at realistically recreating a path from the entrance of a subway station to the train platform, with a maximum duration of 30 minutes to prevent distress associated with extended VR sessions [<xref ref-type="bibr" rid="ref60">60</xref>].</p>
        </sec>
        <sec>
          <title>Conditions and Research Questions</title>
          <p>A total of 4 experimental conditions were tested to elucidate the individual and combined effects of haptic and audio feedback on movement behavior, navigation performance, and self-reported ratings. Each participant performed the task in 4 different conditions: no feedback, haptic feedback only, audio feedback only, and both feedbacks. Apart from the type of feedback provided, all conditions were identically structured. Each participant was assigned to only 1 (4%) of the 24 possible combinations for the following purposes: (1) preventing fatigue from potentially diminishing the impact of the feedback on users’ performance in the later stages of the experiment and (2) mitigating biases related to increased familiarity with the devices. During the obstacle avoidance task, data on the navigation performance (task completion time, number of collisions, and trajectory) and movement behavior (head and body orientation) of the participants were collected (refer to Data Collection subsection).</p>
          <p>This study aimed to answer the following research questions (RQs) based on the collected data:</p>
          <list list-type="bullet">
            <list-item>
              <p>RQ1. How did individual and synergistic use of the 2 types of feedback affect the navigation performance of participants across experimental conditions?</p>
            </list-item>
            <list-item>
              <p>RQ2. How did individual and synergistic use of the 2 types of feedback affect the movement behavior of participants across experimental conditions?</p>
            </list-item>
            <list-item>
              <p>RQ3. How did participants perceive the individual and synergistic use of the 2 types of feedback across experimental conditions?</p>
            </list-item>
          </list>
        </sec>
      </sec>
      <sec>
        <title>Data Collection</title>
        <sec>
          <title>Metrics</title>
          <p>During each experiment, we collected the following metrics: number of collisions, completion time, head orientation, and body position and orientation. To save these metrics, we used 2 C# scripts. The first script was used to start and reset a stopwatch at the beginning of each experiment and to collect the following data: (1) head orientation (Euler angles) from the VR headset, (2) body position from the user’s body in VR, and (3) body orientation from the left Oculus Touch controller. Specifically, to collect data on users’ body position, we provided the player with a CapsuleCollider and a RigidBody component. The former is an invisible capsule-shaped primitive that represents the user’s body in VR, while the latter provides the user’s body with physics properties. These 2 components moved in the virtual environment according to the movement of the user in the real environment. The left Oculus Touch controller was secured vertically on the belt by means of an element 3D-printed in carbon fiber reinforced polylactic acid and used for collecting users’ body orientation. The game object representing the left Oculus Touch controller in VR moved in the virtual environment according to the movement of the physical controller in the real environment.</p>
          <p>The second script was used to simulate the collision with obstacles and to alert the user through a vibration provided by the right Oculus Touch controller. To enable the vibration of the controller, each virtual object was provided with a RigidBody and a Collider component. In this case, we used a BoxCollider, an invisible box-shaped primitive that encloses the object. When a BoxCollider of an object came in contact with the collider of the player, the script initiated the vibration of the right Oculus Touch controller and registered a collision.</p>
        </sec>
        <sec>
          <title>Questionnaire</title>
          <p>A questionnaire was created to collect participants’ opinions on the overall experience and the 2 types of feedback. The questionnaire (<xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>) included 8 items. Questions 1 to 3 were designed to investigate participants’ familiarity with VR, emotional reaction, and potential motion sickness felt during the experiment. Question 4 sought to understand participants’ personal perception of their navigation performance during the 4 experimental conditions. Question 5 asked for an explanation about their answer to question 4. Question 6 was designed to explore participants’ preference toward 1 specific condition. Question 7 required an explanation about that preference. Finally, the participants were asked to give an overall evaluation of the experience using a 5-point scale (not at all interesting, slightly interesting, moderately interesting, fairly interesting, and extremely interesting). The questionnaire was developed in a Google form, and it was accessible to participants by scanning a QR code. Participants filled out the questionnaire only after they completed all the 4 experimental conditions.</p>
        </sec>
        <sec>
          <title>Data Processing</title>
          <p>The data processing was performed in MATLAB (MathWorks, version 2021b). The body position was defined in a coordinate system CS0 whose origin was set at the experiment starting position, as shown in <xref rid="figure5" ref-type="fig">Figure 5</xref>A. The x- and y-axes were oriented along the main dimensions of the room, while the z-axis was aligned with the direction of gravity. Euler angles (<italic>ψ<sub>b</sub></italic>, <italic>θ<sub>b</sub></italic>, <italic>ϕ<sub>b</sub></italic>) were used to describe the orientation of the trunk, and Euler angles (<italic>ψ<sub>h</sub></italic>, <italic>θ<sub>h</sub></italic>, <italic>ϕ<sub>h</sub></italic>) were used to describe the spatial orientation of the head; coordinate systems are shown in <xref rid="figure5" ref-type="fig">Figure 5</xref>B. Raw data of the Euler angles and body position were smoothed using a quadratic regression method over a window of 20 samples to minimize noise from the measured data.</p>
          <fig id="figure5" position="float">
            <label>Figure 5</label>
            <caption>
              <p>Coordinate systems used to define (A) body position (CS0; X0, Y0, and Z0) and (B) head (CSh; Xh, Yh, and Zh) and body (CSb; Xb, Yb, and Zb) orientation.</p>
            </caption>
            <graphic xlink:href="rehab_v11i1e55776_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Trajectory Length and Smoothness</title>
          <p>We computed participants’ trajectory length and smoothness. The trajectory length of each participant was calculated as follows:</p>
          <disp-formula>
            <graphic xlink:href="rehab_v11i1e55776_fig7.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </disp-formula>
          <p>where <italic>nF</italic> is the number of frames, <italic>p<sub>t</sub></italic> = [X<italic><sub>0,t</sub></italic>,Y<italic><sub>0,t</sub></italic>] is the body position in 2 dimensions at time step t, and &#124;&#124;・&#124;&#124; is the Euclidean norm.</p>
          <p>Smoothness was estimated through the spectral arc length (SPARC) [<xref ref-type="bibr" rid="ref61">61</xref>] and computed as follows. First, we performed a numeric derivative on the speed profile <italic>v</italic>. Then, we computed the fast Fourier transform on the speed to obtain the spectrum magnitude <italic>V</italic>(<italic>f</italic>) as a function of the frequency <italic>f</italic>, which we normalized with respect to its maximum to obtain.</p>
          <disp-formula>
            <graphic xlink:href="rehab_v11i1e55776_fig8.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </disp-formula>
          <p>where <italic>f<sub>i</sub></italic> is the <italic>i-th</italic> frequency component of the spectrum.</p>
          <p>We determined the cut-off frequency <italic>f<sub>c</sub></italic> as the maximum frequency where the spectral magnitude is above a threshold <italic>V</italic> and below a maximum frequency limit <italic>f<sub>max</sub></italic>,</p>
          <p><italic>f<sub>c</sub></italic> = max{<italic>f<sub>i</sub></italic> : <italic>f<sub>i</sub></italic> &#60; <italic>f<sub>max</sub></italic>, <italic>V<sub>norm</sub></italic>(<italic>f<sub>i</sub></italic>) &#62; <italic>V</italic>}</p>
          <p>Finally, we computed the SPARC,</p>
          <disp-formula>
            <graphic xlink:href="rehab_v11i1e55776_fig9.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </disp-formula>
          <p>where <italic>N<sub>fc</sub></italic> is the number of frequency components up to <italic>f<sub>c</sub></italic> and <italic>ΔV</italic>(<italic>f<sub>i</sub></italic>) is the difference in the normalized spectrum magnitude between adjacent frequency components, calculated as <italic>ΔV</italic>(<italic>f<sub>i</sub></italic>) = <italic>V<sub>norm</sub></italic>(<italic>f<sub>i+1</sub></italic>) − <italic>V<sub>norm</sub></italic>(<italic>f<sub>i</sub></italic>). We set <italic>V</italic> = 0.05 and <italic>f<sub>max</sub></italic> = 10 Hz. The SPARC is related to the frequency content of the velocity, and therefore, a smoother movement presents a higher value of SPARC.</p>
        </sec>
        <sec>
          <title>Head and Body Motion Entropy</title>
          <p>To evaluate how each condition affected the user’s head motion, we performed an analysis of the variability of the pitch angle of the head <italic>θ<sub>h</sub></italic> and the difference between the head yaw angle <italic>ψ<sub>h</sub></italic> and body yaw angle <italic>ψ<sub>b</sub></italic>, defined as <italic>χ</italic>.</p>
          <p>The angle variability was calculated by computing Shannon entropy, defined as</p>
          <disp-formula>
            <graphic xlink:href="rehab_v11i1e55776_fig10.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </disp-formula>
          <p>where <italic>p</italic>(∙) denotes probability and <italic>λ</italic> is a realization of <italic>Λ</italic> in the sample space of all the possible realizations <italic>Ω</italic>. The entropy <italic>H</italic>(<italic>Λ</italic>) is expressed in bits because a logarithm with base 2 was used. To compute the entropy for the aforementioned angles, we split the range of motion into single-degree intervals and computed the probability for each bin.</p>
        </sec>
      </sec>
      <sec>
        <title>Statistical Analysis</title>
        <p>The statistical analysis was performed in RStudio (Posit PBC, version 2022.07.2). Specifically, the function <italic>kolmogorov_test</italic> of the <italic>nortest</italic> package (version 1.0-4) was used to perform the normality test on residuals. The function <italic>lmer</italic> of the <italic>lmerTest</italic> package (version 3.1-3) and the function <italic>anova</italic> of the <italic>rstatix</italic> package (version 0.7.0) were used to conduct the 2-way repeated measures ANOVA. The function <italic>rank</italic> of the <italic>car</italic> package (version 3.1-2) was used for the rank transformation. The function <italic>scheirerRayHare</italic> of the <italic>rcompanion</italic> package (version 2.4.35) was used to conduct the Scheirer-Ray-Hare test. The graphical representations of the statistical analysis shown in the interaction plots were computed using the function <italic>ggplot</italic> of the <italic>ggplot2</italic> package (version 0.4.0).</p>
        <p>Before the execution of the statistical analysis, we used the Kolmogorov-Smirnov test to evaluate the normality of residuals derived from our linear model. For each performance metric, we conducted normality tests across various experimental conditions, encompassing scenarios with no feedback, haptic feedback only, audio feedback only, and both feedback modalities. For the time taken to complete the task, trajectory length, entropy of the pitch angle, and difference between the yaw angle of the head and the yaw angle of the body, the test did not provide sufficient evidence to reject the null hypothesis that the data follow a normal distribution. However, for the number of collisions and trajectory smoothness, we found evidence to reject the null hypothesis of normality. On the basis of these findings, we rank-transformed the trajectory smoothness and verified the normality of the residuals, akin to the other continuous metrics mentioned in the Metrics subsection, and chose an alternative test, Scheirer-Ray-Hare, for the specific treatment of the number of collisions, the only discrete metric of our study (whose residuals from a standard ANOVA would not satisfy the normality assumption).</p>
        <p>To study the individual and synergistic effects of haptic and audio feedback on participants’ navigation performance (RQ1), we performed a 2-way repeated measures ANOVA on the following metrics: (1) time taken by each participant to complete the task across all conditions; (2) trajectory length, L, of each participant across all conditions; and (3) rank-transformed trajectory smoothness, SPARC, of each participant across all conditions.</p>
        <p>We performed a Scheirer-Ray-Hare test on the number of collisions of each participant while performing the task across all conditions.</p>
        <p>To address the individual and synergistic effects of the haptic and audio feedback on participants’ movement behavior (RQ2), we performed a 2-way repeated measures ANOVA on the following metrics: (1) entropy of the pitch angle of the head, <italic>H</italic>(<italic>θ<sub>h</sub></italic>), of each participant across all conditions; and (2) entropy of the difference between the yaw angle of the head and yaw angle of the body, <italic>H</italic>(<italic>χ</italic>), of each participant across all conditions.</p>
        <p>Finally, to gather participants’ opinion regarding their overall experience and their perceptions of the 2 types of feedback used across the 4 experimental conditions (RQ3), we conducted a descriptive statistical analysis of their answers to the questionnaire.</p>
        <p>Before the statistical analysis, we identified outliers in the datasets. Out of 288 observations, the analysis revealed the presence of 6 (2.1%) outliers in the completion time dataset, 92 (31.9%) outliers in the number of collisions dataset, 25 (8.7%) outliers in the trajectory length dataset, 19 (6.6%) outliers in the SPARC dataset, 1 (0.3%) outlier in the entropy of pitch angle dataset, and 5 (1.7%) outliers in the dataset of the entropy of the difference between the yaw angle of the head and body yaw angle. The presence of outliers is ascribed to instances in which participants may have encountered challenges in comprehending the functioning of the devices or may not have paid attention to 1 or both feedback types. We removed all the outliers from the analysis.</p>
      </sec>
      <sec>
        <title>Ethical Considerations</title>
        <p>Before starting the experiment, all participants signed an informed consent form in accordance with procedures approved by the Institutional Review Board at New York University (IRB-FY2023-7774). Participants were also told that they could take breaks between each condition and withdraw from the study at any time. All data collected during the study are nonidentifiable, ensuring participants' privacy and confidentiality. Furthermore, participants did not receive any compensation for their participation in the experiment.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>Experimental results in terms of mean and SE of the mean for individual and synergistic effects of haptic and audio feedback are reported in <xref rid="figure6" ref-type="fig">Figure 6</xref>. To determine the effectiveness of each feedback as well as their interaction, we conducted a 2-way repeated measures ANOVA on continuous metrics and a Scheirer-Ray-Hare test on discrete ones.</p>
      <fig id="figure6" position="float">
        <label>Figure 6</label>
        <caption>
          <p>Interaction plots showing the individual and synergistic effect of the haptic and audio feedback on participants’ navigation performance: (A) time to complete the task, (B) number of collisions, (C) trajectory length (L), and (D) trajectory smoothness (spectral arc length; SPARC). Interaction plots showing the individual and synergistic effect of the haptic and audio feedback on participants’ movement behavior: (E) entropy of the head, H(θh), and (F) entropy of the difference between the yaw angle of the head and the yaw angle of the body, H(χ).</p>
        </caption>
        <graphic xlink:href="rehab_v11i1e55776_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <sec>
        <title>Navigation Performance</title>
        <sec>
          <title>Number of Collisions and Completion Time</title>
          <p>Experimental results on completion time and number of collisions are reported in <xref rid="figure6" ref-type="fig">Figure 6</xref>A and <xref rid="figure6" ref-type="fig">Figure 6</xref>B, respectively. The haptic feedback through the belt was conducive to an increase in the completion time of the task (<italic>F</italic><sub>1,207.5</sub>=4.7962; <italic>P</italic>=.03) and a decrease in the number of collisions (test statistic from the Scheirer-Ray-Hare test, H=3.8285; <italic>P</italic>=.05). The audio feedback, instead, was not found to modulate the completion time and number of collisions; neither did we find a main effect of the audio feedback (completion time: <italic>F</italic><sub>1,207.5</sub>=0.1467; <italic>P</italic>=.70 and collisions: H=0.6110; <italic>P</italic>=.43), nor did we observe a significant interaction between the audio and haptic feedback (completion time: <italic>F</italic><sub>1,207.5</sub>=1.7725; <italic>P</italic>=.18 and number of collisions: H=0.8518; <italic>P</italic>=.35).</p>
        </sec>
        <sec>
          <title>Trajectory Length and Smoothness</title>
          <p>Experimental results on L and SPARC are reported in <xref rid="figure6" ref-type="fig">Figure 6</xref>C and <xref rid="figure6" ref-type="fig">Figure 6</xref>D, respectively. The haptic feedback through the belt was linked to a notable increase in trajectory length (<italic>F</italic><sub>1,188.12</sub>=7.3482; <italic>P</italic>=.007), though it did not yield a significant variation of the trajectory smoothness (<italic>F</italic><sub>1,213</sub>=0.0127; <italic>P</italic>=.91). In contrast, audio feedback yielded a significant enhancement in the trajectory smoothness (<italic>F</italic><sub>1,213</sub>=7.6342; <italic>P</italic>=.006), but it did not influence the trajectory length (<italic>F</italic><sub>1,188.09</sub>=0.2972; <italic>P</italic>=.58). A significant interaction between haptic and audio feedback was observed with respect to the trajectory length (<italic>F</italic><sub>1,186.73</sub>=4.20092; <italic>P</italic>=.04) but not with respect to the trajectory smoothness (<italic>F</italic><sub>1,213</sub>=1.2684; <italic>P</italic>=.26).</p>
        </sec>
      </sec>
      <sec>
        <title>Movement Behavior</title>
        <p>Experimental results on <italic>H</italic>(<italic>θ<sub>h</sub></italic>) and <italic>H</italic>(<italic>χ</italic>) are reported in <xref rid="figure6" ref-type="fig">Figure 6</xref>E and <xref rid="figure6" ref-type="fig">Figure 6</xref>F, respectively. The haptic feedback through the belt resulted in a reduction of the entropy of the pitch angle of the head (<italic>H</italic>(<italic>θ<sub>h</sub></italic>): <italic>F</italic><sub>1,208.54</sub>=6.1273; <italic>P</italic>=.02), but it did not yield a significant variation in the entropy of the difference between the yaw angle of the head and the yaw angle of the body (<italic>H</italic>(<italic>χ</italic>): <italic>F</italic><sub>1,210.93</sub>=1.5553; <italic>P</italic>=.21). Audio feedback was not found to influence either the entropy of the pitch angle of the head or the entropy of the difference between the yaw angle of the head and the yaw angle of the body (<italic>H</italic>(<italic>θ<sub>h</sub></italic>): <italic>F</italic><sub>1,209.10</sub>=0.0356; <italic>P</italic>=.85 and <italic>H</italic>(<italic>χ</italic>): <italic>F</italic><sub>1,210.93</sub>=0.1791; <italic>P</italic>=.67). No significant interaction was observed between audio and haptic feedback (<italic>H</italic>(<italic>θ<sub>h</sub></italic>): <italic>F</italic><sub>1,208.54</sub>=1.9633; <italic>P</italic>=.16 and <italic>H</italic>(<italic>χ</italic>): <italic>F</italic><sub>1,210.93</sub>=1.0517; <italic>P</italic>=.31).</p>
      </sec>
      <sec>
        <title>Perception</title>
        <p>From the analysis of the questionnaires, we found that 63% (45/72) of the participants had previous experience with VR, 76% (55/72) felt engaged while performing the experiment, and only 6% (4/72) experienced nausea or motion sickness while performing the experiment. We discovered that 50% (36/72) of the participants thought that their navigation performance, in terms of completion time and collision number, was better in the condition where they received both the haptic and audio feedback. In total, 24% (17/72) of the participants thought their navigation performance was better in the condition where they received only the audio feedback, and 14% (10/72) of the participants thought their navigation performance was better in the condition where they received only the haptic feedback.</p>
        <p>We found that 40% (29/72) of the participants preferred the condition where they received both the haptic and audio feedback, 32% (23/72) of the participants favored the condition where they received only the audio feedback, and 18% (13/72) of the participants favored the condition where they received only the haptic feedback. Finally, 58% (42/72) of the participants evaluated the overall experiment as extremely interesting, and 39% (28/72) of the participants evaluated the overall experiment as fairly interesting.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Context and Significance</title>
        <p>VI refers to a condition where an individual experiences limited vision that cannot be fully corrected by glasses, contact lenses, or medical interventions. Persons with VI often encounter significant mobility issues that may affect their confidence in engaging with their surroundings, hindering social interactions and community involvement. Ongoing advancements in ETAs continue to contribute to the increased autonomy and improved mobility of individuals with VIs, highlighting the potential of technology to positively impact the lives of those facing mobility challenges. These devices leverage technology to assist users in navigating their surroundings more effectively. Common characteristics of ETAs include the use of sensors, GPS technology, and auditory or tactile feedback systems to detect obstacles and provide users with real-time feedback about their environment or to help users with route planning and destination guidance.</p>
        <p>In this study, we introduced a multisensory AT system based on haptic and audio feedback for obstacle avoidance. We tested our system in a VR environment resembling a complex urban environment. VR offers the possibility to design highly realistic and easily customizable environments where ATs can be tested and refined under various experimental conditions while avoiding potential risks of the real world. In addition, rendering and postprocessing tools available in VR enable an accurate simulation of various forms of VI at different stages of progress. While we recognize that studying healthy participants with simulated VI does not fully replicate real-life scenarios of organic VI individuals, it is a critical first step in developing ATs. Using healthy participants in early technology phases allows us to test and refine ATs without causing stress for actual VI individuals, who may prefer later-stage trials. Recognizing the importance of inclusivity in participant selection, we intend to expand our research to include a broader range of persons with blindness or various experiences of VI, including those with acquired or congenital eye pathologies, to ensure the clinical relevance of our findings.</p>
        <p>We extended our previous work on the use of VR for testing, refining, and training with ETAs [<xref ref-type="bibr" rid="ref42">42</xref>]. We proposed a multisensory system where haptic feedback is provided by an upgraded version of our in-house built haptic feedback device [<xref ref-type="bibr" rid="ref37">37</xref>-<xref ref-type="bibr" rid="ref39">39</xref>], complemented by audio feedback that is provided by a VR headset. The system was evaluated through an experiment where 72 healthy participants performed an obstacle avoidance task in a virtual subway station while experiencing the simulation of VI symptoms at an advanced severity stage. The virtual environment was designed to align with the dimensions of the physical environment where the experiment took place. During the experiment, participants were asked to walk in the VR environment trying to avoid obstacles that were presented along their path. Each participant performed the experiment 4 times under different conditions (with haptic feedback only, with audio feedback only, with both haptic and audio feedback, and without any feedback). Depending on the experimental condition, participants received vibrotactile feedback on the abdomen through the belt and audio feedback, consisting of a beep sound, from the VR headset that indicated the presence of obstacles along their path.</p>
        <p>Through this experiment, we investigated the impact of our multisensory, AT-integrated VR system on participants’ mobility performance and movement behavior. Specifically, we evaluated how the individual and synergistic use of the 2 types of feedback affected the navigation performance (RQ1), movement behavior (RQ2), and perception (RQ3) of participants across experimental conditions. We performed a 2-way repeated measures ANOVA on task completion time, number of collisions, trajectory length and smoothness (RQ1), the entropy of the pitch angle, and the entropy of the difference between the yaw angle of the head and the yaw angle of the body (RQ2). Finally, we conducted a descriptive statistical analysis of their answers to the questionnaire (RQ3).</p>
      </sec>
      <sec>
        <title>Principal Findings</title>
        <sec>
          <title>Navigation Performance and Movement Behavior</title>
          <p>Our investigation of the efficacy of the haptic feedback device indicated notable improvements in participants’ navigation performance, specifically in reducing the number of collisions. However, these positive effects did not extend to task completion time, trajectory length, or trajectory smoothness. Contrary to our expectations, the introduction of the haptic feedback device led to a significant increase in task completion time. Participants exhibited hesitancy in their walking behavior when relying only on the haptic feedback device, as evidenced by observable delays in reacting to stimuli. Such an outcome diverged from our earlier work [<xref ref-type="bibr" rid="ref42">42</xref>], where the haptic feedback device was found to reduce task completion time. This disparity can be attributed to the increased difficulty and duration of the obstacle avoidance task in this study as well as the distinct walking modality used. In this experiment, participants navigated a dynamic and complex urban environment, whereas in our previous study, they traversed a simpler and smaller outdoor environment using a controller. The prolonged task completion time resulted in longer trajectories in response to haptic feedback.</p>
          <p>The spatial resolution of the haptic feedback device played a crucial role in these findings, as the detailed environmental information prompted participants to navigate cautiously, resulting in intricate trajectories. Examining participants’ movement behavior, we observed a significant reduction in the entropy of the pitch angle of the head due to haptic feedback. Just as participants moved more smoothly in the environment, they also maintained a more constant and less variable head orientation. However, the device did not affect the entropy of the difference between the yaw angle of the head and the yaw angle of the body. This result may be attributed to the spatial information provided by the haptic feedback device, which guided users based on their body orientation and prompted them to reduce their vertical head movements.</p>
          <p>We registered an effect of audio feedback on participants’ navigation performance with respect to trajectory smoothness. Using audio feedback, participants were likely to favor straight paths, as seen from reduced instances of halted movement and a reduced tendency to course-correct during navigation. Moreover, the design of the audio feedback system, which alerted users to obstacles in their line of sight, may have facilitated the exploration of the environment with just the movement of their head. We did not register a variation in the number of collisions, likely due to the modality used by the audio feedback device for obstacle detection. In contrast to the haptic feedback device, which detected obstacles using 10 vibrating actuators on the user’s abdomen, the audio feedback device signaled the presence of obstacles in the user’s line of sight through a distinctive beep sound emitted by the VR headset. The lower spatial resolution of the audio feedback device may have been less effective in aiding users to avoid obstacles compared to the haptic feedback.</p>
          <p>While not effective in reducing the trajectory length alone, audio feedback had a positive effect on haptic feedback in the form of a significant interaction between the 2 modalities. In fact, the increase in trajectory length due to haptic feedback alone is mitigated by the concurrent use of audio feedback, thereby suggesting that participants were able to leverage both information cues and make informed decisions as they negotiated haptic versus audio cues. However, a positive role of combined feedback was not observed for all metrics, likely due to the increased cognitive load resulting from the use of both feedbacks. It is tenable that the delivery of multiple feedback cues poses some difficulties in terms of assimilation and requires a learning curve for users to adapt to new approaches. In principle, this may be mitigated by increasing the training time for users to become more proficient with combined feedback. Overall, the combined use of audio and haptic feedback enhances safety by facilitating informed decision-making, and it contributes to travel efficiency by addressing trajectory length and smoothness. This underscores the potential of blending feedback modalities to optimize both safety and travel efficiency.</p>
        </sec>
        <sec>
          <title>Participants’ Feedback</title>
          <p>The results of the questionnaire offer valuable insights into participants’ experiences and preferences during the experiment. The high engagement reported by 76% (55/72) of the participants suggests that the multisensory feedback, comprising both haptic and audio cues, contributed to an immersive and captivating experience. Notably, only a minimal percentage of participants (4/72, 6%) experienced nausea or motion sickness, indicating that the implemented feedback modalities were well tolerated. Participants’ perceptions of navigation performance revealed a preference for the combined haptic and audio feedback condition, with 50% (36/72) of them believing that it enhanced their performance. Participants emphasized that haptic and audio cues offer distinct information. Many participants noted that having both types of feedback provides a more complete and nuanced understanding of their surroundings, aiding in better decision-making and spatial awareness. Others found the combination more intuitive, with haptic feedback offering directional cues and audio feedback providing information on the proximity of obstacles. The preference for both modalities suggests that, when used together, they complement each other, addressing potential limitations or confusion that might arise when using either haptic or audio feedback alone. Interestingly, the preferred condition did not always align with perceived performance, highlighting the complexity of user preferences. Finally, most of the participants (42/72, 58%) found the overall experiment extremely interesting, emphasizing the potential of multisensory, AT-integrated VR systems in maintaining user engagement. These findings underscore the importance of considering user preferences and experiences when developing and refining multisensory ATs, ensuring that future iterations are tailored to meet the needs of individuals with visual impairments.</p>
        </sec>
      </sec>
      <sec>
        <title>Limitations</title>
        <p>Our study is not free of limitations with respect to the wearable design, VR environment, and experimental approach. Specifically, we identified the following 5 main limitations.</p>
        <p>First, we used only 1 type of audio feedback. We cannot exclude the possibility that other forms of audio feedback may have different effects on our haptic feedback system. We chose this particular design for the audio feedback after pilot trials because it offered a straightforward and intuitive means for users to access environmental information, aiding them in obstacle avoidance. In the future, we plan to design experiments that will involve the evaluation of auditory cues individually and in combination to assess their impact on participants’ task performance and overall user experience. Specifically, participants will be immersed in virtual environments simulating crowded urban settings and real-world challenges, including navigating through busy intersections, crossing streets safely, and locating specific points of interest within the urban environment. These tasks will provide valuable insights into how different types of audio feedback can influence participants’ navigation strategies, decision-making processes, and overall spatial awareness in crowded urban environments.</p>
        <p>The second limitation pertains to simulating only the most common symptoms of VI. While glaucoma is a prevalent eye pathology and our methodology can be readily expanded to other eye pathologies, we acknowledge the need for future research to tackle a wider range of end users. Specifically, we anticipate the development of new systems, incorporating varied forms of audio and haptic feedbacks and tested in diverse conditions and with individuals experiencing different eye pathologies. Testing our system on various types and forms of VI could provide more robust evidence, demonstrating broader applicability to a diverse range of users.</p>
        <p>The third limitation arises from the fact that, in the real world, individuals would exercise caution in avoiding obstacles to prevent injury. This instinctive behavior may not be fully present in VR environments, where collisions do not result in any negative consequences. As a result, participants might prioritize completing the task quickly over minimizing the number of collisions. One potential strategy to mitigate this issue involves introducing incentives, such as rewards, or placing cardboard obstacles in the environment to encourage participants to focus more on avoiding obstacles rather than completing the experiment quickly.</p>
        <p>A fourth limitation is related to the number and placement of actuators on the ETA used in the research. The current configuration of actuators was determined based on a practical balance between providing enough information and avoiding overwhelming users with excessive tactile stimuli. However, the optimal arrangement and quantity of actuators may vary among individuals, as sensory preferences and sensitivities can differ widely. Recognizing this limitation, we acknowledge the necessity of future investigations that explore alternative configurations of actuators on the ETA. Our upcoming research plans include testing different numbers and arrangements of actuators to identify an optimal solution that caters to the diverse sensory needs of users with VI. This iterative approach aims to enhance the user experience and effectiveness of the multisensory, AT-integrated VR system, ensuring its adaptability and usability across a broader spectrum of individuals with varying preferences and sensitivities.</p>
        <p>Finally, it is essential to acknowledge that our study only involved healthy participants with simulated VI. Such an experimental choice limits the direct applicability of our findings to the broader community of individuals with VI and the practical implications of the proposed ETA. At the same time, our research provides valuable insights into the use of VR in disability research and serves as an important preliminary step in the development of ATs tailored to address the specific needs of individuals with VI. Recognizing the significance of simulating real-world challenges within the VR environment for effective rehabilitation interventions, we will broaden our research scope to encompass diverse neurological conditions. In our upcoming studies, we plan to include individuals with balance and neurological issues to further explore the applicability of our multisensory AT solutions in rehabilitation settings. Doing so will undoubtedly enhance the clinical relevance and generalizability of our findings, aligning with our overarching goal of developing and validating tailored interventions for various clinical populations.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>AT for persons with VI plays a pivotal role in enhancing their sensory perception and spatial awareness. These devices often integrate a combination of auditory, haptic, and visual cues to provide comprehensive information about the surrounding environment. However, most devices are designed without a user-centered focus, often featuring complexities beyond consumer necessity [<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref64">64</xref>]. Research needs to hone methodologies that better support consumer-oriented and user-centered devices as well as test and evaluate them in realistic scenarios while limiting safety issues and concerns for persons with VI. This holistic approach aims to bridge the gap between theoretical advancements and practical applications, ultimately enhancing the usability and impact of ETAs on the lives of individuals with VI. Our multisensory, AT-integrated VR system is a first step in this direction that may enhance the user’s ability to interpret and interact with their surroundings. Our synergistic approach facilitates safer mobility with improved travel efficiency and opens avenues for innovative applications in areas such as education, training, and rehabilitation for persons with VI.</p>
        <p>In our forthcoming research, we aim to enhance and evaluate our multisensory AT-integrated VR system for persons with VI. This endeavor will be guided by a comprehensive methodology that encompasses various domains of knowledge and caters to the diverse needs of the target population. Our design process will take into account the wide spectrum of VI, which ranges from low vision to total blindness, considering the varying degrees of VI and potential additional impairments such as hearing loss or peripheral neuropathy. In addition, we will acknowledge the diversity within the VI population in terms of visual experience, spanning from congenital blindness to acquired blindness later in life, which can significantly influence their interaction with ATs [<xref ref-type="bibr" rid="ref65">65</xref>]. Central to our approach is understanding user preferences, technological familiarity, and motivation, as these factors are pivotal for the acceptance and effectiveness of AT devices [<xref ref-type="bibr" rid="ref65">65</xref>]. The experimental phase will include a cohort of healthy participants and individuals with VI. The VR setting will be equivalent for both groups to ensure consistency and comparability of results. By comparing the experimental outcomes between the 2 groups, we aim to pinpoint limitations associated with experiments performed solely on healthy participants, particularly those related to sensory compensation. In addition, this comparison will help identify the behavioral traits that are preserved when experimenting with healthy participants, providing valuable insights for the development and optimization of our multisensory AT in real-world clinical settings.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>A video showcasing the virtual reality environment used for evaluating the multisensory assistive technology outlined in this study, providing the perspective of an individual with normal vision.</p>
        <media xlink:href="rehab_v11i1e55776_app1.mp4" xlink:title="MP4 File  (MP4 Video), 19766 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>A video displaying the virtual reality environment used to assess the multisensory assistive technology outlined in this study, offering the perspective of an individual experiencing simulated symptoms of visual impairment.</p>
        <media xlink:href="rehab_v11i1e55776_app2.mp4" xlink:title="MP4 File  (MP4 Video), 2724 KB"/>
      </supplementary-material>
      <supplementary-material id="app3">
        <label>Multimedia Appendix 3</label>
        <p>The questionnaire administered to participants through a Google form at the conclusion of the experiment.</p>
        <media xlink:href="rehab_v11i1e55776_app3.pdf" xlink:title="PDF File  (Adobe PDF File), 81 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AT</term>
          <def>
            <p>assistive technology</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">ETA</term>
          <def>
            <p>electronic travel aid</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">RQ</term>
          <def>
            <p>research question</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">SPARC</term>
          <def>
            <p>spectral arc length</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">VI</term>
          <def>
            <p>visual impairment</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">VR</term>
          <def>
            <p>virtual reality</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This study was supported by the National Science Foundation (CNS-1952180, DUE-2129076, ECCS-1928614, ITE-2236097, and ITE-2345139). The authors would like to thank Christina Samuel, who helped with the recruitment of the participants and the experiments.</p>
    </ack>
    <fn-group>
      <fn fn-type="conflict">
        <p>JRR discloses conflicts of interest because of intellectual property owned by New York University, as well as related advisory positions with equity and ad hoc compensation. In the future, the aforementioned project may relate to multicomponent wearable technologies relevant to the stated interests. All other authors declare no other conflicts of interest.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Swenor</surname>
              <given-names>BK</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Varadaraj</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Whitson</surname>
              <given-names>HE</given-names>
            </name>
            <name name-style="western">
              <surname>Ramulu</surname>
              <given-names>PY</given-names>
            </name>
          </person-group>
          <article-title>Aging with vision loss: a framework for assessing the impact of visual impairment on older adults</article-title>
          <source>Gerontologist</source>
          <year>2020</year>
          <month>08</month>
          <day>14</day>
          <volume>60</volume>
          <issue>6</issue>
          <fpage>989</fpage>
          <lpage>95</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/31504483"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/geront/gnz117</pub-id>
          <pub-id pub-id-type="medline">31504483</pub-id>
          <pub-id pub-id-type="pii">5554379</pub-id>
          <pub-id pub-id-type="pmcid">PMC7427480</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="web">
          <article-title>Fast facts about vision loss</article-title>
          <source>Centers for Disease Control and Prevention</source>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.cdc.gov/visionhealth/basics/ced/fastfacts.htm">https://www.cdc.gov/visionhealth/basics/ced/fastfacts.htm</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="web">
          <article-title>Blindness and vision impairment</article-title>
          <source>World Health Organization (WHO)</source>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.who.int/news-room/fact-sheets/detail/blindness-and-visual-impairment">https://www.who.int/news-room/fact-sheets/detail/blindness-and-visual-impairment</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Crews</surname>
              <given-names>JE</given-names>
            </name>
            <name name-style="western">
              <surname>Chou</surname>
              <given-names>CF</given-names>
            </name>
            <name name-style="western">
              <surname>Sekar</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Saaddine</surname>
              <given-names>JB</given-names>
            </name>
          </person-group>
          <article-title>The prevalence of chronic conditions and poor health among people with and without vision impairment, aged ≥65 years, 2010-2014</article-title>
          <source>Am J Ophthalmol</source>
          <year>2017</year>
          <month>10</month>
          <volume>182</volume>
          <fpage>18</fpage>
          <lpage>30</lpage>
          <pub-id pub-id-type="doi">10.1016/j.ajo.2017.06.038</pub-id>
          <pub-id pub-id-type="medline">28734819</pub-id>
          <pub-id pub-id-type="pii">S0002-9394(17)30299-4</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Verbeek</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Drewes</surname>
              <given-names>YM</given-names>
            </name>
            <name name-style="western">
              <surname>Gussekloo</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Visual impairment as a predictor for deterioration in functioning: the Leiden 85-plus Study</article-title>
          <source>BMC Geriatr</source>
          <year>2022</year>
          <month>05</month>
          <day>06</day>
          <volume>22</volume>
          <issue>1</issue>
          <fpage>397</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmcgeriatr.biomedcentral.com/articles/10.1186/s12877-022-03071-x"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12877-022-03071-x</pub-id>
          <pub-id pub-id-type="medline">35524168</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12877-022-03071-x</pub-id>
          <pub-id pub-id-type="pmcid">PMC9074345</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Swenor</surname>
              <given-names>BK</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Tian</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Varadaraj</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Bandeen-Roche</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Visual impairment and frailty: examining an understudied relationship</article-title>
          <source>J Gerontol A Biol Sci Med Sci</source>
          <year>2020</year>
          <month>02</month>
          <day>14</day>
          <volume>75</volume>
          <issue>3</issue>
          <fpage>596</fpage>
          <lpage>602</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/31419280"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/gerona/glz182</pub-id>
          <pub-id pub-id-type="medline">31419280</pub-id>
          <pub-id pub-id-type="pii">5550824</pub-id>
          <pub-id pub-id-type="pmcid">PMC7328203</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rajeshkannan</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Yenuganti</surname>
              <given-names>VV</given-names>
            </name>
            <name name-style="western">
              <surname>Solomon</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Rajsri</surname>
              <given-names>TR</given-names>
            </name>
            <name name-style="western">
              <surname>Janana Priya</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Association of visual impairment with suicidal ideation and suicide attempts: a systematic review and meta-analysis</article-title>
          <source>Indian J Psychol Med</source>
          <year>2023</year>
          <month>07</month>
          <volume>45</volume>
          <issue>4</issue>
          <fpage>345</fpage>
          <lpage>51</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/37483569"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/02537176221087124</pub-id>
          <pub-id pub-id-type="medline">37483569</pub-id>
          <pub-id pub-id-type="pii">10.1177_02537176221087124</pub-id>
          <pub-id pub-id-type="pmcid">PMC10357907</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Demmin</surname>
              <given-names>DL</given-names>
            </name>
            <name name-style="western">
              <surname>Silverstein</surname>
              <given-names>SM</given-names>
            </name>
          </person-group>
          <article-title>Visual impairment and mental health: unmet needs and treatment options</article-title>
          <source>Clin Ophthalmol</source>
          <year>2020</year>
          <volume>14</volume>
          <fpage>4229</fpage>
          <lpage>51</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33299297"/>
          </comment>
          <pub-id pub-id-type="doi">10.2147/OPTH.S258783</pub-id>
          <pub-id pub-id-type="medline">33299297</pub-id>
          <pub-id pub-id-type="pii">258783</pub-id>
          <pub-id pub-id-type="pmcid">PMC7721280</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Siira</surname>
              <given-names>HJ</given-names>
            </name>
            <name name-style="western">
              <surname>Falck</surname>
              <given-names>AA</given-names>
            </name>
            <name name-style="western">
              <surname>Kyngäs</surname>
              <given-names>HA</given-names>
            </name>
          </person-group>
          <article-title>Health-related quality of life and related factors among older adults with visual impairments</article-title>
          <source>Br J Vis Impair</source>
          <year>2019</year>
          <month>03</month>
          <day>27</day>
          <volume>37</volume>
          <issue>3</issue>
          <fpage>183</fpage>
          <lpage>93</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/10.1177/0264619619839737"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/0264619619839737</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Marques</surname>
              <given-names>AP</given-names>
            </name>
            <name name-style="western">
              <surname>Ramke</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Cairns</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Butt</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Jovic</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Nandakumar</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Faal</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Bastawrous</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Braithwaite</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Resnikoff</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Khaw</surname>
              <given-names>PT</given-names>
            </name>
            <name name-style="western">
              <surname>Bourne</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Gordon</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Frick</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Burton</surname>
              <given-names>MJ</given-names>
            </name>
          </person-group>
          <article-title>The economics of vision impairment and its leading causes: a systematic review</article-title>
          <source>EClinicalMedicine</source>
          <year>2022</year>
          <month>04</month>
          <volume>46</volume>
          <fpage>101354</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2589-5370(22)00084-0"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.eclinm.2022.101354</pub-id>
          <pub-id pub-id-type="medline">35340626</pub-id>
          <pub-id pub-id-type="pii">S2589-5370(22)00084-0</pub-id>
          <pub-id pub-id-type="pmcid">PMC8943414</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rein</surname>
              <given-names>DB</given-names>
            </name>
            <name name-style="western">
              <surname>Wittenborn</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Sublett</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Lamuda</surname>
              <given-names>PA</given-names>
            </name>
            <name name-style="western">
              <surname>Lundeen</surname>
              <given-names>EA</given-names>
            </name>
            <name name-style="western">
              <surname>Saaddine</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>The economic burden of vision loss and blindness in the United States</article-title>
          <source>Ophthalmology</source>
          <year>2022</year>
          <month>04</month>
          <volume>129</volume>
          <issue>4</issue>
          <fpage>369</fpage>
          <lpage>78</lpage>
          <pub-id pub-id-type="doi">10.1016/j.ophtha.2021.09.010</pub-id>
          <pub-id pub-id-type="medline">34560128</pub-id>
          <pub-id pub-id-type="pii">S0161-6420(21)00710-7</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Croce</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Giarre</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Pascucci</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Tinnirello</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Galioto</surname>
              <given-names>GE</given-names>
            </name>
            <name name-style="western">
              <surname>Garlisi</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Lo Valvo</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>An indoor and outdoor navigation system for visually impaired people</article-title>
          <source>IEEE Access</source>
          <year>2019</year>
          <volume>7</volume>
          <fpage>170406</fpage>
          <lpage>18</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/abstract/document/8910607"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/ACCESS.2019.2955046</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Müller</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Engel</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Loitsch</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Stiefelhagen</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Weber</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Traveling more independently: a study on the diverse needs and challenges of people with visual or mobility impairments in unfamiliar indoor environments</article-title>
          <source>ACM Trans Access Comput</source>
          <year>2022</year>
          <month>05</month>
          <day>19</day>
          <volume>15</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>44</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dl.acm.org/doi/10.1145/3514255"/>
          </comment>
          <pub-id pub-id-type="doi">10.1145/3514255</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Madake</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Bhatlawande</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Solanke</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Shilaskar</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>A qualitative and quantitative analysis of research in mobility technologies for visually impaired people</article-title>
          <source>IEEE Access</source>
          <year>2023</year>
          <volume>11</volume>
          <fpage>82496</fpage>
          <lpage>520</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/abstract/document/10168878"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/access.2023.3291074</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="web">
          <article-title>Guide dogs vs. white canes: the comprehensive comparison</article-title>
          <source>Clovernook Center for the Blind &#38; Visually Impaired</source>
          <year>2022</year>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://clovernook.org/2020/09/18/guide-dogs-vs-white-canes-the-comprehensive-comparison/">https://clovernook.org/2020/09/18/guide-dogs-vs-white-canes-the-comprehensive-comparison/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="web">
          <article-title>10 fascinating facts about the white cane</article-title>
          <source>Perkins School for the Blind</source>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://stdunstans.org.za/downloads/the-torch29.pdf">https://stdunstans.org.za/downloads/the-torch29.pdf</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="web">
          <article-title>Guide dogs 101</article-title>
          <source>Guiding Eyes for the Blind</source>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.guidingeyes.org/guide-dogs-101/#:~:text=What%20is%20a%20guide%20dog,landmarks%2C%20among%20countless%20other%20tasks">https://www.guidingeyes.org/guide-dogs-101/#:~:text=What%20is%20a%20guide%20dog,landmarks%2C%20among%20countless%20other%20tasks</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shi</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Shan</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Okada</surname>
              <given-names>Y</given-names>
            </name>
          </person-group>
          <article-title>A navigation system for visual impaired people based on object detection</article-title>
          <source>Proceedings of the 12th International Congress on Advanced Applied Informatics</source>
          <year>2022</year>
          <conf-name>IIAI-AAI '22</conf-name>
          <conf-date>July 2-8, 2022</conf-date>
          <conf-loc>Kanazawa, Japan</conf-loc>
          <fpage>354</fpage>
          <lpage>8</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/9894628"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/iiaiaai55812.2022.00078</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lolo</surname>
              <given-names>YS</given-names>
            </name>
            <name name-style="western">
              <surname>Ohammah</surname>
              <given-names>KL</given-names>
            </name>
            <name name-style="western">
              <surname>Alfa</surname>
              <given-names>AN</given-names>
            </name>
            <name name-style="western">
              <surname>Mohammed</surname>
              <given-names>SA</given-names>
            </name>
            <name name-style="western">
              <surname>Ginsau</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Obadiah</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Design and implementation of obstacle detection and warning system for visually impaired people</article-title>
          <source>Proceedings of the 2022 IEEE Nigeria 4th International Conference on Disruptive Technologies for Sustainable Development</source>
          <year>2022</year>
          <conf-name>NIGERCON '22</conf-name>
          <conf-date>April 5-7, 2022</conf-date>
          <conf-loc>Lagos, Nigeria</conf-loc>
          <fpage>1</fpage>
          <lpage>5</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/9803138"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/NIGERCON54645.2022.9803138</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kuriakose</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Shrestha</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sandnes</surname>
              <given-names>FE</given-names>
            </name>
          </person-group>
          <article-title>Tools and technologies for blind and visually impaired navigation support: a review</article-title>
          <source>IETE Tech Rev</source>
          <year>2020</year>
          <month>09</month>
          <day>27</day>
          <volume>39</volume>
          <issue>1</issue>
          <fpage>3</fpage>
          <lpage>18</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.tandfonline.com/doi/full/10.1080/02564602.2020.1819893"/>
          </comment>
          <pub-id pub-id-type="doi">10.1080/02564602.2020.1819893</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Meshram</surname>
              <given-names>VV</given-names>
            </name>
            <name name-style="western">
              <surname>Patil</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Meshram</surname>
              <given-names>VA</given-names>
            </name>
            <name name-style="western">
              <surname>Shu</surname>
              <given-names>FC</given-names>
            </name>
          </person-group>
          <article-title>An astute assistive device for mobility and object recognition for visually impaired people</article-title>
          <source>IEEE Trans Human Mach Syst</source>
          <year>2019</year>
          <month>10</month>
          <volume>49</volume>
          <issue>5</issue>
          <fpage>449</fpage>
          <lpage>60</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/8801898"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/thms.2019.2931745</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Masud</surname>
              <given-names>U</given-names>
            </name>
            <name name-style="western">
              <surname>Saeed</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Malaikah</surname>
              <given-names>HM</given-names>
            </name>
            <name name-style="western">
              <surname>Islam</surname>
              <given-names>FU</given-names>
            </name>
            <name name-style="western">
              <surname>Abbas</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Smart assistive system for visually impaired people obstruction avoidance through object detection and classification</article-title>
          <source>IEEE Access</source>
          <year>2022</year>
          <volume>10</volume>
          <fpage>13428</fpage>
          <lpage>41</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/9691323?utm_source=researcher_app&#38;utm_medium=referral&#38;utm_campaign=RESR_MRKT_Researcher_inbound"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/access.2022.3146320</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Khan</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Khusro</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>An insight into smartphone-based assistive solutions for visually impaired and blind people: issues, challenges and opportunities</article-title>
          <source>Univ Access Inf Soc</source>
          <year>2020</year>
          <month>07</month>
          <day>04</day>
          <volume>20</volume>
          <issue>2</issue>
          <fpage>265</fpage>
          <lpage>98</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10209-020-00733-8"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/S10209-020-00733-8</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kupferstein</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Rojnirun</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Findlater</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Azenkot</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>The effectiveness of visual and audio wayfinding guidance on Smartglasses for people with low vision</article-title>
          <source>Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems</source>
          <year>2020</year>
          <conf-name>CHI '20</conf-name>
          <conf-date>April 25-30, 2020</conf-date>
          <conf-loc>Honolulu, HI</conf-loc>
          <fpage>1</fpage>
          <lpage>14</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dl.acm.org/doi/10.1145/3313831.3376516"/>
          </comment>
          <pub-id pub-id-type="doi">10.1145/3313831.3376516</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Romlay</surname>
              <given-names>MR</given-names>
            </name>
            <name name-style="western">
              <surname>Toha</surname>
              <given-names>SF</given-names>
            </name>
            <name name-style="western">
              <surname>Ibrahim</surname>
              <given-names>AI</given-names>
            </name>
            <name name-style="western">
              <surname>Venkat</surname>
              <given-names>I</given-names>
            </name>
          </person-group>
          <article-title>Methodologies and evaluation of electronic travel aids for the visually impaired people: a review</article-title>
          <source>Bull Electr Eng Inform</source>
          <year>2021</year>
          <volume>10</volume>
          <issue>3</issue>
          <fpage>1747</fpage>
          <lpage>58</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://beei.org/index.php/EEI/article/view/3055/2219"/>
          </comment>
          <pub-id pub-id-type="doi">10.11591/eei.v10i3.3055</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>dos Santos</surname>
              <given-names>AD</given-names>
            </name>
            <name name-style="western">
              <surname>Medola</surname>
              <given-names>FO</given-names>
            </name>
            <name name-style="western">
              <surname>Cinelli</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Garcia Ramirez</surname>
              <given-names>AR</given-names>
            </name>
            <name name-style="western">
              <surname>Sandnes</surname>
              <given-names>FE</given-names>
            </name>
          </person-group>
          <article-title>Are electronic white canes better than traditional canes? A comparative study with blind and blindfolded participants</article-title>
          <source>Univ Access Inf Soc</source>
          <year>2020</year>
          <month>02</month>
          <day>17</day>
          <volume>20</volume>
          <issue>1</issue>
          <fpage>93</fpage>
          <lpage>103</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10209-020-00712-z"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10209-020-00712-z</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Caraiman</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Zvoristeanu</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Burlacu</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Herghelegiu</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Stereo vision based sensory substitution for the visually impaired</article-title>
          <source>Sensors (Basel)</source>
          <year>2019</year>
          <month>06</month>
          <day>20</day>
          <volume>19</volume>
          <issue>12</issue>
          <fpage>2771</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s19122771"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s19122771</pub-id>
          <pub-id pub-id-type="medline">31226796</pub-id>
          <pub-id pub-id-type="pii">s19122771</pub-id>
          <pub-id pub-id-type="pmcid">PMC6630569</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Neugebauer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Rifai</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Getzlaff</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Wahl</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Navigation aid for blind persons by visual-to-auditory sensory substitution: a pilot study</article-title>
          <source>PLoS One</source>
          <year>2020</year>
          <volume>15</volume>
          <issue>8</issue>
          <fpage>e0237344</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dx.plos.org/10.1371/journal.pone.0237344"/>
          </comment>
          <pub-id pub-id-type="doi">10.1371/journal.pone.0237344</pub-id>
          <pub-id pub-id-type="medline">32818953</pub-id>
          <pub-id pub-id-type="pii">PONE-D-20-09308</pub-id>
          <pub-id pub-id-type="pmcid">PMC7446825</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Brooks</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Kristjánsson</surname>
              <given-names>Á</given-names>
            </name>
            <name name-style="western">
              <surname>Unnthorsson</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Holmes</surname>
              <given-names>MP</given-names>
            </name>
          </person-group>
          <article-title>Sensory substitution: visual information via haptics</article-title>
          <source>Somatosensory Research Methods</source>
          <year>2023</year>
          <publisher-loc>Cham, Switzerland</publisher-loc>
          <publisher-name>Springer</publisher-name>
          <fpage>287</fpage>
          <lpage>302</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Buchs</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Haimler</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Kerem</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Maidenbaum</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Braun</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Amedi</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>A self-training program for sensory substitution devices</article-title>
          <source>PLoS One</source>
          <year>2021</year>
          <volume>16</volume>
          <issue>4</issue>
          <fpage>e0250281</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dx.plos.org/10.1371/journal.pone.0250281"/>
          </comment>
          <pub-id pub-id-type="doi">10.1371/journal.pone.0250281</pub-id>
          <pub-id pub-id-type="medline">33905446</pub-id>
          <pub-id pub-id-type="pii">PONE-D-20-29833</pub-id>
          <pub-id pub-id-type="pmcid">PMC8078811</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kuriakose</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Shrestha</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sandnes</surname>
              <given-names>FE</given-names>
            </name>
          </person-group>
          <article-title>Multimodal navigation systems for users with visual impairments—a review and analysis</article-title>
          <source>Multimodal Technol Interact</source>
          <year>2020</year>
          <month>10</month>
          <day>16</day>
          <volume>4</volume>
          <issue>4</issue>
          <fpage>73</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/2414-4088/4/4/73"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/MTI4040073</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bakir</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Mansour</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kamel</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Moustafa</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Khalil</surname>
              <given-names>MH</given-names>
            </name>
          </person-group>
          <article-title>The spatial experience of visually impaired and blind: an approach to understanding the importance of multisensory perception</article-title>
          <source>Civ Eng Archit</source>
          <year>2022</year>
          <month>03</month>
          <day>01</day>
          <volume>10</volume>
          <issue>2</issue>
          <fpage>644</fpage>
          <lpage>58</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.hrpub.org/journals/article_info.php?aid=11820"/>
          </comment>
          <pub-id pub-id-type="doi">10.13189/cea.2022.100220</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hoffmann</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Spagnol</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kristjánsson</surname>
              <given-names>Á</given-names>
            </name>
            <name name-style="western">
              <surname>Unnthorsson</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of an audio-haptic sensory substitution device for enhancing spatial awareness for the visually impaired</article-title>
          <source>Optom Vis Sci</source>
          <year>2018</year>
          <month>09</month>
          <volume>95</volume>
          <issue>9</issue>
          <fpage>757</fpage>
          <lpage>65</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30153241"/>
          </comment>
          <pub-id pub-id-type="doi">10.1097/OPX.0000000000001284</pub-id>
          <pub-id pub-id-type="medline">30153241</pub-id>
          <pub-id pub-id-type="pmcid">PMC6133230</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Real</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Araujo</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>VES: a mixed-reality development platform of navigation systems for blind and visually impaired</article-title>
          <source>Sensors (Basel)</source>
          <year>2021</year>
          <month>09</month>
          <day>18</day>
          <volume>21</volume>
          <issue>18</issue>
          <fpage>6275</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s21186275"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s21186275</pub-id>
          <pub-id pub-id-type="medline">34577482</pub-id>
          <pub-id pub-id-type="pii">s21186275</pub-id>
          <pub-id pub-id-type="pmcid">PMC8469526</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Xie</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>A multi-sensory guidance system for the visually impaired using YOLO and ORB-SLAM</article-title>
          <source>Information</source>
          <year>2022</year>
          <month>07</month>
          <day>15</day>
          <volume>13</volume>
          <issue>7</issue>
          <fpage>343</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/2078-2489/13/7/343"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/info13070343</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lecuyer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Mobuchon</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Megard</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Perret</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Andriot</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Colinot</surname>
              <given-names>JP</given-names>
            </name>
          </person-group>
          <article-title>HOMERE: a multimodal system for visually impaired people to explore virtual environments</article-title>
          <source>Proceedings of the 2003 IEEE Conference Virtual Reality</source>
          <year>2003</year>
          <conf-name>VR '03</conf-name>
          <conf-date>March 22-26, 2003</conf-date>
          <conf-loc>Washington, DC</conf-loc>
          <fpage>251</fpage>
          <lpage>8</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/abstract/document/1191147"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/vr.2003.1191147</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Boldini</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Garcia</surname>
              <given-names>AL</given-names>
            </name>
            <name name-style="western">
              <surname>Sorrentino</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Beheshti</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Ogedegbe</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Fang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Porfiri</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>JR</given-names>
            </name>
          </person-group>
          <article-title>An inconspicuous, integrated electronic travel aid for visual impairment</article-title>
          <source>ASME Letters Dyn Sys Control</source>
          <year>2021</year>
          <volume>1</volume>
          <issue>4</issue>
          <fpage>041004</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1115/1.4050186"/>
          </comment>
          <pub-id pub-id-type="doi">10.1115/1.4050186</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Boldini</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Porfiri</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>A piezoelectric-based advanced wearable: obstacle avoidance for the visually impaired built into a backpack</article-title>
          <source>Proc SPIE</source>
          <year>2020</year>
          <fpage>11378</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.spiedigitallibrary.org/conference-proceedings-of-spie/11378/2558306/A-piezoelectric-based-advanced-wearable--obstacle-avoidance-for-the/10.1117/12.2558306.short"/>
          </comment>
          <pub-id pub-id-type="doi">10.1117/12.2558306</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Phamduy</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Hudson</surname>
              <given-names>TE</given-names>
            </name>
            <name name-style="western">
              <surname>Torre</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Levon</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Porfiri</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Communicating through touch: macro fiber composites for tactile stimulation on the abdomen</article-title>
          <source>IEEE Trans Haptics</source>
          <year>2018</year>
          <month>4</month>
          <day>1</day>
          <volume>11</volume>
          <issue>2</issue>
          <fpage>174</fpage>
          <lpage>84</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/8207625"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/toh.2017.2781244</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ricci</surname>
              <given-names>FS</given-names>
            </name>
            <name name-style="western">
              <surname>Boldini</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Porfiri</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Learning to use electronic travel aids for visually impaired in virtual reality</article-title>
          <source>Proc SPIE</source>
          <year>2022</year>
          <fpage>1204504</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1117/12.2612097"/>
          </comment>
          <pub-id pub-id-type="doi">10.1117/12.2612097</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Jin</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Electronic travel aids for the blind based on sensory substitution</article-title>
          <source>Proceedings of the 5th International Conference on Computer Science &#38; Education</source>
          <year>2010</year>
          <conf-name>ICCSE '10</conf-name>
          <conf-date>August 24-27, 2010</conf-date>
          <conf-loc>Hefei, China</conf-loc>
          <fpage>1328</fpage>
          <lpage>31</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/5593738"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/iccse.2010.5593738</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ricci</surname>
              <given-names>FS</given-names>
            </name>
            <name name-style="western">
              <surname>Boldini</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ma</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Beheshti</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Geruschat</surname>
              <given-names>DR</given-names>
            </name>
            <name name-style="western">
              <surname>Seiple</surname>
              <given-names>WH</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Porfiri</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Virtual reality as a means to explore assistive technologies for the visually impaired</article-title>
          <source>PLOS Digit Health</source>
          <year>2023</year>
          <month>06</month>
          <volume>2</volume>
          <issue>6</issue>
          <fpage>e0000275</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/37339135"/>
          </comment>
          <pub-id pub-id-type="doi">10.1371/journal.pdig.0000275</pub-id>
          <pub-id pub-id-type="medline">37339135</pub-id>
          <pub-id pub-id-type="pii">PDIG-D-23-00009</pub-id>
          <pub-id pub-id-type="pmcid">PMC10281573</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>PR</given-names>
            </name>
            <name name-style="western">
              <surname>Somoskeöy</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Chow-Wing-Bom</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Crabb</surname>
              <given-names>DP</given-names>
            </name>
          </person-group>
          <article-title>Seeing other perspectives: evaluating the use of virtual and augmented reality to simulate visual impairments (OpenVisSim)</article-title>
          <source>NPJ Digit Med</source>
          <year>2020</year>
          <volume>3</volume>
          <fpage>32</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41746-020-0242-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41746-020-0242-6</pub-id>
          <pub-id pub-id-type="medline">32195367</pub-id>
          <pub-id pub-id-type="pii">242</pub-id>
          <pub-id pub-id-type="pmcid">PMC7064490</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Krösl</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Elvezio</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Hürbe</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Karst</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Feiner</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wimmer</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>XREye: simulating visual impairments in eye-tracked XR</article-title>
          <source>Proceedings of the 2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops</source>
          <year>2020</year>
          <conf-name>VRW '20</conf-name>
          <conf-date>March 22-26, 2020</conf-date>
          <conf-loc>Atlanta, GA</conf-loc>
          <fpage>830</fpage>
          <lpage>1</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/9090438"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/vrw50115.2020.00266</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dickinson</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>The effect of simulated visual impairment on speech-reading ability</article-title>
          <source>Ophthalmic Physiol Opt</source>
          <year>2011</year>
          <month>05</month>
          <volume>31</volume>
          <issue>3</issue>
          <fpage>249</fpage>
          <lpage>57</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://onlinelibrary.wiley.com/doi/pdf/10.1111/j.1475-1313.2010.00810.x"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/j.1475-1313.2010.00810.x</pub-id>
          <pub-id pub-id-type="medline">21410739</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Butt</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Crossland</surname>
              <given-names>MD</given-names>
            </name>
            <name name-style="western">
              <surname>West</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Orr</surname>
              <given-names>SW</given-names>
            </name>
            <name name-style="western">
              <surname>Rubin</surname>
              <given-names>GS</given-names>
            </name>
          </person-group>
          <article-title>Simulation contact lenses for AMD health state utility values in NICE appraisals: a different reality</article-title>
          <source>Br J Ophthalmol</source>
          <year>2015</year>
          <month>04</month>
          <volume>99</volume>
          <issue>4</issue>
          <fpage>540</fpage>
          <lpage>4</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://bjo.bmj.com/lookup/pmidlookup?view=long&#38;pmid=25351679"/>
          </comment>
          <pub-id pub-id-type="doi">10.1136/bjophthalmol-2014-305802</pub-id>
          <pub-id pub-id-type="medline">25351679</pub-id>
          <pub-id pub-id-type="pii">bjophthalmol-2014-305802</pub-id>
          <pub-id pub-id-type="pmcid">PMC4392203</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Goodman-Deane</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Waller</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bradley</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Yoxall</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Wiggins</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Clarkson</surname>
              <given-names>PJ</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Burgess</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Designing inclusive packaging</article-title>
          <source>Integrating the Packaging and Product Experience in Food and Beverages: A Road-Map to Consumer Satisfaction</source>
          <year>2016</year>
          <publisher-loc>Nashville, TN</publisher-loc>
          <publisher-name>Elsevier</publisher-name>
          <fpage>37</fpage>
          <lpage>57</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Scott</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Mclachlan</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Brookfield</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Inclusive design and pedagogy: an outline of three innovations</article-title>
          <source>Built Environ</source>
          <year>2018</year>
          <month>04</month>
          <day>01</day>
          <volume>44</volume>
          <issue>1</issue>
          <fpage>9</fpage>
          <lpage>22</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.ingentaconnect.com/content/alex/benv/2018/00000044/00000001/art00003"/>
          </comment>
          <pub-id pub-id-type="doi">10.2148/benv.44.1.9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Krohn</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Tromp</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Quinque</surname>
              <given-names>EM</given-names>
            </name>
            <name name-style="western">
              <surname>Belger</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Klotzsche</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Rekers</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chojecki</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>de Mooij</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Akbal</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>McCall</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Villringer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Gaebler</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Finke</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Thöne-Otto</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Multidimensional evaluation of virtual reality paradigms in clinical neuropsychology: application of the VR-check framework</article-title>
          <source>J Med Internet Res</source>
          <year>2020</year>
          <month>04</month>
          <day>27</day>
          <volume>22</volume>
          <issue>4</issue>
          <fpage>e16724</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2020/4/e16724/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/16724</pub-id>
          <pub-id pub-id-type="medline">32338614</pub-id>
          <pub-id pub-id-type="pii">v22i4e16724</pub-id>
          <pub-id pub-id-type="pmcid">PMC7215516</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Harris</surname>
              <given-names>DJ</given-names>
            </name>
            <name name-style="western">
              <surname>Bird</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Smart</surname>
              <given-names>PA</given-names>
            </name>
            <name name-style="western">
              <surname>Wilson</surname>
              <given-names>MR</given-names>
            </name>
            <name name-style="western">
              <surname>Vine</surname>
              <given-names>SJ</given-names>
            </name>
          </person-group>
          <article-title>A framework for the testing and validation of simulated environments in experimentation and training</article-title>
          <source>Front Psychol</source>
          <year>2020</year>
          <volume>11</volume>
          <fpage>605</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/32296379"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fpsyg.2020.00605</pub-id>
          <pub-id pub-id-type="medline">32296379</pub-id>
          <pub-id pub-id-type="pmcid">PMC7136518</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Arthur</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Loveland-Perkins</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Williams</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Harris</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Wilson</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>de Burgh</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Dhanda</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Vine</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Examining the validity and fidelity of a virtual reality simulator for basic life support training</article-title>
          <source>BMC Digit Health</source>
          <year>2023</year>
          <month>05</month>
          <day>11</day>
          <volume>1</volume>
          <issue>1</issue>
          <fpage>16</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1186/s44247-023-00016-1"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s44247-023-00016-1</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kern</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Ellermeier</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Audio in VR: effects of a soundscape and movement-triggered step sounds on presence</article-title>
          <source>Front Robot AI</source>
          <year>2020</year>
          <volume>7</volume>
          <fpage>20</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33501189"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/frobt.2020.00020</pub-id>
          <pub-id pub-id-type="medline">33501189</pub-id>
          <pub-id pub-id-type="pmcid">PMC7805954</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref53">
        <label>53</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Serafin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Serafin</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Sound design to enhance presence in photorealistic virtual reality</article-title>
          <source>Proceedings of ICAD 04-10th Meeting of the International Conference on Auditory Display</source>
          <year>2004</year>
          <conf-name>ICAD '04</conf-name>
          <conf-date>July 6-9, 2004</conf-date>
          <conf-loc>Sydney, Australia</conf-loc>
          <fpage>1</fpage>
          <lpage>4</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://api.semanticscholar.org/CorpusID:4517351"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref54">
        <label>54</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sodhi</surname>
              <given-names>PK</given-names>
            </name>
          </person-group>
          <article-title>Seeing world from the eyes of low vision subject</article-title>
          <source>Delhi J Ophthalmol</source>
          <year>2020</year>
          <month>10</month>
          <day>1</day>
          <volume>31</volume>
          <issue>2</issue>
          <fpage>23</fpage>
          <lpage>9</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.lww.com/djo/Abstract/2020/31020"/>
          </comment>
          <pub-id pub-id-type="doi">10.7869/djo.586</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref55">
        <label>55</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>SS</given-names>
            </name>
            <name name-style="western">
              <surname>Mackey</surname>
              <given-names>DA</given-names>
            </name>
          </person-group>
          <article-title>Glaucoma - risk factors and current challenges in the diagnosis of a leading cause of visual impairment</article-title>
          <source>Maturitas</source>
          <year>2022</year>
          <month>09</month>
          <volume>163</volume>
          <fpage>15</fpage>
          <lpage>22</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.sciencedirect.com/science/article/pii/S0378512222000950"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.maturitas.2022.05.002</pub-id>
          <pub-id pub-id-type="medline">35597227</pub-id>
          <pub-id pub-id-type="pii">S0378-5122(22)00095-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref56">
        <label>56</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zwierko</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Jedziniak</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Florkiewicz</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Ceylan</surname>
              <given-names>Hİ</given-names>
            </name>
            <name name-style="western">
              <surname>Lesiakowski</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Śliwiak</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Kirkiewicz</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Lubiński</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>The consequences of glaucoma on mobility and balance control in the older adults: a cross-sectional study</article-title>
          <source>J Aging Phys Act</source>
          <year>2021</year>
          <month>06</month>
          <day>01</day>
          <volume>29</volume>
          <issue>3</issue>
          <fpage>372</fpage>
          <lpage>81</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.humankinetics.com/view/journals/japa/29/3/article-p372.xml"/>
          </comment>
          <pub-id pub-id-type="doi">10.1123/japa.2020-0079</pub-id>
          <pub-id pub-id-type="medline">32994380</pub-id>
          <pub-id pub-id-type="pii">japa.2020-0079</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref57">
        <label>57</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>E</surname>
              <given-names>JY</given-names>
            </name>
            <name name-style="western">
              <surname>Mihailovic</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kuo</surname>
              <given-names>PL</given-names>
            </name>
            <name name-style="western">
              <surname>West</surname>
              <given-names>SK</given-names>
            </name>
            <name name-style="western">
              <surname>Friedman</surname>
              <given-names>DS</given-names>
            </name>
            <name name-style="western">
              <surname>Gitlin</surname>
              <given-names>LN</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Schrack</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Ramulu</surname>
              <given-names>PY</given-names>
            </name>
          </person-group>
          <article-title>Characterizing the impact of fear of falling on activity and falls in older adults with glaucoma</article-title>
          <source>J Am Geriatr Soc</source>
          <year>2020</year>
          <month>08</month>
          <volume>68</volume>
          <issue>8</issue>
          <fpage>1847</fpage>
          <lpage>51</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/32413186"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/jgs.16516</pub-id>
          <pub-id pub-id-type="medline">32413186</pub-id>
          <pub-id pub-id-type="pmcid">PMC7537827</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref58">
        <label>58</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hoppe</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Karolus</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Dietz</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Woźniak</surname>
              <given-names>PW</given-names>
            </name>
            <name name-style="western">
              <surname>Schmidt</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Machulla</surname>
              <given-names>TK</given-names>
            </name>
          </person-group>
          <article-title>VRsneaky: increasing presence in VR through gait-aware auditory feedback</article-title>
          <source>Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems</source>
          <year>2019</year>
          <conf-name>CHI '19</conf-name>
          <conf-date>May 4-9, 2019</conf-date>
          <conf-loc>Glasgow, Scotland</conf-loc>
          <fpage>1</fpage>
          <lpage>9</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dl.acm.org/doi/10.1145/3290605.3300776"/>
          </comment>
          <pub-id pub-id-type="doi">10.1145/3290605.3300776</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref59">
        <label>59</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dodsworth</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Norman</surname>
              <given-names>LJ</given-names>
            </name>
            <name name-style="western">
              <surname>Thaler</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Navigation and perception of spatial layout in virtual echo-acoustic space</article-title>
          <source>Cognition</source>
          <year>2020</year>
          <month>04</month>
          <volume>197</volume>
          <fpage>104185</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0010-0277(20)30004-4"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.cognition.2020.104185</pub-id>
          <pub-id pub-id-type="medline">31951856</pub-id>
          <pub-id pub-id-type="pii">S0010-0277(20)30004-4</pub-id>
          <pub-id pub-id-type="pmcid">PMC7033557</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref60">
        <label>60</label>
        <nlm-citation citation-type="web">
          <article-title>Oculus quest safety and warranty manual</article-title>
          <source>Oculus Safety Center</source>
          <access-date>2024-04-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.meta.com/quest/safety-center/quest-2/">https://www.meta.com/quest/safety-center/quest-2/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref61">
        <label>61</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Balasubramanian</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Melendez-Calderon</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Roby-Brami</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Burdet</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>On the analysis of movement smoothness</article-title>
          <source>J Neuroeng Rehabil</source>
          <year>2015</year>
          <month>12</month>
          <day>09</day>
          <volume>12</volume>
          <fpage>112</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://jneuroengrehab.biomedcentral.com/articles/10.1186/s12984-015-0090-9"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12984-015-0090-9</pub-id>
          <pub-id pub-id-type="medline">26651329</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12984-015-0090-9</pub-id>
          <pub-id pub-id-type="pmcid">PMC4674971</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref62">
        <label>62</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Budrionis</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Plikynas</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Daniušis</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Indrulionis</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Smartphone-based computer vision travelling aids for blind and visually impaired individuals: a systematic review</article-title>
          <source>Assist Technol</source>
          <year>2022</year>
          <month>03</month>
          <day>04</day>
          <volume>34</volume>
          <issue>2</issue>
          <fpage>178</fpage>
          <lpage>94</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.tandfonline.com/doi/full/10.1080/10400435.2020.1743381"/>
          </comment>
          <pub-id pub-id-type="doi">10.1080/10400435.2020.1743381</pub-id>
          <pub-id pub-id-type="medline">32207640</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref63">
        <label>63</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>El-Taher</surname>
              <given-names>FE</given-names>
            </name>
            <name name-style="western">
              <surname>Taha</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Courtney</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Mckeever</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>A systematic review of urban navigation systems for visually impaired people</article-title>
          <source>Sensors (Basel)</source>
          <year>2021</year>
          <month>04</month>
          <day>29</day>
          <volume>21</volume>
          <issue>9</issue>
          <fpage>3103</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s21093103"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s21093103</pub-id>
          <pub-id pub-id-type="medline">33946857</pub-id>
          <pub-id pub-id-type="pii">s21093103</pub-id>
          <pub-id pub-id-type="pmcid">PMC8125253</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref64">
        <label>64</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Islam</surname>
              <given-names>MM</given-names>
            </name>
            <name name-style="western">
              <surname>Sheikh Sadi</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Zamli</surname>
              <given-names>KZ</given-names>
            </name>
            <name name-style="western">
              <surname>Ahmed</surname>
              <given-names>MM</given-names>
            </name>
          </person-group>
          <article-title>Developing walking assistants for visually impaired people: a review</article-title>
          <source>IEEE Sensors J</source>
          <year>2019</year>
          <month>4</month>
          <day>15</day>
          <volume>19</volume>
          <issue>8</issue>
          <fpage>2814</fpage>
          <lpage>28</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/8598842"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/jsen.2018.2890423</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref65">
        <label>65</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pawluk</surname>
              <given-names>DT</given-names>
            </name>
            <name name-style="western">
              <surname>Adams</surname>
              <given-names>RJ</given-names>
            </name>
            <name name-style="western">
              <surname>Kitada</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Designing haptic assistive technology for individuals who are blind or visually impaired</article-title>
          <source>IEEE Trans Haptics</source>
          <year>2015</year>
          <month>7</month>
          <day>1</day>
          <volume>8</volume>
          <issue>3</issue>
          <fpage>258</fpage>
          <lpage>78</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ieeexplore.ieee.org/document/7226847"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/toh.2015.2471300</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
