<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Rehabil Assist Technol</journal-id><journal-id journal-id-type="publisher-id">rehab</journal-id><journal-id journal-id-type="index">17</journal-id><journal-title>JMIR Rehabilitation and Assistive Technologies</journal-title><abbrev-journal-title>JMIR Rehabil Assist Technol</abbrev-journal-title><issn pub-type="epub">2369-2529</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v13i1e87128</article-id><article-id pub-id-type="doi">10.2196/87128</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Validity and Reliability of the Track-UL Algorithm Compared With Kinovea Software for Measuring Upper-Limb Functional Range of Motion in People After Stroke: Cross-Sectional Observational Study</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Lazem</surname><given-names>Hatem</given-names></name><degrees>MSc</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Harris</surname><given-names>David</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff4">4</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Hall</surname><given-names>Abi</given-names></name><degrees>PhD</degrees><xref 
ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Richards</surname><given-names>Thomas C</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Leveridge</surname><given-names>Phaedra</given-names></name><degrees>BSc</degrees><xref ref-type="aff" rid="aff3">3</xref><xref ref-type="aff" rid="aff4">4</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Mansoubi</surname><given-names>Maedeh</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Xu</surname><given-names>Xiaohan</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff4">4</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Newell</surname><given-names>Paul</given-names></name><degrees>BSc, MSc</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Lamb</surname><given-names>Sarah E</given-names></name><degrees>MA, MSc, DPhil</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Dawes</surname><given-names>Helen</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff3">3</xref></contrib></contrib-group><aff id="aff1"><institution>Medical School, Faculty of Health and Life Sciences, University of Exeter</institution><addr-line>St Luke's Campus</addr-line><addr-line>Exeter</addr-line><country>United Kingdom</country></aff><aff id="aff2"><institution>Basic Science Department, Faculty of Physical Therapy, Cairo 
University</institution><addr-line>Giza</addr-line><country>Egypt</country></aff><aff id="aff3"><institution>Faculty of Health and Life Sciences, NIHR Exeter Biomedical Research Centre, University of Exeter</institution><addr-line>Exeter</addr-line><country>United Kingdom</country></aff><aff id="aff4"><institution>School of Public Health and Sport Science, Faculty of Health and Life Sciences, University of Exeter</institution><addr-line>Exeter</addr-line><country>United Kingdom</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Munce</surname><given-names>Sarah</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>ELMelhat</surname><given-names>Ahmed</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Son</surname><given-names>Jongsang</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Hatem Lazem, MSc, Medical School, Faculty of Health and Life Sciences, University of Exeter, St Luke's Campus, Exeter, EX1 2LP, United Kingdom, +44 7436 959585; <email>hl756@exeter.ac.uk</email></corresp></author-notes><pub-date pub-type="collection"><year>2026</year></pub-date><pub-date pub-type="epub"><day>11</day><month>5</month><year>2026</year></pub-date><volume>13</volume><elocation-id>e87128</elocation-id><history><date date-type="received"><day>27</day><month>11</month><year>2025</year></date><date date-type="rev-recd"><day>19</day><month>03</month><year>2026</year></date><date date-type="accepted"><day>09</day><month>04</month><year>2026</year></date></history><copyright-statement>&#x00A9; Hatem Lazem, David Harris, Abi Hall, Thomas C Richards, Phaedra Leveridge, Maedeh Mansoubi, Xiaohan Xu, Paul Newell, Sarah E Lamb, Helen Dawes. 
Originally published in JMIR Rehabilitation and Assistive Technology (<ext-link ext-link-type="uri" xlink:href="https://rehab.jmir.org">https://rehab.jmir.org</ext-link>), 11.5.2026. </copyright-statement><copyright-year>2026</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Rehabilitation and Assistive Technology, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://rehab.jmir.org/">https://rehab.jmir.org/</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://rehab.jmir.org/2026/1/e87128"/><abstract><sec><title>Background</title><p>Approximately 70% of survivors of stroke have problems with arm function. Physiotherapists assess arm functional range of motion (ROM) using either a goniometer or functional questionnaires, which lack objective accuracy and require a skilled physiotherapist. 
We developed the Track-UL algorithm based on a markerless motion capture system to measure arm ROM.</p></sec><sec><title>Objective</title><p>This study aimed to measure the agreement between our novel Track-UL algorithm and Kinovea software in assessing arm ROM during functional tasks in the laboratory and home settings.</p></sec><sec sec-type="methods"><title>Methods</title><p>Videos were recorded while 27 survivors of chronic stroke performed 4 functional tasks (forward reaching, arm abduction, moving the hand toward the mouth, and moving the hand toward the head) in the laboratory and at home. The videos were analyzed by 2 independent raters using the Track-UL algorithm and Kinovea software. The limits of agreement and intraclass correlation coefficients were calculated.</p></sec><sec sec-type="results"><title>Results</title><p>We found no clinically significant systematic bias in shoulder and elbow angle, with good agreement between the Track-UL algorithm and Kinovea software (assessed via Bland-Altman plots). The 95% limits of agreement were &#x2013;3.18 to 6.41 degrees for the shoulder joint and &#x2212;5.35 to 8.78 degrees for the elbow joint in the laboratory setting, and &#x2013;6.21 to 3.62 degrees for the shoulder joint and &#x2212;4.06 to 2.53 degrees for the elbow joint in the home setting. There was excellent absolute agreement between the measurement tools across all tasks and joints; intraclass correlation coefficient values ranged from 0.97 (95% CI 0.97-0.99) to 0.99 (95% CI 0.99-0.99; <italic>P</italic>&#x003C;.001 for both laboratory and home measurements).</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>The novel Track-UL algorithm is an accurate, valid, and easy tool that can be used to assess upper-limb ROM in survivors of stroke at clinics and potentially at home. 
This will support physiotherapists in remotely monitoring and adapting rehabilitation programs.</p></sec></abstract><kwd-group><kwd>stroke</kwd><kwd>telemonitoring</kwd><kwd>range of motion</kwd><kwd>MediaPipe</kwd><kwd>markerless tracking</kwd><kwd>validity</kwd><kwd>reliability</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Stroke is the second leading cause of death and the third leading cause of long-term disability, affecting the quality of life of approximately 70% of people who experience a stroke [<xref ref-type="bibr" rid="ref1">1</xref>]. Approximately 40% of people with stroke experience upper-limb problems at the chronic stage of recovery [<xref ref-type="bibr" rid="ref2">2</xref>]. Following a stroke, people can experience muscle weakness [<xref ref-type="bibr" rid="ref3">3</xref>], spasticity [<xref ref-type="bibr" rid="ref4">4</xref>], and changes in the pattern of arm mobility on the affected side [<xref ref-type="bibr" rid="ref5">5</xref>].</p><p>According to the National Institute for Health and Care Excellence guidelines, physiotherapists routinely design rehabilitation programs to improve upper-limb function and, therefore, need to efficiently and effectively assess and monitor improvements in the functional range of motion (ROM) in people with stroke [<xref ref-type="bibr" rid="ref6">6</xref>]. In clinical settings, physiotherapists typically use a clinical goniometer [<xref ref-type="bibr" rid="ref7">7</xref>] or validated clinical questionnaires such as the Fugl-Meyer Assessment for the upper extremity (FMA-UE) [<xref ref-type="bibr" rid="ref8">8</xref>] to evaluate and monitor the upper-limb ROM in people with stroke.</p><p>A goniometer is the current standard tool for assessing ROM in physiotherapy practice [<xref ref-type="bibr" rid="ref9">9</xref>]. 
However, this requires an adequate level of expertise from the practitioner to execute, so its precision can be influenced by clinical skills and experience [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref11">11</xref>]. In addition, the presence of compensation during upper-limb movements, such as shoulder and trunk rotation, and the disruption of elbow-shoulder coordination can affect the ease with which goniometers can be used to measure ROM [<xref ref-type="bibr" rid="ref12">12</xref>]. A limitation of upper-limb clinical assessment questionnaires such as the FMA-UE scale is the time required to complete them, which can be up to 25 minutes. They also rely on a Likert-based rating system in which scores of 0, 1, and 2 indicate no movement, partial movement, and full movement, respectively, which is discrete and bounded, depends on the raters&#x2019; experience, and lacks the precision in quantifying progression and monitoring small changes in arm mobility over time [<xref ref-type="bibr" rid="ref8">8</xref>].</p><p>An alternative approach is the use of affordable and free 2D markerless motion analysis tools such as the Kinovea software that allow therapists to measure the upper-limb kinematics from video footage [<xref ref-type="bibr" rid="ref13">13</xref>]. However, such tools are better suited to research as the time and expertise of an experienced health care professional required for operation make them less accessible to physiotherapists in clinics. Computer vision algorithms based on markerless motion capture and pose estimation frameworks offer a potential solution for faster and easier measurement of the ROM without the need for an experienced health care professional [<xref ref-type="bibr" rid="ref14">14</xref>]. 
So far, computer vision&#x2013;based algorithms have been validated for people with ankylosing spondylitis [<xref ref-type="bibr" rid="ref15">15</xref>] and following knee replacement [<xref ref-type="bibr" rid="ref16">16</xref>] but are still in need of validation among people with stroke, who have different movement complexities such as compensatory movement patterns and reduced ROM, which differ from the normative datasets on which most algorithms are trained, which can introduce the possibility of tracking errors [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref18">18</xref>].</p><p>Therefore, based on the gap in the current literature on the validity and reliability of the pose estimation algorithms in stroke, the aim of this study was to assess the concurrent validity and test-retest reliability of a novel Track-UL algorithm based on MediaPipe, a pose estimation framework, as an objective and easy-to-use tool for measuring and monitoring upper-limb mobility in people with stroke in the laboratory and home settings.</p><p>The main objectives of this study were as follows:</p><list list-type="bullet"><list-item><p>To investigate the concurrent validity of the Track-UL algorithm based on a 2D pose estimation framework compared to the Kinovea software for measuring shoulder and elbow joint functional active ROM</p></list-item><list-item><p>To investigate the test-retest reliability of the Track-UL algorithm based on a 2D pose estimation framework for measuring shoulder and elbow joint functional active ROM</p></list-item><list-item><p>To explore the potential for using the Track-UL algorithm as a telemonitoring tool for measuring upper-limb functional active ROM from videos recorded by people with stroke in their own homes</p></list-item></list></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Ethical Considerations</title><p>This study was conducted in 
accordance with the ethical standards of the responsible institutional and national committees on human experimentation and with the World Medical Association Declaration of Helsinki (1975, as revised in 2013). This project was reviewed and approved by an independent National Health Service research ethics committee and the Health Research Authority (reference 24/LO/0434). Written informed consent was obtained from all participants prior to their inclusion in the study. The privacy and confidentiality of participants&#x2019; data and identities were strictly maintained. Personally identifiable information was stored securely and separately from research data, with access limited to authorized members of the research team. Participants received compensation for travel expenses associated with study participation.</p></sec><sec id="s2-2"><title>Participants</title><p>This was a cross-sectional observational study in which a total of 27 people with stroke participated. Data collection was conducted in the laboratory by the researcher (a physiotherapist) at the University of Exeter and in participants&#x2019; homes. We recruited people with stroke aged 18 years or older who were more than 6 months after stroke with a score of 2 or lower on the Modified Ashworth Scale and any degree of upper-limb impairment resulting from stroke (FMA-UE score&#x003C;57). Survivors of stroke were excluded if they had severe cognitive deficits and could not follow the instructions given by the researcher or had severe spasticity (score of &#x003E;2 on the Modified Ashworth Scale). 
The reason for these exclusions was that these patient groups are not able to achieve active movements, which are a requirement of the tasks this study aimed to validate.</p></sec><sec id="s2-3"><title>Procedure</title><p>All participants completed a single face-to-face laboratory testing session at the University of Exeter with a physiotherapist, which lasted up to 60 minutes, including collection of demographic data and assessment of upper-limb ROM, with videos recorded while participants performed 4 functional upper-limb tasks (<xref ref-type="fig" rid="figure1">Figure 1</xref>). Participants were then asked to repeat the same 4 tasks alone at home with or without support from their carers following written instructions provided by the research team while recording videos of the tasks within 3 days following the initial assessment.</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>The 4 functional complex upper-limb tasks.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig01.png"/></fig><p>To familiarize participants with the tasks, each movement was demonstrated to participants, who then practiced it twice before starting. Participants were asked to wear light clothes with sleeveless shirts to aid identification of bony landmarks on the upper limb. Before recording the videos, colored circular stickers (markers) were applied to predefined bony landmarks of the upper limb (acromion process, anterior and midthoracic line, lateral epicondyle, midshaft of the humerus, midforearm, and midwrist). The use of these markers enabled measurement of shoulder and elbow range of movements during the 4 tasks (<xref ref-type="fig" rid="figure1">Figure 1</xref>). 
For the videos recorded at home by the participants or their carers, we used virtual markers implemented in the Kinovea software.</p><p>All videos were imported into a laptop and analyzed using both the Kinovea software and PyCharm Community Edition (version 2022.3.1; JetBrains) using the Track-UL algorithm, a function programmed in PyCharm. This function calculates the angle of joints of the human body based on the joint coordinates using the 2D vector dot product formula. All videos were recorded by 1 physiotherapist. Two physiotherapists (one of whom recorded the videos) who were blinded to each other&#x2019;s measurements then independently analyzed the video recordings: one using the Kinovea software and the other using the Track-UL algorithm.</p><p>For home testing, we asked the participants themselves or their carers to record the videos only for the affected side within 3 days of the laboratory test and then send the videos back to the researcher.</p></sec><sec id="s2-4"><title>Instruments</title><p>Kinovea is free and open-source analysis software used by health care professionals in research and clinical settings. It is considered to be a valid and reliable tool for measuring ROM using a digital goniometer, which is widely used in clinical and sports biomechanics for 2D kinematic analysis [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref20">20</xref>]. While the gold standard for measuring ROM is the 3D motion capture system, using Kinovea allows for a more applicable comparison aligned with the intended use of this algorithm in real-world environments such as a clinic or the home setting, in which stroke assessment and telerehabilitation usually occur. Analysis of the captured videos was carried out using Kinovea (version 0.9.5; Joan Charmant). 
Circular colored markers were placed on the bony landmarks of the upper limb prior to capture to facilitate calculation of upper-limb joint angles using a digital goniometer.</p><p>MediaPipe is an open-source pose estimation model developed by Google for machine learning applications [<xref ref-type="bibr" rid="ref21">21</xref>] (<xref ref-type="fig" rid="figure2">Figure 2</xref> [<xref ref-type="bibr" rid="ref22">22</xref>]). The model infers 33 pose landmarks from the whole body; offers real-time processing and compatibility with standard video input from commonly available devices such as mobile phones; and offers a practical balance between accuracy and usability [<xref ref-type="bibr" rid="ref23">23</xref>] compared to other pose estimation software such as OpenPose [<xref ref-type="bibr" rid="ref24">24</xref>] and OpenCap [<xref ref-type="bibr" rid="ref25">25</xref>] that require high computational resources and more complex setup, which can limit their applicability in a clinical setting or at home. The Track-UL algorithm was developed using PyCharm for automatic calculation of both shoulder and elbow joint angles from videos, which are then presented to the user. To calculate joint angles, coordinates for bony landmarks (hip, shoulder, elbow, and wrist) were extracted from video frames using the MediaPipe pose estimation framework. Each landmark provides 2D coordinates (<italic>x</italic>, <italic>y</italic>). 
Shoulder and elbow joint angles were computed by treating the connected rigid body segments (trunk, upper arm, and forearm) as 2D vectors (<xref ref-type="fig" rid="figure2">Figures 2</xref> and <xref ref-type="fig" rid="figure3">3</xref> [<xref ref-type="bibr" rid="ref22">22</xref>]).</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>BlazePose model of 33 human poses [<xref ref-type="bibr" rid="ref22">22</xref>].</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig02.png"/></fig><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>How the video appears to the physiotherapist when we apply the Track-UL algorithm.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig03.png"/></fig><p>A 2D vector dot product formula was used to calculate shoulder joint angle (term B in the following formulas) using coordinates of the shoulder joint and the adjacent hip (term A) and elbow (term C) joints:</p><disp-formula id="equWL1"><mml:math id="eqn1"><mml:mi>&#x03B8;</mml:mi><mml:mtext>=</mml:mtext><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>s</mml:mi><mml:mfenced separators="|"><mml:mrow><mml:mfrac><mml:mrow><mml:mfenced separators="|"><mml:mrow><mml:mi>A</mml:mi><mml:mtext>-</mml:mtext><mml:mi>B</mml:mi></mml:mrow></mml:mfenced><mml:mo>&#x22C5;</mml:mo><mml:mfenced separators="|"><mml:mrow><mml:mi>C</mml:mi><mml:mtext>-</mml:mtext><mml:mi>B</mml:mi></mml:mrow></mml:mfenced></mml:mrow><mml:mrow><mml:mfenced open="&#x2016;" close="&#x2016;" separators="|"><mml:mrow><mml:mi>A</mml:mi><mml:mtext>-</mml:mtext><mml:mi>B</mml:mi></mml:mrow></mml:mfenced><mml:mfenced open="&#x2016;" close="&#x2016;" 
separators="|"><mml:mrow><mml:mi>C</mml:mi><mml:mtext>-</mml:mtext><mml:mi>B</mml:mi></mml:mrow></mml:mfenced></mml:mrow></mml:mfrac></mml:mrow></mml:mfenced></mml:math></disp-formula><disp-formula id="equWL2"><mml:math id="eqn2"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mi>&#x03B8;</mml:mi><mml:mo>=</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>s</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mi>A</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x22C5;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mi>C</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>+</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>y</mml:mi><mml:mi>A</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x22C5;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>y</mml:mi><mml:mi>C</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mo>/</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msqrt><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mi>A</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi><mml:mi>B</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mn>2</mml:mn></mml:msup><mml:mo>+</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>y</mml:mi><mml:mi>A</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mi>B</mml:mi><mml:msup><mml:mo 
stretchy="false">)</mml:mo><mml:mn>2</mml:mn></mml:msup><mml:mo stretchy="false">)</mml:mo></mml:msqrt><mml:mo>&#x00D7;</mml:mo><mml:msqrt><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mi>C</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi><mml:mi>B</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mn>2</mml:mn></mml:msup><mml:mo>+</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>y</mml:mi><mml:mi>C</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mi>B</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mn>2</mml:mn></mml:msup></mml:msqrt></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mstyle></mml:mrow></mml:mstyle></mml:math></disp-formula></sec><sec id="s2-5"><title>Video Processing</title><p>The researcher recorded 8 videos of 4 different upper-limb functional tasks performed on each side (affected and unaffected side) using a mobile phone camera (iPhone 13 Pro Max [Apple Inc] with a 12-MP main camera with sensor-shift stabilization, 1.9-&#x00B5;m pixels, and a 26-mm equivalent f/1.5-aperture lens) and a tripod. The camera was placed 1.5 m from the participants on a tripod at a height of 90 cm oriented with an angle of 90 degrees perpendicular to the chair. Participants were then asked to record the same videos at home following standardized instructions (<xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>) within the following 3 days and only for the affected side to lower the risk of fatigue from repeated movements. Each task was repeated 3 times independently in each video with 10 seconds of rest in between each repetition and one 1-minute rest in between each task. 
During analysis, there were no apparent challenges regarding landmark detection by the Track-UL algorithm.</p><p>This study measured the functional active ROM for both the shoulder joint (flexion and abduction) and the elbow joint (flexion and extension) during 4 different functional complex tasks, which are routinely used in the clinic as part of the FMA-UE assessment. We measured both the affected and unaffected sides to understand the extent to which the Track-UL algorithm can estimate the functional active ROM accurately on the unaffected side as well as the affected side, which can include some substitute movements due to the motor impairments compared to the unaffected side.</p></sec><sec id="s2-6"><title>Analysis</title><p>We used SPSS Statistics (version 29; IBM Corp) for data analysis. To assess the Track-UL algorithm&#x2019;s concurrent validity against Kinovea, absolute agreement was evaluated using Bland-Altman analysis [<xref ref-type="bibr" rid="ref26">26</xref>] to obtain the 95% limit of agreement (LoA) and mean bias metrics using the following equation: LoA = mean difference &#x00B1; 1.96 &#x00D7; SD difference.</p><p>To measure the absolute agreement between our algorithm and Kinovea, we calculated the intraclass correlation coefficient (ICC) based on a 2-way random-effect model for a single measurement with an absolute agreement model (ICC(2,1)). 
This model accounts for both participant variability and systematic differences between the 2 assessment tools.</p><p>To assess the test-retest reliability for the Track-UL algorithm, we also calculated the ICC(2,1) for the repeated measures, the SE of measurement, and the minimum detectable change using the following equations [<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref28">28</xref>]:</p><disp-formula id="E4"><mml:math id="eqn3"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mi>S</mml:mi><mml:mi>E</mml:mi><mml:mi>M</mml:mi><mml:mo>=</mml:mo><mml:mi>S</mml:mi><mml:mi>t</mml:mi><mml:mi>D</mml:mi><mml:mi>e</mml:mi><mml:mi>v</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:msqrt><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mi>I</mml:mi><mml:mi>C</mml:mi><mml:mi>C</mml:mi></mml:msqrt></mml:mstyle></mml:mrow></mml:mstyle></mml:math></disp-formula><disp-formula id="E5"><mml:math id="eqn4"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mi>M</mml:mi><mml:mi>D</mml:mi><mml:mi>C</mml:mi><mml:mo>=</mml:mo><mml:mi>S</mml:mi><mml:mi>E</mml:mi><mml:mi>M</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mn>1.96</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:msqrt><mml:mn>2</mml:mn></mml:msqrt></mml:mstyle></mml:mrow></mml:mstyle></mml:math></disp-formula><p>ICC values between 0.81 and 1.0 are interpreted as very good or excellent, ICC values between 0.61 and 0.80 are interpreted as good, ICC values between 0.41 and 0.60 are interpreted as moderate, ICC values between 0.21 and 0.40 are interpreted as fair, and ICC values below 0.20 are interpreted as poor [<xref ref-type="bibr" rid="ref29">29</xref>].</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Overview</title><p>The demographic data of the participants are listed in <xref ref-type="table" rid="table1">Table 1</xref>.</p><table-wrap id="t1" 
position="float"><label>Table 1.</label><caption><p>Participants&#x2019; demographic data (N=27).</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Characteristics</td><td align="left" valign="bottom">Participants</td></tr></thead><tbody><tr><td align="left" valign="top">Age (years), mean (SD; range)</td><td align="left" valign="top">60.56 (13.82; 26-88)</td></tr><tr><td align="left" valign="top">Time since stroke (months), mean (SD; range)</td><td align="left" valign="top">42.93 (33.07; 6-126)</td></tr><tr><td align="left" valign="top">FMA-UE<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup> score, mean (SD; range)</td><td align="left" valign="top">32.15 (16.21; 10-55)</td></tr><tr><td align="left" valign="top">MAS<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup> score&#x2014;shoulder, mean (SD; range)</td><td align="left" valign="top">1.22 (0.42; 1-2)</td></tr><tr><td align="left" valign="top">MAS score&#x2014;elbow, mean (SD; range)</td><td align="left" valign="top">1.44 (0.51; 1-2)</td></tr><tr><td align="left" valign="top">MAS score&#x2014;hand, mean (SD; range)</td><td align="left" valign="top">1.41 (0.50; 1-2)</td></tr><tr><td align="left" valign="top" colspan="2">Sex, n (%)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Male</td><td align="left" valign="top">21 (77.8)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Female</td><td align="left" valign="top">6 (22.2)</td></tr><tr><td align="left" valign="top" colspan="2">Type of stroke, n (%)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Ischemic</td><td align="left" valign="top">19 (70.4)</td></tr><tr><td align="left" valign="top"><named-content 
content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Hemorrhagic</td><td align="left" valign="top">8 (29.6)</td></tr><tr><td align="left" valign="top" colspan="2">Affected side, n (%)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Right</td><td align="left" valign="top">16 (59.3)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Left</td><td align="left" valign="top">11 (40.7)</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>FMA-UE: Fugl-Meyer Assessment for the upper extremity.</p></fn><fn id="table1fn2"><p><sup>b</sup>MAS: Modified Ashworth Scale.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-2"><title>Concurrent Validity for Laboratory Measurement</title><p>The Bland-Altman plots (<xref ref-type="fig" rid="figure4">Figures 4</xref> and <xref ref-type="fig" rid="figure5">5</xref>) reveal that the mean difference in shoulder and elbow joint angles between the Track-UL algorithm and Kinovea was close to 0, with a mean value ranging from &#x2212;0.34 (SD 1.13) to 0.76 (SD 1.25) degrees for the unaffected side and &#x2212;1.24 (SD 3.84) to 1.17 (SD 2.67) degrees for the affected side, indicating no systematic bias. The 95% LoA for joint measurements on the unaffected side ranged from &#x2212;2.88 to 3.03 degrees for the shoulder and from &#x2212;3.41 to 3.21 degrees for the elbow (<xref ref-type="table" rid="table2">Table 2</xref>). On the affected side, the 95% LoA ranged from &#x2212;5.07 to 6.41 degrees for the shoulder and &#x2212;8.78 to 6.29 degrees for the elbow (<xref ref-type="table" rid="table2">Table 2</xref>). This difference is not considered clinically significant [<xref ref-type="bibr" rid="ref30">30</xref>]. 
There are a few outliers in some of the graphs; however, there is no clear evidence of a trend or heteroscedasticity in the plots except for the shoulder joint during shoulder abduction (task 1) on the affected side. In this case, a proportional bias was observed: as the abduction angle increased, the positive difference also increased. However, this positive difference of up to 6.41 degrees does not have a clinically significant impact on the measurement of the shoulder functional active ROM [<xref ref-type="bibr" rid="ref30">30</xref>].</p><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>Bland-Altman plots for comparing the Track-UL algorithm and Kinovea software for the shoulder joint in both the unaffected and affected side in the laboratory setting.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig04.png"/></fig><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>Bland-Altman plots for comparing the Track-UL algorithm and Kinovea software for the elbow joint in both the unaffected and affected side in the laboratory setting.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig05.png"/></fig><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Mean values and limit of agreement (LoA) between the Track-UL algorithm and the Kinovea software based on the laboratory measurements.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Tested side and task number</td><td align="left" valign="bottom" colspan="4">Elbow joint</td><td align="left" valign="bottom" colspan="4">Shoulder joint</td></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Observations, n</td><td align="left" valign="top">Values, mean (SD; 95% CI)</td><td align="left" valign="top">Lower LoA</td><td align="left" 
valign="top">Upper LoA</td><td align="left" valign="top">Observations, n</td><td align="left" valign="top">Values, mean (SD; 95% CI)</td><td align="left" valign="top">Lower LoA</td><td align="left" valign="top">Upper LoA</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="9">Unaffected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup></td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">81</td><td align="left" valign="top">0.21 (1.45; &#x2013;0.12 to 0.52)</td><td align="left" valign="top">&#x2212;2.64</td><td align="left" valign="top">3.03</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">81</td><td align="left" valign="top">&#x2212;0.16 (1.65; &#x2013;0.52 to 0.21)</td><td align="left" valign="top">&#x2212;3.41</td><td align="left" valign="top">3.08</td><td align="left" valign="top">80</td><td align="left" valign="top">&#x2212;0.09 (1.43; &#x2013;0.40 to 0.23)</td><td align="left" valign="top">&#x2212;2.88</td><td align="left" valign="top">2.71</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">81</td><td align="left" valign="top">0.76 (1.25; 0.48 to 1.03)</td><td align="left" valign="top">&#x2212;1.69</td><td align="left" valign="top">3.21</td><td align="left" valign="top">81</td><td align="left" valign="top">&#x2212;0.02 (1.06; &#x2013;0.26 to 0.21)</td><td align="left" valign="top">&#x2212;2.10</td><td align="left" valign="top">2.06</td></tr><tr><td align="left" valign="top"><named-content 
content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">81</td><td align="left" valign="top">&#x2212;0.11 (1.05; &#x2013;0.34 to 0.11)</td><td align="left" valign="top">&#x2212;2.19</td><td align="left" valign="top">1.95</td><td align="left" valign="top">81</td><td align="left" valign="top">&#x2212;0.34 (1.13; &#x2013;0.59 to &#x2013;0.09)</td><td align="left" valign="top">&#x2212;2.56</td><td align="left" valign="top">1.87</td></tr><tr><td align="left" valign="top" colspan="9">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">&#x2003;&#x2014;</td><td align="left" valign="top">84</td><td align="left" valign="top">1.17 (2.67; 0.59 to 1.75)</td><td align="left" valign="top">&#x2212;4.06</td><td align="left" valign="top">6.41</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2212;1.24 (3.84; &#x2013;2.07 to &#x2013;0.40)</td><td align="left" valign="top">&#x2212;8.78</td><td align="left" valign="top">6.29</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2212;0.01 (2.58; &#x2013;0.57 to 0.55)</td><td align="left" valign="top">&#x2212;5.07</td><td align="left" valign="top">5.05</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2212;0.07 (1.23; &#x2013;0.34 to 0.18)</td><td align="left" valign="top">&#x2212;2.49</td><td align="left" valign="top">2.33</td><td align="left" valign="top">84</td><td align="left" valign="top">0.02 (1.63; 
&#x2013;0.34 to 0.37)</td><td align="left" valign="top">&#x2212;3.18</td><td align="left" valign="top">3.22</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2212;0.77 (2.33; &#x2013;1.27 to &#x2013;0.26)</td><td align="left" valign="top">&#x2212;5.35</td><td align="left" valign="top">3.81</td><td align="left" valign="top">84</td><td align="left" valign="top">&#x2212;0.23 (1.56; &#x2013;0.57 to 0.11)</td><td align="left" valign="top">&#x2212;3.31</td><td align="left" valign="top">2.84</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>Not applicable.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-3"><title>Absolute Agreement Analysis for Laboratory Measurement</title><p>We used the ICC to assess the absolute agreement between the Track-UL algorithm and the Kinovea software when measuring ROM from videos recorded in the laboratory. The ICC(2,1) for the absolute agreement was calculated, showing excellent agreement across all tasks, joints, and sides (unaffected and affected). The ICC values ranged from 0.98 to 0.99 for the unaffected side and from 0.98 to 0.99 for the affected side (<xref ref-type="table" rid="table3">Table 3</xref>). 
This indicates that measurements were very consistent between the 2 assessment tools.</p><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Intraclass correlation between the Track-UL algorithm and the Kinovea software (laboratory setting).</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Side and task number</td><td align="left" valign="bottom">Joint</td><td align="left" valign="bottom">Intraclass correlation coefficient (95% CI)</td><td align="left" valign="bottom"><italic>F</italic> test (<italic>df</italic>)</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="5">Unaffected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">714.557 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.98 (0.98-0.99)</td><td align="left" valign="top">156.212 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.98 (0.98-0.99)</td><td align="left" valign="top">156.212 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">879.210 (80, 80)</td><td 
align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">1027.538 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">473.902 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">481.433 (80, 80)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top" colspan="5">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">455.606 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">677.202 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.98 (0.98-0.99)</td><td align="left" valign="top">164.610 (83, 83)</td><td 
align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">1001.732 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.98-0.99)</td><td align="left" valign="top">216.907 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.98-0.99)</td><td align="left" valign="top">260.500 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">959.581 (83, 83)</td><td align="left" valign="top">&#x003C;.001</td></tr></tbody></table></table-wrap></sec><sec id="s3-4"><title>Track-UL Algorithm Test-Retest Reliability for Laboratory Measurement</title><p>The Track-UL algorithm reliability results for both the shoulder and elbow joints on both sides are shown in <xref ref-type="table" rid="table4">Table 4</xref>. 
The ICC showed that the Track-UL algorithm had excellent reliability (all ICC values&#x003E;0.81).</p><table-wrap id="t4" position="float"><label>Table 4.</label><caption><p>Track-UL algorithm test-retest reliability for the laboratory measurements (N=27).</p></caption><table id="table4" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Tested side and task number</td><td align="left" valign="bottom">Joint</td><td align="left" valign="bottom">ICC<sup><xref ref-type="table-fn" rid="table4fn1">a</xref></sup></td><td align="left" valign="bottom">SEM<sup><xref ref-type="table-fn" rid="table4fn2">b</xref></sup></td><td align="left" valign="bottom">MDC<sup><xref ref-type="table-fn" rid="table4fn3">c</xref></sup></td></tr></thead><tbody><tr><td align="left" valign="top" colspan="5">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.95</td><td align="left" valign="top">5.32</td><td align="left" valign="top">14.77</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.91</td><td align="left" valign="top">11.78</td><td align="left" valign="top">32.65</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.95</td><td align="left" valign="top">7.41</td><td align="left" valign="top">20.55</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.97</td><td align="left" valign="top">4.11</td><td align="left" valign="top">11.41</td></tr><tr><td 
align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.98</td><td align="left" valign="top">4.75</td><td align="left" valign="top">13.19</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.93</td><td align="left" valign="top">6.56</td><td align="left" valign="top">18.19</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.88</td><td align="left" valign="top">4.20</td><td align="left" valign="top">11.65</td></tr><tr><td align="left" valign="top" colspan="5">Unaffected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.84</td><td align="left" valign="top">3.83</td><td align="left" valign="top">10.62</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.91</td><td align="left" valign="top">5.82</td><td align="left" valign="top">16.15</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.93</td><td align="left" valign="top">2.90</td><td align="left" valign="top">8.04</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td align="left" 
valign="top">0.92</td><td align="left" valign="top">5.19</td><td align="left" valign="top">14.39</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.89</td><td align="left" valign="top">7.20</td><td align="left" valign="top">19.96</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.87</td><td align="left" valign="top">5.43</td><td align="left" valign="top">15.07</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.97</td><td align="left" valign="top">1.87</td><td align="left" valign="top">5.20</td></tr></tbody></table><table-wrap-foot><fn id="table4fn1"><p><sup>a</sup>ICC: intraclass correlation coefficient.</p></fn><fn id="table4fn2"><p><sup>b</sup>SEM: SE of measurement.</p></fn><fn id="table4fn3"><p><sup>c</sup>MDC: minimum detectable change.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-5"><title>Concurrent Validity for Home Measurement</title><p>Only 48.1% (13/27) of the participants were able to record videos at home that were fit for analysis. For the acceptable videos, the Bland-Altman plots (<xref ref-type="fig" rid="figure6">Figures 6</xref> and <xref ref-type="fig" rid="figure7">7</xref>) indicated that the mean difference in the shoulder and elbow joint angles between the Track-UL algorithm and Kinovea software was close to 0; the mean values ranged from &#x2212;1.29 to 0.39 degrees, indicating no systematic bias. The 95% LoA ranged from &#x2212;6.21 to 3.62 degrees for shoulder joint measurements and from &#x2212;4.06 to 2.53 degrees for elbow joint measurements. 
This difference is not considered clinically significant [<xref ref-type="bibr" rid="ref30">30</xref>] (<xref ref-type="table" rid="table5">Table 5</xref>).</p><fig position="float" id="figure6"><label>Figure 6.</label><caption><p>Bland-Altman plots for comparing the Track-UL algorithm and Kinovea software for the shoulder joint in the affected side in the home setting.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig06.png"/></fig><fig position="float" id="figure7"><label>Figure 7.</label><caption><p>Bland-Altman plots for comparing the Track-UL algorithm and Kinovea software for the elbow joint in the affected side in the home setting.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="rehab_v13i1e87128_fig07.png"/></fig><p>There was no clear evidence of a trend or heteroscedasticity on the plots except for the shoulder joint during shoulder elevation (task 2), which displayed a proportional bias whereby the positive difference increased at higher angles of elevation and the negative difference increased at lower angles. 
However, the positive difference of up to 3.45 degrees and the negative difference of up to &#x2212;2.66 degrees had no substantial clinical influence on measuring shoulder functional active ROM [<xref ref-type="bibr" rid="ref30">30</xref>] (<xref ref-type="table" rid="table5">Table 5</xref>).</p><table-wrap id="t5" position="float"><label>Table 5.</label><caption><p>The limit of agreement (LoA) between the Track-UL algorithm and the Kinovea software from videos recorded at home.</p></caption><table id="table5" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Tested side and task number</td><td align="left" valign="bottom" colspan="4">Elbow joint</td><td align="left" valign="bottom" colspan="4">Shoulder joint</td></tr><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">Observations, n</td><td align="left" valign="bottom">Mean (SD; 95% CI)</td><td align="left" valign="bottom">Lower LoA</td><td align="left" valign="bottom">Upper LoA</td><td align="left" valign="bottom">Observations, n</td><td align="left" valign="bottom">Mean (SD; 95% CI)</td><td align="left" valign="bottom">Lower LoA</td><td align="left" valign="bottom">Upper LoA</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="9">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table5fn1">a</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">39</td><td align="left" valign="top">&#x2212;1.29 (2.51; &#x2013;2.11 to &#x2013;0.47)</td><td align="left" valign="top">&#x2212;6.21</td><td align="left" valign="top">3.62</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" 
valign="top">39</td><td align="left" valign="top">&#x2212;1.25 (1.39; &#x2013;1.71 to &#x2013;0.81)</td><td align="left" valign="top">&#x2212;3.99</td><td align="left" valign="top">1.47</td><td align="left" valign="top">39</td><td align="left" valign="top">0.39 (1.56; &#x2013;0.11 to 0.89)</td><td align="left" valign="top">&#x2212;2.66</td><td align="left" valign="top">3.45</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">39</td><td align="left" valign="top">&#x2212;0.63 (1.61; &#x2013;1.15 to &#x2013;0.11)</td><td align="left" valign="top">&#x2212;3.79</td><td align="left" valign="top">2.53</td><td align="left" valign="top">39</td><td align="left" valign="top">&#x2212;0.22 (1.69; &#x2013;0.77 to 0.32)</td><td align="left" valign="top">&#x2212;3.54</td><td align="left" valign="top">3.09</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">39</td><td align="left" valign="top">&#x2212;1.01 (1.56; &#x2013;1.51 to &#x2013;0.49)</td><td align="left" valign="top">&#x2212;4.06</td><td align="left" valign="top">2.05</td><td align="left" valign="top">39</td><td align="left" valign="top">&#x2212;0.27 (1.36; &#x2013;0.71 to 0.17)</td><td align="left" valign="top">&#x2212;2.95</td><td align="left" valign="top">2.41</td></tr></tbody></table><table-wrap-foot><fn id="table5fn1"><p><sup>a</sup>Not applicable.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-6"><title>Absolute Agreement Analysis for Home Measurement</title><p>The absolute agreement between the 2 measurement tools (Track-UL algorithm and Kinovea software) for videos recorded at home was assessed using the ICC. The results demonstrated excellent reliability across all evaluated tasks and joints. 
ICC values ranged from 0.97 to 0.99, indicating a high degree of consistency between the 2 assessment tools (<xref ref-type="table" rid="table6">Table 6</xref>).</p><table-wrap id="t6" position="float"><label>Table 6.</label><caption><p>Intraclass correlation between the Track-UL algorithm and the Kinovea software (home setting).</p></caption><table id="table6" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Side and task number</td><td align="left" valign="bottom">Joint</td><td align="left" valign="bottom">Intraclass correlation coefficient (95% CI)</td><td align="left" valign="bottom"><italic>F</italic> test (<italic>df</italic>)</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="5">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.97-0.99)</td><td align="left" valign="top">284.85 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">2091.99 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.98-0.99)</td><td align="left" valign="top">1772.51 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.97 
(0.95-0.98)</td><td align="left" valign="top">89.19 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.98-0.99)</td><td align="left" valign="top">260.63 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99 (0.99-0.99)</td><td align="left" valign="top">467.71 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.99 (0.99-1.00)</td><td align="left" valign="top">2508.41 (38, 38)</td><td align="left" valign="top">&#x003C;.001</td></tr></tbody></table></table-wrap></sec><sec id="s3-7"><title>Track-UL Algorithm Test-Retest Reliability for Home Measurement</title><p>To assess the test-retest reliability of the Track-UL algorithm from the videos recorded at home, we collected 3 independent measures for each task. 
The ICC showed that the Track-UL algorithm had good to excellent reliability results for both the shoulder and elbow joints on the affected side (all ICC values&#x2265;0.778; <xref ref-type="table" rid="table7">Table 7</xref>).</p><table-wrap id="t7" position="float"><label>Table 7.</label><caption><p>Track-UL algorithm test-retest reliability for the home measurement (n=13).</p></caption><table id="table7" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Tested side and task number</td><td align="left" valign="bottom">Joint</td><td align="left" valign="bottom">ICC<sup><xref ref-type="table-fn" rid="table7fn1">a</xref></sup></td><td align="left" valign="bottom">SEM<sup><xref ref-type="table-fn" rid="table7fn2">b</xref></sup></td><td align="left" valign="bottom">MDC<sup><xref ref-type="table-fn" rid="table7fn3">c</xref></sup></td></tr></thead><tbody><tr><td align="left" valign="top" colspan="5">Affected side</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.77</td><td align="left" valign="top">13.07</td><td align="left" valign="top">36.24</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.98</td><td align="left" valign="top">5.94</td><td align="left" valign="top">16.48</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Elbow</td><td align="left" valign="top">0.94</td><td align="left" valign="top">9.12</td><td align="left" valign="top">25.28</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>1</td><td align="left" valign="top">Shoulder</td><td 
align="left" valign="top">0.97</td><td align="left" valign="top">3.23</td><td align="left" valign="top">8.95</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>2</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.99</td><td align="left" valign="top">2.29</td><td align="left" valign="top">6.35</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>3</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.96</td><td align="left" valign="top">4.52</td><td align="left" valign="top">12.54</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>4</td><td align="left" valign="top">Shoulder</td><td align="left" valign="top">0.91</td><td align="left" valign="top">4.30</td><td align="left" valign="top">11.92</td></tr></tbody></table><table-wrap-foot><fn id="table7fn1"><p><sup>a</sup>ICC: intraclass correlation coefficient.</p></fn><fn id="table7fn2"><p><sup>b</sup>SEM: SE of measurement.</p></fn><fn id="table7fn3"><p><sup>c</sup>MDC: minimum detectable change.</p></fn></table-wrap-foot></table-wrap></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Findings</title><p>The main aims of this study were to measure the concurrent validity, absolute agreement, and test-retest reliability of the newly developed Track-UL algorithm. This algorithm uses a 2D markerless motion capture system based on MediaPipe, a pose estimation framework, to measure shoulder and elbow functional active ROM from recorded videos in people with stroke. 
While most algorithms that use MediaPipe pose estimation models have been tested and implemented in healthy populations for a variety of applications [<xref ref-type="bibr" rid="ref31">31</xref>-<xref ref-type="bibr" rid="ref33">33</xref>], such as assessment for spinal diseases and frozen shoulder [<xref ref-type="bibr" rid="ref34">34</xref>], among people with tremors [<xref ref-type="bibr" rid="ref35">35</xref>], and for telerehabilitation purposes [<xref ref-type="bibr" rid="ref36">36</xref>,<xref ref-type="bibr" rid="ref37">37</xref>], they have not yet been validated in individuals with stroke using complex functional tasks. The use of computer vision&#x2013;based assessment of functional movement in people with stroke has the potential to improve the accessibility and scalability of movement assessment by providing accurate, low-cost, and markerless measurement outside of specialist laboratory settings. Therefore, we assessed shoulder and elbow functional active ROM during 4 complex functional tasks in both laboratory and home settings to explore the efficacy and effectiveness of this approach for individuals with stroke. We also present the possibilities of using this technique for telemonitoring in stroke rehabilitation.</p><p>For the videos recorded by the researcher in the laboratory, we observed a positive agreement between the Track-UL algorithm and the Kinovea software in the measurement of upper-limb function in survivors of stroke in 4 different tasks. The LoA was below the minimum clinically significant difference, which supports the use of the Track-UL algorithm in clinical practice to evaluate and monitor the progression of upper-limb kinematics. 
The LoA was slightly higher in certain movements on the affected side relative to the unaffected side, potentially due to the compensatory shoulder joint movements (internal rotation during arm elevation) and elbow joint movements (flexion and extension), which influenced the process of analysis.</p><p>We observed a significant degree of absolute agreement between the Track-UL algorithm and the Kinovea software in videos captured by both the physiotherapist and the participants. This finding supports the interchangeability of tools as the Track-UL algorithm performs as an intuitive and convenient tool for assessing arm mobility in both clinical and remote settings. We support the integration of this tool into the clinical assessment of upper-limb kinematics in individuals with stroke. This tool offers significant advantages, particularly for those who experience difficulties commuting to clinics or who reside in rural areas, by enabling remote follow-up and monitoring of upper-limb recovery. Furthermore, it provides more convenient solutions for carers, reducing their time and financial burdens as a result of traveling to clinics with their relatives with stroke. We can integrate this tool with any telerehabilitation model to provide a telerehabilitation and telemonitoring tool for people to use at home.</p><p>The test-retest reliability of the Track-UL algorithm demonstrated excellent consistency and robustness in repeated clinical assessments both in laboratory and home environments. The findings indicate that the Track-UL algorithm can deliver highly reliable kinematic measurements for recorded videos of people with stroke, rendering it appropriate for both in-clinic and remote assessments in stroke rehabilitation. 
This can be clinically beneficial, particularly for individuals reporting fatigue, allowing us to request that they perform the task only once rather than multiple times.</p><p>We calculated the minimum detectable change of the Track-UL assessment tool and determined that it was slightly elevated for the elbow joint compared to the shoulder joint, more pronounced on the affected side. This is potentially attributable to personal variability and the complexity of the tasks, which emphasize functional movements rather than simple joint movements. Additionally, the use of a 2D markerless motion capture system limits measurements to a single plane, making the results more sensitive to compensatory movements such as whole-arm rotation. Therefore, if this algorithm is to be used for monitoring upper-limb kinematics over time as a telemonitoring tool, the data should be interpreted with caution in clinical practice.</p><p>Our findings align with those of Latreche et al [<xref ref-type="bibr" rid="ref31">31</xref>], who reported good agreement and reliability of an algorithm using the MediaPipe pose estimation framework in comparison to a goniometer; however, their study focused on simple shoulder movements. In our study, we explored complex functional arm movements involving the shoulder and elbow joints in people with stroke, which were close to what we usually evaluate in clinical practice. 
Another study measured upper-limb kinematics using sensorized gloves equipped with electromagnetic sensors capable of assessing 6 df during functional reach and grasp tasks [<xref ref-type="bibr" rid="ref38">38</xref>]; however, the Track-UL algorithm&#x2013;based measurement tool has the potential to be more user-friendly, requiring no additional equipment, which increases its suitability for clinical use.</p><p>Many participants were either unable to accurately record videos at home (4/27, 14.8%), potentially because restricted home environments, such as small houses, made it difficult to achieve the required camera angle, or they were unable to record the videos at all (10/27, 37%) due to technical difficulties in following the video recording instructions and then sending the videos back to the research team, particularly among older survivors. Consequently, we could not analyze the videos to determine whether the setup was incorrect, whether the entire movement was captured within the camera&#x2019;s frame, or whether the camera was positioned arbitrarily, all of which could affect pose estimation analysis. All of these factors affected the ability to record the videos at home and the ability of the algorithm to analyze the joint angles from the videos recorded independently by the participants or their carers at home. We recommend integrating this algorithm into an application that can offer a step-by-step guide for video recording alongside real-time feedback on the video quality, such as the camera angle, the distance from the camera, and frame coverage, to enhance the feasibility of using this algorithm as a telemonitoring tool for people with stroke.</p><p>For the current application, which aimed to validate 4 functional active ROM tasks, 2D analysis is appropriate. However, there are known limitations, such as lack of depth information and issues with out-of-plane movement detection. 
These could potentially be addressed using 3D with 2D recorded videos to yield further depth of information that could be useful in other contexts. We also recommend incorporating more functional tasks into the assessment and cross-validating them with functional questionnaires (FMA-UE). In addition, time-series analysis could yield extra layers of useful clinical information, including qualitative considerations such as movement smoothness, which could add further insight in future studies.</p></sec><sec id="s4-2"><title>Conclusions</title><p>The Track-UL algorithm can be adopted as a cost-effective, fast, and user-friendly alternative to traditional motion analysis software such as Kinovea. This allows for its use in both clinical and potentially in remote settings. This technology has the potential to benefit people with stroke through monitoring and motor performance assessments. We recommend developing an application with an easy-to-use interface that enables survivors of stroke to securely record and analyze their arm movement both in clinics and at home. Automated reports can provide physiotherapists with objective, measurable kinematic data to help design and adjust personalized rehabilitation programs.</p></sec></sec></body><back><ack><p>This study was supported by the Egyptian Ministry of Higher Education and Scientific Research as part of a funded PhD project. SEL is supported by the Dennis and Mireille Gillings Foundation. The research was carried out at the National Institute for Health and Care Research Exeter Biomedical Research Centre.</p></ack><notes><sec><title>Funding</title><p>This study was funded by the Egyptian government and supported by the National Institute for Health and Care Research Biomedical Research Centre at the University of Exeter. HL has received funding for his PhD from the Egyptian Cultural and Educational Bureau (ID: MM48/21). 
HL, TCR, PL, HD, MM, and SEL are supported by the National Institute for Health and Care Research Exeter Biomedical Research Centre. HD is supported by National Institute for Health and Care Research HealthTech Research Centre.</p></sec><sec><title>Data Availability</title><p>The datasets generated or analyzed during this study are available from the corresponding author on reasonable request.</p></sec></notes><fn-group><fn fn-type="con"><p>HL, DH, AH, SEL, MM, and HD conceptualized the methods and designed the study. HL collected the data from the participants. HL, PL, XX, PN, and TCR analyzed and interpreted the data. HL and HD wrote the initial manuscript, and all authors reviewed, edited, and approved the final manuscript.</p></fn><fn fn-type="conflict"><p>MM is a Co-Founder and Director of DigiTherapix. HD is codirector of DigiTherapix. DigiTherapix is not related to the subject matter of the manuscript, and no company resources or funds were used in this work. All other authors declare no other conflicts of interest.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">FMA-UE</term><def><p>Fugl-Meyer Assessment for the upper extremity</p></def></def-item><def-item><term id="abb2">ICC</term><def><p>intraclass correlation coefficient</p></def></def-item><def-item><term id="abb3">LoA</term><def><p>limit of agreement</p></def></def-item><def-item><term id="abb4">ROM</term><def><p>range of motion</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Donkor</surname><given-names>ES</given-names> </name></person-group><article-title>Stroke in the 21st century: a snapshot of the burden, epidemiology, and quality of life</article-title><source>Stroke Res Treat</source><year>2018</year><volume>2018</volume><fpage>3238165</fpage><pub-id 
pub-id-type="doi">10.1155/2018/3238165</pub-id><pub-id pub-id-type="medline">30598741</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Raghavan</surname><given-names>P</given-names> </name></person-group><article-title>Upper limb motor impairment after stroke</article-title><source>Phys Med Rehabil Clin N Am</source><year>2015</year><month>11</month><volume>26</volume><issue>4</issue><fpage>599</fpage><lpage>610</lpage><pub-id pub-id-type="doi">10.1016/j.pmr.2015.06.008</pub-id><pub-id pub-id-type="medline">26522900</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Colebatch</surname><given-names>JG</given-names> </name><name name-style="western"><surname>Gandevia</surname><given-names>SC</given-names> </name></person-group><article-title>The distribution of muscular weakness in upper motor neuron lesions affecting the arm</article-title><source>Brain</source><year>1989</year><month>06</month><volume>112</volume><issue>Pt 3</issue><fpage>749</fpage><lpage>763</lpage><pub-id pub-id-type="doi">10.1093/brain/112.3.749</pub-id><pub-id pub-id-type="medline">2731028</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>O&#x2019;Dwyer</surname><given-names>NJ</given-names> </name><name name-style="western"><surname>Ada</surname><given-names>L</given-names> </name><name name-style="western"><surname>Neilson</surname><given-names>PD</given-names> </name></person-group><article-title>Spasticity and muscle contracture following stroke</article-title><source>Brain</source><year>1996</year><volume>119</volume><issue>Pt 5</issue><fpage>1737</fpage><lpage>1749</lpage><pub-id 
pub-id-type="doi">10.1093/brain/119.5.1737</pub-id><pub-id pub-id-type="medline">8931594</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Levin</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Selles</surname><given-names>RW</given-names> </name><name name-style="western"><surname>Verheul</surname><given-names>MH</given-names> </name><name name-style="western"><surname>Meijer</surname><given-names>OG</given-names> </name></person-group><article-title>Deficits in the coordination of agonist and antagonist muscles in stroke patients: implications for normal motor control</article-title><source>Brain Res</source><year>2000</year><month>01</month><day>24</day><volume>853</volume><issue>2</issue><fpage>352</fpage><lpage>369</lpage><pub-id pub-id-type="doi">10.1016/s0006-8993(99)02298-2</pub-id><pub-id pub-id-type="medline">10640634</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tang</surname><given-names>E</given-names> </name><name name-style="western"><surname>Moran</surname><given-names>N</given-names> </name><name name-style="western"><surname>Cadman</surname><given-names>M</given-names> </name><etal/></person-group><article-title>Stroke rehabilitation in adults: summary of updated NICE guidance</article-title><source>BMJ</source><year>2024</year><month>03</month><day>22</day><volume>384</volume><fpage>q498</fpage><pub-id pub-id-type="doi">10.1136/bmj.q498</pub-id><pub-id pub-id-type="medline">38519084</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>de Jong</surname><given-names>LD</given-names> </name><name 
name-style="western"><surname>Dijkstra</surname><given-names>PU</given-names> </name><name name-style="western"><surname>Stewart</surname><given-names>RE</given-names> </name><name name-style="western"><surname>Postema</surname><given-names>K</given-names> </name></person-group><article-title>Repeated measurements of arm joint passive range of motion after stroke: interobserver reliability and sources of variation</article-title><source>Phys Ther</source><year>2012</year><month>08</month><volume>92</volume><issue>8</issue><fpage>1027</fpage><lpage>1035</lpage><pub-id pub-id-type="doi">10.2522/ptj.20110280</pub-id><pub-id pub-id-type="medline">22577062</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gladstone</surname><given-names>DJ</given-names> </name><name name-style="western"><surname>Danells</surname><given-names>CJ</given-names> </name><name name-style="western"><surname>Black</surname><given-names>SE</given-names> </name></person-group><article-title>The Fugl-Meyer assessment of motor recovery after stroke: a critical review of its measurement properties</article-title><source>Neurorehabil Neural Repair</source><year>2002</year><month>09</month><volume>16</volume><issue>3</issue><fpage>232</fpage><lpage>240</lpage><pub-id pub-id-type="doi">10.1177/154596802401105171</pub-id><pub-id pub-id-type="medline">12234086</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hanks</surname><given-names>J</given-names> </name><name name-style="western"><surname>Myers</surname><given-names>B</given-names> </name></person-group><article-title>Validity, reliability, and efficiency of a standard goniometer, medical inclinometer, and builder&#x2019;s inclinometer</article-title><source>Int J Sports Phys 
Ther</source><year>2023</year><volume>18</volume><issue>4</issue><fpage>989</fpage><lpage>996</lpage><pub-id pub-id-type="doi">10.26603/001c.83944</pub-id><pub-id pub-id-type="medline">37547826</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lea</surname><given-names>RD</given-names> </name><name name-style="western"><surname>Gerhardt</surname><given-names>JJ</given-names> </name></person-group><article-title>Range-of-motion measurements</article-title><source>J Bone Joint Surg Am</source><year>1995</year><month>05</month><volume>77</volume><issue>5</issue><fpage>784</fpage><lpage>798</lpage><pub-id pub-id-type="doi">10.2106/00004623-199505000-00017</pub-id><pub-id pub-id-type="medline">7744906</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Akizuki</surname><given-names>K</given-names> </name><name name-style="western"><surname>Yamaguchi</surname><given-names>K</given-names> </name><name name-style="western"><surname>Morita</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Ohashi</surname><given-names>Y</given-names> </name></person-group><article-title>The effect of proficiency level on measurement error of range of motion</article-title><source>J Phys Ther Sci</source><year>2016</year><month>09</month><volume>28</volume><issue>9</issue><fpage>2644</fpage><lpage>2651</lpage><pub-id pub-id-type="doi">10.1589/jpts.28.2644</pub-id><pub-id pub-id-type="medline">27799712</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cirstea</surname><given-names>MC</given-names> </name><name name-style="western"><surname>Levin</surname><given-names>MF</given-names> 
</name></person-group><article-title>Compensatory strategies for reaching in stroke</article-title><source>Brain</source><year>2000</year><month>05</month><volume>123</volume><issue>Pt 5</issue><fpage>940</fpage><lpage>953</lpage><pub-id pub-id-type="doi">10.1093/brain/123.5.940</pub-id><pub-id pub-id-type="medline">10775539</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Elrahim</surname><given-names>RMA</given-names> </name><name name-style="western"><surname>Embaby</surname><given-names>EA</given-names> </name><name name-style="western"><surname>Ali</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Kamel</surname><given-names>RM</given-names> </name></person-group><article-title>Inter-rater and intra-rater reliability of Kinovea software for measurement of shoulder range of motion</article-title><source>Bull Fac Phys Ther</source><year>2016</year><month>12</month><volume>21</volume><issue>2</issue><fpage>80</fpage><lpage>87</lpage><pub-id pub-id-type="doi">10.4103/1110-6611.196778</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hellsten</surname><given-names>T</given-names> </name><name name-style="western"><surname>Karlsson</surname><given-names>J</given-names> </name><name name-style="western"><surname>Shamsuzzaman</surname><given-names>M</given-names> </name><name name-style="western"><surname>Pulkkis</surname><given-names>G</given-names> </name></person-group><article-title>The potential of computer vision-based marker-less human motion analysis for rehabilitation</article-title><source>Rehabil Process Outcome</source><year>2021</year><volume>10</volume><fpage>11795727211022330</fpage><pub-id pub-id-type="doi">10.1177/11795727211022330</pub-id><pub-id 
pub-id-type="medline">34987303</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hannink</surname><given-names>E</given-names> </name><name name-style="western"><surname>Mansoubi</surname><given-names>M</given-names> </name><name name-style="western"><surname>Cronin</surname><given-names>N</given-names> </name><etal/></person-group><article-title>Validity and feasibility of remote measurement systems for functional movement and posture assessments in people with axial spondylarthritis</article-title><source>Healthc Technol Lett</source><year>2022</year><month>12</month><volume>9</volume><issue>6</issue><fpage>110</fpage><lpage>118</lpage><pub-id pub-id-type="doi">10.1049/htl2.12038</pub-id><pub-id pub-id-type="medline">36514477</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mansoubi</surname><given-names>M</given-names> </name><name name-style="western"><surname>Leveridge</surname><given-names>P</given-names> </name><name name-style="western"><surname>Smith</surname><given-names>M</given-names> </name><etal/></person-group><article-title>Knee4Life: empowering knee recovery after total knee replacement through digital health protocol</article-title><source>Sensors (Basel)</source><year>2024</year><month>11</month><day>17</day><volume>24</volume><issue>22</issue><fpage>7334</fpage><pub-id pub-id-type="doi">10.3390/s24227334</pub-id><pub-id pub-id-type="medline">39599111</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wade</surname><given-names>L</given-names> </name><name name-style="western"><surname>Needham</surname><given-names>L</given-names> </name><name 
name-style="western"><surname>McGuigan</surname><given-names>P</given-names> </name><name name-style="western"><surname>Bilzon</surname><given-names>J</given-names> </name></person-group><article-title>Applications and limitations of current markerless motion capture methods for clinical gait biomechanics</article-title><source>PeerJ</source><year>2022</year><volume>10</volume><fpage>e12995</fpage><pub-id pub-id-type="doi">10.7717/peerj.12995</pub-id><pub-id pub-id-type="medline">35237469</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Aprile</surname><given-names>I</given-names> </name><name name-style="western"><surname>Rabuffetti</surname><given-names>M</given-names> </name><name name-style="western"><surname>Padua</surname><given-names>L</given-names> </name><name name-style="western"><surname>Di Sipio</surname><given-names>E</given-names> </name><name name-style="western"><surname>Simbolotti</surname><given-names>C</given-names> </name><name name-style="western"><surname>Ferrarin</surname><given-names>M</given-names> </name></person-group><article-title>Kinematic analysis of the upper limb motor strategies in stroke patients as a tool towards advanced neurorehabilitation strategies: a preliminary study</article-title><source>Biomed Res Int</source><year>2014</year><volume>2014</volume><fpage>636123</fpage><pub-id pub-id-type="doi">10.1155/2014/636123</pub-id><pub-id pub-id-type="medline">24868536</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Puig-Div&#x00ED;</surname><given-names>A</given-names> </name><name name-style="western"><surname>Escalona-Marfil</surname><given-names>C</given-names> </name><name name-style="western"><surname>Padull&#x00E9;s-Riu</surname><given-names>JM</given-names> </name><name 
name-style="western"><surname>Busquets</surname><given-names>A</given-names> </name><name name-style="western"><surname>Padull&#x00E9;s-Chando</surname><given-names>X</given-names> </name><name name-style="western"><surname>Marcos-Ruiz</surname><given-names>D</given-names> </name></person-group><article-title>Validity and reliability of the Kinovea program in obtaining angles and distances using coordinates in 4 perspectives</article-title><source>PLoS One</source><year>2019</year><volume>14</volume><issue>6</issue><fpage>e0216448</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0216448</pub-id><pub-id pub-id-type="medline">31166989</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fern&#x00E1;ndez-Gonz&#x00E1;lez</surname><given-names>P</given-names> </name><name name-style="western"><surname>Koutsou</surname><given-names>A</given-names> </name><name name-style="western"><surname>Cuesta-G&#x00F3;mez</surname><given-names>A</given-names> </name><name name-style="western"><surname>Carratal&#x00E1;-Tejada</surname><given-names>M</given-names> </name><name name-style="western"><surname>Miangolarra-Page</surname><given-names>JC</given-names> </name><name name-style="western"><surname>Molina-Rueda</surname><given-names>F</given-names> </name></person-group><article-title>Reliability of Kinovea<sup>&#x00AE;</sup> software and agreement with a three-dimensional motion system for gait analysis in healthy subjects</article-title><source>Sensors (Basel)</source><year>2020</year><month>06</month><day>2</day><volume>20</volume><issue>11</issue><fpage>3154</fpage><pub-id pub-id-type="doi">10.3390/s20113154</pub-id><pub-id pub-id-type="medline">32498380</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Lugaresi</surname><given-names>C</given-names> </name><name name-style="western"><surname>Tang</surname><given-names>J</given-names> </name><name name-style="western"><surname>Nash</surname><given-names>H</given-names> </name><etal/></person-group><article-title>MediaPipe: a framework for perceiving and processing reality</article-title><access-date>2026-04-21</access-date><conf-name>Third Workshop on Computer Vision for AR/VR at IEEE Computer Vision and Pattern Recognition (CVPR) 2019</conf-name><conf-date>Jun 17, 2019</conf-date><comment><ext-link ext-link-type="uri" xlink:href="https://research.google/pubs/mediapipe-a-framework-for-perceiving-and-processing-reality/">https://research.google/pubs/mediapipe-a-framework-for-perceiving-and-processing-reality/</ext-link></comment></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Bazarevsky</surname><given-names>V</given-names> </name><name name-style="western"><surname>Grishchenko</surname><given-names>I</given-names> </name><name name-style="western"><surname>Raveendran</surname><given-names>K</given-names> </name><name name-style="western"><surname>Zhu</surname><given-names>T</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>F</given-names> </name><name name-style="western"><surname>Grundmann</surname><given-names>M</given-names> </name></person-group><article-title>BlazePose: on-device real-time body pose tracking</article-title><source>arXiv</source><comment>Preprint posted online on  Jun 17, 2020</comment><pub-id pub-id-type="doi">10.48550/arXiv.2006.10204</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Akturk</surname><given-names>S</given-names> </name><name 
name-style="western"><surname>Derdiyok</surname><given-names>FB</given-names> </name><name name-style="western"><surname>Serbest</surname><given-names>K</given-names> </name></person-group><article-title>Markerless joint angle estimation using MediaPipe with a rapid setup for joint moment calculation</article-title><source>Multimed Tools Appl</source><year>2026</year><volume>85</volume><fpage>38</fpage><pub-id pub-id-type="doi">10.1007/s11042-026-21256-z</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cao</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Hidalgo</surname><given-names>G</given-names> </name><name name-style="western"><surname>Simon</surname><given-names>T</given-names> </name><name name-style="western"><surname>Wei</surname><given-names>SE</given-names> </name><name name-style="western"><surname>Sheikh</surname><given-names>Y</given-names> </name></person-group><article-title>OpenPose: realtime multi-person 2D pose estimation using part affinity fields</article-title><source>IEEE Trans Pattern Anal Mach Intell</source><year>2021</year><month>01</month><volume>43</volume><issue>1</issue><fpage>172</fpage><lpage>186</lpage><pub-id pub-id-type="doi">10.1109/TPAMI.2019.2929257</pub-id><pub-id pub-id-type="medline">31331883</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Uhlrich</surname><given-names>SD</given-names> </name><name name-style="western"><surname>Falisse</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kidzi&#x0144;ski</surname><given-names>&#x0141;</given-names> </name><etal/></person-group><article-title>OpenCap: human movement dynamics from smartphone videos</article-title><source>PLoS Comput 
Biol</source><year>2023</year><month>10</month><volume>19</volume><issue>10</issue><fpage>e1011462</fpage><pub-id pub-id-type="doi">10.1371/journal.pcbi.1011462</pub-id><pub-id pub-id-type="medline">37856442</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Martin Bland</surname><given-names>J</given-names> </name><name name-style="western"><surname>Altman</surname><given-names>DG</given-names> </name></person-group><article-title>Statistical methods for assessing agreement between two methods of clinical measurement</article-title><source>Lancet</source><year>1986</year><month>02</month><volume>327</volume><issue>8476</issue><fpage>307</fpage><lpage>310</lpage><pub-id pub-id-type="doi">10.1016/S0140-6736(86)90837-8</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Stratford</surname><given-names>PW</given-names> </name><name name-style="western"><surname>Binkley</surname><given-names>JM</given-names> </name><name name-style="western"><surname>Riddle</surname><given-names>DL</given-names> </name></person-group><article-title>Health status measures: strategies and analytic methods for assessing change scores</article-title><source>Phys Ther</source><year>1996</year><month>10</month><volume>76</volume><issue>10</issue><fpage>1109</fpage><lpage>1123</lpage><pub-id pub-id-type="doi">10.1093/ptj/76.10.1109</pub-id><pub-id pub-id-type="medline">8863764</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Stratford</surname><given-names>PW</given-names> </name><name name-style="western"><surname>Binkley</surname><given-names>J</given-names> </name><name 
name-style="western"><surname>Solomon</surname><given-names>P</given-names> </name><name name-style="western"><surname>Finch</surname><given-names>E</given-names> </name><name name-style="western"><surname>Gill</surname><given-names>C</given-names> </name><name name-style="western"><surname>Moreland</surname><given-names>J</given-names> </name></person-group><article-title>Defining the minimum level of detectable change for the Roland-Morris Questionnaire</article-title><source>Phys Ther</source><year>1996</year><month>04</month><volume>76</volume><issue>4</issue><fpage>359</fpage><lpage>365</lpage><pub-id pub-id-type="doi">10.1093/ptj/76.4.359</pub-id><pub-id pub-id-type="medline">8606899</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Koo</surname><given-names>TK</given-names> </name><name name-style="western"><surname>Li</surname><given-names>MY</given-names> </name></person-group><article-title>A guideline of selecting and reporting intraclass correlation coefficients for reliability research</article-title><source>J Chiropr Med</source><year>2016</year><month>06</month><volume>15</volume><issue>2</issue><fpage>155</fpage><lpage>163</lpage><pub-id pub-id-type="doi">10.1016/j.jcm.2016.02.012</pub-id><pub-id pub-id-type="medline">27330520</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mullaney</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>McHugh</surname><given-names>MP</given-names> </name><name name-style="western"><surname>Johnson</surname><given-names>CP</given-names> </name><name name-style="western"><surname>Tyler</surname><given-names>TF</given-names> </name></person-group><article-title>Reliability of shoulder range of motion comparing a goniometer to a digital 
level</article-title><source>Physiother Theory Pract</source><year>2010</year><month>07</month><volume>26</volume><issue>5</issue><fpage>327</fpage><lpage>333</lpage><pub-id pub-id-type="doi">10.3109/09593980903094230</pub-id><pub-id pub-id-type="medline">20557263</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Latreche</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kelaiaia</surname><given-names>R</given-names> </name><name name-style="western"><surname>Chemori</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kerboua</surname><given-names>A</given-names> </name></person-group><article-title>Reliability and validity analysis of MediaPipe-based measurement system for some human rehabilitation motions</article-title><source>Measurement</source><year>2023</year><month>06</month><volume>214</volume><fpage>112826</fpage><pub-id pub-id-type="doi">10.1016/j.measurement.2023.112826</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Garg</surname><given-names>S</given-names> </name><name name-style="western"><surname>Saxena</surname><given-names>A</given-names> </name><name name-style="western"><surname>Gupta</surname><given-names>R</given-names> </name></person-group><article-title>Yoga pose classification: a CNN and MediaPipe inspired deep learning approach for real-world application</article-title><source>J Ambient Intell Human Comput</source><year>2023</year><month>12</month><volume>14</volume><fpage>16551</fpage><lpage>16562</lpage><pub-id pub-id-type="doi">10.1007/s12652-022-03910-0</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Hisham</surname><given-names>MN</given-names> </name><name name-style="western"><surname>Hassan</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Ibrahim</surname><given-names>N</given-names> </name><name name-style="western"><surname>Zin</surname><given-names>ZM</given-names> </name></person-group><article-title>Mono camera-based human skeletal tracking for squat exercise abnormality detection using double exponential smoothing</article-title><source>Int J Adv Comput Sci Appl</source><year>2022</year><volume>13</volume><issue>7</issue><pub-id pub-id-type="doi">10.14569/IJACSA.2022.0130709</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhang</surname><given-names>W</given-names> </name><name name-style="western"><surname>Li</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Cai</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Combined MediaPipe and YOLOv5 range of motion assessment system for spinal diseases and frozen shoulder</article-title><source>Sci Rep</source><year>2024</year><volume>14</volume><fpage>15879</fpage><pub-id pub-id-type="doi">10.1038/s41598-024-66221-8</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Friedrich</surname><given-names>MU</given-names> </name><name name-style="western"><surname>Roenn</surname><given-names>AJ</given-names> </name><name name-style="western"><surname>Palmisano</surname><given-names>C</given-names> </name><etal/></person-group><article-title>Validation and application of computer vision algorithms for video-based tremor analysis</article-title><source>NPJ Digit 
Med</source><year>2024</year><month>06</month><day>21</day><volume>7</volume><issue>1</issue><fpage>165</fpage><pub-id pub-id-type="doi">10.1038/s41746-024-01153-1</pub-id><pub-id pub-id-type="medline">38906946</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Pornpipatsakul</surname><given-names>K</given-names> </name><name name-style="western"><surname>Chuengwutigool</surname><given-names>W</given-names> </name><name name-style="western"><surname>Chaichaowarat</surname><given-names>R</given-names> </name><name name-style="western"><surname>Foongchomcheay</surname><given-names>A</given-names> </name></person-group><article-title>Bridging exercise monitoring system using RGB camera for stroke rehabilitation</article-title><conf-name>2023 IEEE R10 Conference (TENCON)</conf-name><conf-date>Oct 31 to Nov 3, 2023</conf-date><pub-id pub-id-type="doi">10.1109/TENCON58879.2023.10322445</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kushnir</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kachmar</surname><given-names>O</given-names> </name><name name-style="western"><surname>Bonnech&#x00E8;re</surname><given-names>B</given-names> </name></person-group><article-title>STASISM: a versatile serious gaming multi-sensor platform for personalized telerehabilitation and telemonitoring</article-title><source>Sensors (Basel)</source><year>2024</year><month>01</month><day>6</day><volume>24</volume><issue>2</issue><fpage>351</fpage><pub-id pub-id-type="doi">10.3390/s24020351</pub-id><pub-id pub-id-type="medline">38257442</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Khanna</surname><given-names>P</given-names> </name><name name-style="western"><surname>Oppenheim</surname><given-names>T</given-names> </name><name name-style="western"><surname>Tu-Chan</surname><given-names>A</given-names> </name><name name-style="western"><surname>Abrams</surname><given-names>G</given-names> </name><name name-style="western"><surname>Ganguly</surname><given-names>K</given-names> </name></person-group><article-title>Measuring arm and hand joint kinematics to estimate impairment during a functional reach and grasp task after stroke</article-title><source>Neurorehabil Neural Repair</source><year>2023</year><month>06</month><volume>37</volume><issue>6</issue><fpage>409</fpage><lpage>417</lpage><pub-id pub-id-type="doi">10.1177/15459683231179173</pub-id><pub-id pub-id-type="medline">37300318</pub-id></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Instructions for recording the videos.</p><media xlink:href="rehab_v13i1e87128_app1.pdf" xlink:title="PDF File, 358 KB"/></supplementary-material></app-group></back></article>