<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JRAT</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Rehabil Assist Technol</journal-id>
      <journal-title>JMIR Rehabilitation and Assistive Technologies</journal-title>
      <issn pub-type="epub">2369-2529</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v10i1e48031</article-id>
      <article-id pub-id-type="pmid">38145484</article-id>
      <article-id pub-id-type="doi">10.2196/48031</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Older Adults’ Engagement and Mood During Robot-Assisted Group Activities in Nursing Homes: Development and Observational Pilot Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Mulvenna</surname>
            <given-names>Maurice</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Campbell</surname>
            <given-names>Laura</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Chang</surname>
            <given-names>Nien-Tzu</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Tanner</surname>
            <given-names>Alexandra</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>School of Applied Psychology</institution>
            <institution>University of Applied Sciences and Arts Northwestern Switzerland</institution>
            <addr-line>Riggenbachstrasse 16</addr-line>
            <addr-line>Olten</addr-line>
            <country>Switzerland</country>
            <phone>41 0796040979</phone>
            <email>mail@alexandratanner.net</email>
          </address>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-3713-8647</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author">
          <name name-style="western">
            <surname>Urech</surname>
            <given-names>Andreas</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-8181-9274</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Schulze</surname>
            <given-names>Hartmut</given-names>
          </name>
          <degrees>Prof Dr</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-1668-537X</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Manser</surname>
            <given-names>Tanja</given-names>
          </name>
          <degrees>Prof Dr</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-5852-8735</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>School of Applied Psychology</institution>
        <institution>University of Applied Sciences and Arts Northwestern Switzerland</institution>
        <addr-line>Olten</addr-line>
        <country>Switzerland</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>City of Bern (Digital Stadt Bern)</institution>
        <addr-line>Bern</addr-line>
        <country>Switzerland</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Alexandra Tanner <email>mail@alexandratanner.net</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2023</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>25</day>
        <month>12</month>
        <year>2023</year>
      </pub-date>
      <volume>10</volume>
      <elocation-id>e48031</elocation-id>
      <history>
        <date date-type="received">
          <day>1</day>
          <month>5</month>
          <year>2023</year>
        </date>
        <date date-type="rev-request">
          <day>30</day>
          <month>8</month>
          <year>2023</year>
        </date>
        <date date-type="rev-recd">
          <day>23</day>
          <month>10</month>
          <year>2023</year>
        </date>
        <date date-type="accepted">
          <day>15</day>
          <month>11</month>
          <year>2023</year>
        </date>
      </history>
      <copyright-statement>©Alexandra Tanner, Andreas Urech, Hartmut Schulze, Tanja Manser. Originally published in JMIR Rehabilitation and Assistive Technologies (https://rehab.jmir.org), 25.12.2023.</copyright-statement>
      <copyright-year>2023</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Rehabilitation and Assistive Technologies, is properly cited. The complete bibliographic information, a link to the original publication on https://rehab.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://rehab.jmir.org/2023/1/e48031" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Promoting the well-being of older adults in an aging society requires new solutions. One resource might be the use of social robots for group activities that promote physical and cognitive stimulation. Engaging in a robot-assisted group activity may help slow down physical and cognitive decline in older adults. Currently, our knowledge is limited on whether older adults engage in group activities with humanlike social robots and whether they experience a positive affect while doing so. Both are necessary preconditions to achieve the intended effects of a group activity.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>Our pilot study has 2 aims. First, we aimed to develop and pilot an observational coding scheme for robot-assisted group activities because self-report data on engagement and mood of nursing home residents are often difficult to obtain, and the existing observation instruments do have limitations. Second, we aimed to investigate older adults’ engagement and mood during robot-assisted group activities in 4 different nursing care homes in the German-speaking part of Switzerland.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>We developed an observation system, inspired by existing tools, for a structured observation of engagement and mood of older adults during a robot-assisted group activity. In this study, 85 older adult residents from 4 different care homes in Switzerland participated in 5 robot-assisted group activity sessions, and they were observed using our developed system. The data were collected in the form of video clips that were assessed by 2 raters regarding engagement (direction of gaze, posture as well as body expression, and activity) and mood (positive and negative affects). Both variables were rated on a 5-point rating scale.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>Our pilot study findings show that the engagement and mood of older adults can be assessed reliably by using the proposed observational coding scheme. Most participants actively engaged in robot-assisted group activities (mean 4.19, SD 0.47; median 4.0). The variables used to measure engagement were direction of gaze (mean 4.65, SD 0.49; median 5.0), posture and body expression (mean 4.03, SD 0.71; median 4.0), and activity (mean 3.90, SD 0.65; median 4.0). Further, we observed mainly positive affects in this group. Almost no negative affect was observed (mean 1.13, SD 0.20; median 1.0), while the positive affect (mean 3.22, SD 0.55; median 3.2) was high.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>The developed observational coding system can be used and further developed in future studies on robot-assisted group activities in the nursing home context and potentially in other settings. Additionally, our pilot study indicates that cognitive and physical stimulation of older adults can be promoted by social robots in a group setting. This finding encourages future technological development and improvement of social robots and points to the potential of observational research to systematically evaluate such developments.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>human-robot interaction</kwd>
        <kwd>social robot</kwd>
        <kwd>nursing home</kwd>
        <kwd>observational research</kwd>
        <kwd>group activity</kwd>
        <kwd>observational</kwd>
        <kwd>pilot study</kwd>
        <kwd>robot</kwd>
        <kwd>engagement</kwd>
        <kwd>mood</kwd>
        <kwd>well-being</kwd>
        <kwd>cognitive</kwd>
        <kwd>elderly</kwd>
        <kwd>social robot</kwd>
        <kwd>nursing</kwd>
        <kwd>aging</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <sec>
        <title>Background</title>
        <p>Given the global phenomenon of aging populations, strategies to reduce the risk of physical and cognitive decline and the associated consequences on the well-being of older adults and their ability to cope with everyday life are urgently needed [<xref ref-type="bibr" rid="ref1">1</xref>]. One resource in this context might be the use of the so-called social robots. According to Anzalone and colleagues [<xref ref-type="bibr" rid="ref2">2</xref>], social robots can be understood as “machines that humans should perceive as realistic, effective partners, able to communicate and cooperate with them as naturally as possible.” The acceptance of social robots and their potential to promote the well-being of older adults have been explored and demonstrated in several studies [<xref ref-type="bibr" rid="ref3">3</xref>-<xref ref-type="bibr" rid="ref8">8</xref>]. Most of the robots studied are animallike, with PARO [<xref ref-type="bibr" rid="ref9">9</xref>], a seal-shaped robot, being a prominent example [<xref ref-type="bibr" rid="ref2">2</xref>-<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref11">11</xref>]. However, animallike companion robots are not multifunctional and their interactions are not sufficient for those who require care and support. A study comparing animallike and humanlike social robots in group settings provided the first evidence that humanlike robots have greater effects on cognitive training than animallike robots [<xref ref-type="bibr" rid="ref12">12</xref>], which brings humanlike social robots into the focus of research for group activities for older adults. 
This so-called third generation of social robots, including Nao, Pepper, QT, Sophia, Jack, LOVOT, or Tessa [<xref ref-type="bibr" rid="ref12">12</xref>-<xref ref-type="bibr" rid="ref18">18</xref>], continue to evolve, as new software is developed and released into the market [<xref ref-type="bibr" rid="ref13">13</xref>]. Their humanlike forms [<xref ref-type="bibr" rid="ref19">19</xref>] and integrated voice capability allow for interactions through facial expression, gestures, and voice. Thus, these robots can support cognitive and physically stimulating exercises, which in combination, achieve the best results in maintaining cognitive abilities in older adults [<xref ref-type="bibr" rid="ref1">1</xref>].</p>
        <p>Few studies [<xref ref-type="bibr" rid="ref7">7</xref>,<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref20">20</xref>-<xref ref-type="bibr" rid="ref25">25</xref>] have investigated whether older adults actively engage in and experience positive moods during these activities. Since mood and engagement are crucial for the effectiveness of such group activities with a humanlike social robot, this study aims to explore these 2 constructs empirically. In doing so, we chose the method of systematic behavioral observation, because self-report data of older adults in nursing homes are often difficult to obtain and might interfere with their experience of the activity itself [<xref ref-type="bibr" rid="ref9">9</xref>]. As no suitable observational coding scheme could be identified in the literature, a second aim of this study was the development and piloting of an observational coding scheme. In summary, this pilot study addresses the following questions: (1) can the engagement and mood of older adults in a robot-assisted group activity be assessed through systematic behavioral observation? and (2) do older adults actively engage in a robot-assisted group activity and what mood (ie, positive or negative affect) can be observed in the group during such a robot-assisted group activity?</p>
      </sec>
      <sec>
        <title>Related Work</title>
        <p>A review identified group activities for older adults assisted by social robots in 5 domains: affective therapy, cognitive training, social facilitation, companionship, and physiological therapy [<xref ref-type="bibr" rid="ref7">7</xref>]. Three studies [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref20">20</xref>] showed a great potential of humanlike robots in group activities, with broader functionalities for physical activities. The first indications that older adults liked to participate in robot-assisted group activities for physical activities are shown in [<xref ref-type="bibr" rid="ref21">21</xref>,<xref ref-type="bibr" rid="ref22">22</xref>]. The robot NAO was found suitable to be used in group settings for moving, memory training, entertainment, music, dancing, and games [<xref ref-type="bibr" rid="ref23">23</xref>]. One study showed that older adults in a nursing home prefer walking with a robot rather than walking alone [<xref ref-type="bibr" rid="ref24">24</xref>]. Another study showed that older adults actively participated in robot-assisted cognitive therapy and physiotherapy sessions, and a trend toward improved neuropsychiatric symptoms, reduced apathy, and higher quality of life was observed [<xref ref-type="bibr" rid="ref25">25</xref>]. Although these studies [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref20">20</xref>-<xref ref-type="bibr" rid="ref25">25</xref>] provide first insights into the acceptance of humanlike robots assisting in group activities of older adults, we identified only 1 study that systematically developed and used an observation system for examining the engagement of groups of older adults during activity sessions assisted by a humanlike social robot [<xref ref-type="bibr" rid="ref13">13</xref>]. 
Even though this observation system indicates that systematic observation is a fruitful methodological approach in this research context, it does not fully capture the psychological constructs of engagement and mood that are at the center of our pilot study and that are usually captured using self-report surveys.</p>
      </sec>
      <sec>
        <title>Observation of Engagement and Mood During a Robot-Assisted Group Activity</title>
        <sec>
          <title>Engagement</title>
          <p>In the context of group activities providing physical and cognitive stimulation, engagement in exercises is crucial to generate the intended effects [<xref ref-type="bibr" rid="ref26">26</xref>]. According to Perugia and colleagues [<xref ref-type="bibr" rid="ref27">27</xref>], engagement is defined as “the psychological state of well-being, enjoyment, and active involvement that is triggered by meaningful activities and causes people with dementia to be absorbed by the activity, more energetic and in a more positive mood.” Studies with children provide evidence that children are just as willing to engage in robot-guided exercises as when a human demonstrates the exercise [<xref ref-type="bibr" rid="ref28">28</xref>].</p>
        </sec>
        <sec>
          <title>Mood</title>
          <p>The mood (ie, positive or negative affect) in the group during a robot-assisted activity is of interest to determine if older adults are enjoying themselves in the process, which is relevant to ensure participation beyond curiosity and to assess whether the intended positive effects of such robot-assisted group activities are actually attained. Assessing mood separately from engagement was important also because the stimulus for activities of older adults in the nursing home is a key factor in whether engagement occurs [<xref ref-type="bibr" rid="ref29">29</xref>]. The general experience is that humanlike social robots, with their ability to express emotions, tend to evoke a notably positive affect. However, the counter hypothesis to this would be that older adults simply want to be polite and participate because something new is happening in the nursing home, without actually experiencing the positive affect when interacting with the humanlike social robot.</p>
        </sec>
        <sec>
          <title>Assessment of Engagement and Mood</title>
          <p>The assessment of engagement and mood during a robot-assisted group activity has not been researched much [<xref ref-type="bibr" rid="ref27">27</xref>], and collecting data with older adults in terms of reliable outcomes presents a challenge [<xref ref-type="bibr" rid="ref7">7</xref>]. Several observational studies have provided inspiration for the design of the observation tool used in this study [<xref ref-type="bibr" rid="ref30">30</xref>-<xref ref-type="bibr" rid="ref33">33</xref>]. One important observational instrument is the Observational Measurement of Engagement [<xref ref-type="bibr" rid="ref34">34</xref>], and its further development can be used to gain a broad understanding of engagement in the context of telepresence robots and companion robots [<xref ref-type="bibr" rid="ref20">20</xref>]. Although this instrument was not directly suitable to measure the predefined behavior of older adults (eg, mimic an exercise) at the group level, it informed our methodological decisions and developments.</p>
        </sec>
      </sec>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Study Design</title>
        <p>We considered this as a pilot study because we developed and tested the applicability of a systematic observation system for rating participants’ engagement and mood during robot-assisted group activity sessions for older adults in nursing homes.</p>
      </sec>
      <sec>
        <title>Recruitment Strategy</title>
        <p>A pool of about 200 nursing homes in the German-speaking part of Switzerland was contacted by telephone and invited to participate in this study. Four nursing homes expressed their interest, and they were selected to participate in this observational field study. All participating nursing homes provide various services for leisure activities and physical and cognitive stimulation. They offer accommodation and care to 50-160 residents and provide specialized dementia care. As part of this study, the management of each nursing home agreed to co-organize a robot-assisted group activity together with the research team and made nursing staff available to accompany residents to the session. Residents of the participating nursing homes were informed about the robot-assisted group activity and the study procedure, and they were invited to participate on a voluntary basis.</p>
      </sec>
      <sec>
        <title>Materials</title>
        <p>The robot used for the robot-assisted group activity was the NAO robot from SoftBanks Robotics [<xref ref-type="bibr" rid="ref35">35</xref>]. We used the software of Avatarion [<xref ref-type="bibr" rid="ref36">36</xref>] developed by Smart Companion [<xref ref-type="bibr" rid="ref37">37</xref>]. The software was developed in collaboration with experts for leisure activities and physical and cognitive stimulation for older adults, specifically for robot-assisted leisure activities during their care. In this study, we used 3 software modules that support common elements of group activities for older adults: singing, storytelling, and gymnastics.</p>
        <list list-type="order">
          <list-item>
            <p>Singing: In the first module, the robot animates the residents to sing along with him or her by using friendly verbal communication and gestures. All songs implemented in this module are well-known Swiss songs that are popular with the older generation. The robot sings the songs with a human voice, and the singing is accompanied with suitable gestures. For songs with more complex lyrics, the residents received handouts of the lyrics.</p>
          </list-item>
          <list-item>
            <p>Storytelling: In the second module, the robot tells a story to the residents. The stories are designed to include biographical aspects. All stories implemented in this module are short and contain elements to imitate movements.</p>
          </list-item>
          <list-item>
            <p>Gymnastics: In the third module, the robot guides the residents to imitate physical exercises by using friendly verbal communication and gestures. The physical exercises are designed for older adults. For example, the robot shows how to stretch the arms or move the fingers.</p>
          </list-item>
        </list>
        <p><xref rid="figure1" ref-type="fig">Figures 1</xref>-<xref rid="figure2" ref-type="fig">2</xref> illustrate the robot-assisted physical exercise sessions in 2 different nursing homes. Photos were taken during 2 robot-assisted group activity sessions in 2 different nursing homes. The pictures show the NAO robot demonstrating movements with its hands and residents participating in this physical exercise by imitating the robot’s movements.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>An illustration of a robot-assisted group activity in a nursing home.</p>
          </caption>
          <graphic xlink:href="rehab_v10i1e48031_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>An illustration of a robot-assisted group activity in another nursing home.</p>
          </caption>
          <graphic xlink:href="rehab_v10i1e48031_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Ethical Considerations</title>
        <p>According to Swiss law, this study did not require formal ethics approval and was thus exempt from formal ethics review. For more information, please see the corresponding section of the Swiss Human Research Act. The participating nursing homes consented to this study and informed the residents in advance about the robot-assisted activity sessions. Participation in the robot-assisted activity sessions was voluntary. Consent was obtained for using the anonymized photographs in this paper.</p>
      </sec>
      <sec>
        <title>Study Procedure and Data Collection</title>
        <p>Robot-assisted group activity sessions were offered in the participating nursing homes in July and August 2019. Chairs and free spaces for wheelchair users were arranged in a way that allowed the participants to see the NAO6 robot that was placed on a table. All robot-assisted group activity sessions took place 1 hour before lunch. Participating residents arrived independently. During the session, 2-5 health care professionals were available in the room for the general support of the residents. All group activity sessions in this study were conducted by research team members and lasted 1 hour, with the actual robot-assisted group activity taking about 30 minutes.</p>
        <p>The activity session was structured in 3 parts. First, a representative of the research team welcomed the residents, explained the procedure of the session, reminded them that participation was voluntary, informed them about data protection issues, asked for their approval regarding video recording, and introduced the persons involved. Second, a technical expert from the Smart Companion team started the robot program. The participants first performed a gymnastics exercise, then sang a song together with the robot, and toward the end of the session, they listened to a story. The research team did not interact with the participating residents during these sessions. Third, an additional exercise was conducted by the robot to get the residents in the mood for lunch. This exercise was not recorded and was not part of this study, as it did not aim at their physical and cognitive stimulation. At the end of the activity session, the robot wished the residents bon appétit and said goodbye. Finally, the research team also said goodbye and thanked the residents for their participation.</p>
        <p>To systematically analyze residents’ engagement and mood in group activities with the robot, sessions were recorded on video. Video recording has the advantage that, for example, behavior can be observed unobtrusively and participants do not have to be bothered afterwards, as they would be when using interviews. Further, for a high number of residents in nursing homes, other forms of data collection such as surveys present an inaccurate form of assessment, since retrieval, reporting, and ranking of relevant information may be compromised. Therefore, almost all assessment techniques for people with dementia rely on behavior observation [<xref ref-type="bibr" rid="ref27">27</xref>]. Video recording was done in a way such that residents should not be disturbed, and the Hawthorne effect could be reduced [<xref ref-type="bibr" rid="ref38">38</xref>]. Hence, short video clips of all 3 exercises were filmed as discreetly as possible. The video clips lasted between 30 seconds and 3 minutes and were distributed across the whole duration of the 3 exercises. The time of the start of the clip in the exercise was chosen randomly. For practical reasons, video clips were recorded with a smartphone camera. For ethical reasons, we collected no personal data such as the age of participants as well as the presence and severity of dementia symptoms. The videos only show the number of participants during each session.</p>
      </sec>
      <sec>
        <title>Measures</title>
        <p>For a structured observation of engagement and mood during a robot-assisted group activity, an observation system was developed. The observation system builds on existing observation systems for engagement and mood of individuals but was adapted for direct observation in a group setting. For example, in studies of children’s engagement during one-on-one interactions with robots [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>], the variables used to measure engagement were direction of gaze, facial expressions, responses, or gestures. Another study related to children with autism spectrum disorders interacting with social robots [<xref ref-type="bibr" rid="ref32">32</xref>] used measures of engagement based on nonverbal behavior focusing on social and antisocial behaviors. Another system used for older adults observed in a session with a social robot includes measures of engagement and mood targeted to a setting with small groups and a facilitator. Engagement was measured by someone leaning toward the collaborator, and mood was assessed by movements that were accompanied by a positive or negative affect [<xref ref-type="bibr" rid="ref27">27</xref>]. Further, we analyzed the Observational Measurement of Engagement. This tool is based on a self-identity questionnaire and the 3 dimensions of observational measurements, namely, duration, attention, and attitude. This instrument did not meet all our needs, as we had a predefined duration of an interaction, and attitude was not the focus of our study. However, attention was in our interest and was included in our observation instrument. Another study measured affect and social interaction during a game [<xref ref-type="bibr" rid="ref33">33</xref>]. Positive affect included smiling and clapping, and negative affect included sadness and anger [<xref ref-type="bibr" rid="ref33">33</xref>]. 
Both studies show the relevance of gaze direction for capturing engagement and of observable behaviors for capturing positive and negative affect.</p>
      </sec>
      <sec>
        <title>Engagement</title>
        <p>Extending the previous research, we aimed to assess participants’ engagement in robot-assisted group activities. We adapted an established rating system that has been used for the observation of students’ attention in class [<xref ref-type="bibr" rid="ref39">39</xref>]. This observation system captures 3 aspects of engagement: (1) direction of gaze (looks toward the teaching center vs looks elsewhere), (2) posture and body expression (oriented toward the teaching center and alert vs averted or flaccid), and (3) activity (performs the activity necessary for the task vs does something else on the side). Since we analyzed groups of nursing home residents and were not interested in individual differences, we assessed engagement of the group as a whole. To do so, we created a 5-point rating scale reflecting the degree of engagement in the group. For example, in the original systematic behavior observation instrument [<xref ref-type="bibr" rid="ref39">39</xref>], sequences were rated whether a child looks toward the teaching center. We have modified the formulation from “none” of the participants looks (score 1) to “all” participants look to the center of the robot-assisted activity (score 5), and this 5-level rating scale aimed to assess engagement from very low to very high (see <xref ref-type="table" rid="table1">Table 1</xref>). Therefore, we distributed the number of people who showed the behavior depending on group size on the 5-level scale.</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Description of the rating system for engagement at the group level.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="70"/>
            <col width="100"/>
            <col width="250"/>
            <col width="270"/>
            <col width="310"/>
            <thead>
              <tr valign="top">
                <td>Rating</td>
                <td>Engagement</td>
                <td>Direction of gaze</td>
                <td>Posture and body expression</td>
                <td>Activity</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>1</td>
                <td>Very low</td>
                <td>None of the participants look to the center of the robot-assisted group activity (looking elsewhere)</td>
                <td>None of the participants are turned toward the center of the robot-assisted group activity; all are turned away and flaccid</td>
                <td>None of the participants perform the activity necessary for the task, for example, performing movements, singing, or listening to the story told by the robot (doing something else on the side)</td>
              </tr>
              <tr valign="top">
                <td>2</td>
                <td>Low</td>
                <td>Most participants do not look to the center of the robot-assisted group activity</td>
                <td>Most of the participants are not turned toward the center of the robot-assisted group activity but are turned away and flaccid</td>
                <td>Most participants do not perform the activity necessary for the task, for example, performing movements, singing, or listening to the story told by the robot (doing something else on the side)</td>
              </tr>
              <tr valign="top">
                <td>3</td>
                <td>Medium</td>
                <td>Some participants look to the center of the robot-assisted group activity</td>
                <td>Some participants are turned toward the center of robot-assisted group activity and their body expression is alert (vs turned away and flaccid)</td>
                <td>Some participants perform the activity necessary for the task, for example, performing movements, singing, and listening to the story told by the robot (vs doing something else on the side)</td>
              </tr>
              <tr valign="top">
                <td>4</td>
                <td>High</td>
                <td>Most participants look to the center of the robot-assisted activity session</td>
                <td>Most participants are turned toward the center of robot-assisted group activity and their body expression is alert (vs turned away and flaccid)</td>
                <td>Most participants perform the activity necessary for the task, for example, performing movements, singing, and listening to the story told by the robot (vs doing something else on the side)</td>
              </tr>
              <tr valign="top">
                <td>5</td>
                <td>Very high</td>
                <td>All participants look to the center of the robot-assisted group activity</td>
                <td>All participants are turned toward the center of robot-assisted group activity and their body expression is alert (vs turned away and flaccid)</td>
                <td>All participants perform the activity necessary for the task, for example, performing movements, singing, and listening to the story told by the robot (vs doing something else on the side).</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
      </sec>
      <sec>
        <title>Mood</title>
        <p>To capture participants’ mood during the robot-assisted activities, we developed an observational rating scale based on the German version of the Positive and Negative Affect Schedule (PANAS) [<xref ref-type="bibr" rid="ref40">40</xref>]. The PANAS is frequently used in studies in which human mood states are of interest. The questionnaire consists of 20 adjectives describing different feelings with 10 adjectives capturing positive affect and the other 10 capturing negative affect. The items of the original PANAS are shown in <xref ref-type="boxed-text" rid="box1">Textbox 1</xref> [<xref ref-type="bibr" rid="ref25">25</xref>]. This survey instrument was chosen because it contains a set of mood variables that describe mood with positive and negative affects with several adjectives that we assumed were observable by a rater. Based on findings by Reisenzein and colleagues [<xref ref-type="bibr" rid="ref41">41</xref>] that emotions can be detected by observers using a variety of cues (eg, facial expressions, verbal expressions, physical expressions), we transformed the survey instrument PANAS into an observational rating scale for mood at the group level.</p>
        <boxed-text id="box1" position="float">
          <title>Adjectives of the Positive and Negative Affect Schedule.</title>
          <p>
            <bold>Positive affect</bold>
          </p>
          <list list-type="bullet">
            <list-item>
              <p>Attentive</p>
            </list-item>
            <list-item>
              <p>Active</p>
            </list-item>
            <list-item>
              <p>Alert</p>
            </list-item>
            <list-item>
              <p>Excited</p>
            </list-item>
            <list-item>
              <p>Enthusiastic</p>
            </list-item>
            <list-item>
              <p>Determined</p>
            </list-item>
            <list-item>
              <p>Inspired</p>
            </list-item>
            <list-item>
              <p>Proud</p>
            </list-item>
            <list-item>
              <p>Interested</p>
            </list-item>
            <list-item>
              <p>Strong (this mood could not be observed reliably in our study)</p>
            </list-item>
          </list>
          <p>
            <bold>Negative affect</bold>
          </p>
          <list list-type="bullet">
            <list-item>
              <p>Hostile</p>
            </list-item>
            <list-item>
              <p>Irritable</p>
            </list-item>
            <list-item>
              <p>Ashamed</p>
            </list-item>
            <list-item>
              <p>Guilty (this mood could not be observed reliably in our study)</p>
            </list-item>
            <list-item>
              <p>Distressed</p>
            </list-item>
            <list-item>
              <p>Upset</p>
            </list-item>
            <list-item>
              <p>Scared</p>
            </list-item>
            <list-item>
              <p>Afraid</p>
            </list-item>
            <list-item>
              <p>Jittery</p>
            </list-item>
            <list-item>
              <p>Nervous</p>
            </list-item>
          </list>
        </boxed-text>
        <p>The original 5-level response scale contains the gradations “very slightly or not at all,” “a little,” “moderately,” “quite a bit,” and “extremely.” Again, because we were interested in the mood at the group level, we adapted the rating scale to reflect observable indicators of mood in the group, and a 5-point rating scale from “very low” to “very high” was used. For example, “very low” signified none of the participants were attentive in the robot-assisted group activity, and “very high” signified all participants were attentive in the robot-assisted group activity (see <xref ref-type="table" rid="table2">Table 2</xref>). To make an objective assessment of group mood during the robot-assisted group activity, sequences from the observation were rated in relation to each adjective from the PANAS. The description of the 5-point rating scale of mood according to the PANAS is shown in <xref ref-type="table" rid="table2">Table 2</xref>. We considered the observation at group level to be particularly relevant for capturing mood in the group so that situational factors that are an important component in the observation of mental states [<xref ref-type="bibr" rid="ref41">41</xref>] could be included.</p>
        <table-wrap position="float" id="table2">
          <label>Table 2</label>
          <caption>
            <p>Description of the 5-point rating scale of mood according to the Positive and Negative Affect Schedule [<xref ref-type="bibr" rid="ref35">35</xref>] for a robot-assisted group activity.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="100"/>
            <col width="400"/>
            <col width="500"/>
            <thead>
              <tr valign="top">
                <td>Rating</td>
                <td>Extent of the perceived states for the measurement of mood in the group activity</td>
                <td>Description</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>1</td>
                <td>Very low</td>
                <td>None of the participants are __<sup>a</sup> in the robot-assisted group activity.</td>
              </tr>
              <tr valign="top">
                <td>2</td>
                <td>Low</td>
                <td>Most participants at the robot-assisted group activity are not __.</td>
              </tr>
              <tr valign="top">
                <td>3</td>
                <td>Medium</td>
                <td>Some participants are __ in the robot-assisted group activity.</td>
              </tr>
              <tr valign="top">
                <td>4</td>
                <td>High</td>
                <td>Most participants are __ in the robot-assisted group activity.</td>
              </tr>
              <tr valign="top">
                <td>5</td>
                <td>Very high</td>
                <td>All participants at the robot-assisted group activity are __.</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table2fn1">
              <p><sup>a</sup>The rating system was used for every adjective of the Positive and Negative Affect Schedule.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Coding of Video Recordings</title>
        <p>Video clips were rated independently by 2 trained observers (rater 1 and rater 2). Rater 1 was present during all robot-assisted group activities and rater 2 during two randomly selected sessions. Both raters were trained in the observation of nonverbal communication and body language for assessing the items for all 3 aspects of engagement (ie, direction of gaze, posture and body expression, activity) and for mood (ie, positive and negative affect). Clearly visible signs of dementia and severe physical limitations of the residents had to be considered, and the rating of engagement had to be adjusted to the residents’ possibilities of participation (eg, physical limitations). However, no individual was excluded from the analysis, as all ratings were performed at the group level. Each observer rated the group as a whole in every video clip by assessing whether none of the participants, some of the participants, most of the participants, or all of the participants exhibited a particular behavior indicating engagement (eg, direction of gaze, posture and body expression, activity) or positive or negative affect (eg, attentive, scared). To provide specific and context-sensitive anchors for the ratings, we counted the number of participants and distributed them proportionally across the 5-point scale. Thus, it depended on the actual group size what most and some participants meant. During the initial trial, it became apparent that all variables of engagement could easily be observed and rated by both raters. However, for rating the perceived mood in the group of study participants according to PANAS, additional coding rules had to be defined. The items “strong” and “guilty” were difficult to observe and hard to differentiate from 2 other items (eg, proud, ashamed) and thus not considered in the analysis. Each video clip was rated independently by the 2 raters to allow for reliability assessment.</p>
      </sec>
      <sec>
        <title>Data Analysis</title>
        <sec>
          <title>Interrater Agreement</title>
          <p>To evaluate the agreement between 2 raters, we calculated the intraclass correlation coefficient (ICC) with the SPSS statistics software (version 26; IBM Corp). An ICC higher than 0.61 was considered substantial, and ICC higher than 0.81 was considered an almost perfect agreement [<xref ref-type="bibr" rid="ref42">42</xref>].</p>
        </sec>
        <sec>
          <title>Video Analysis</title>
          <p>The number and gender of participants who attended each robot-assisted group activity session was extracted from the videos and presented descriptively. For the analysis of engagement, the mean values, standard deviation, and medians of the aspects direction of gaze, posture and body expression, and activity as well as the overall mean value, standard deviation, and median of engagement were calculated from the observers’ ratings. We also calculated the mean value, standard deviation, and median for each item and the positive and negative affect dimensions from the PANAS for each robot-assisted group activity session.</p>
        </sec>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Videos and Study Participants</title>
        <p>Of the 34 video clips recorded during 5 robot-assisted group activities, 3 videos had to be excluded because not all participants were visible or the video was too short to be rated. Thus, we finally included 31 video clips. In the 4 participating nursing homes, 85 older residents participated in 5 robot-assisted group activity sessions. Participant characteristics are provided in <xref ref-type="table" rid="table3">Table 3</xref>.</p>
        <table-wrap position="float" id="table3">
          <label>Table 3</label>
          <caption>
            <p>Characteristics of the nursing homes and attendance in the activity sessions.<sup>a</sup></p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="350"/>
            <col width="250"/>
            <col width="400"/>
            <thead>
              <tr valign="top">
                <td>Participating long-term care facilities</td>
                <td>Residential spaces (n)</td>
                <td>Residents attending the group activity (N=85)</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>1</td>
                <td>160</td>
                <td>15</td>
              </tr>
              <tr valign="top">
                <td>2</td>
                <td>140</td>
                <td>20</td>
              </tr>
              <tr valign="top">
                <td>2</td>
                <td>140</td>
                <td>16</td>
              </tr>
              <tr valign="top">
                <td>3</td>
                <td>82</td>
                <td>18</td>
              </tr>
              <tr valign="top">
                <td>4</td>
                <td>48</td>
                <td>16</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table3fn1">
              <p><sup>a</sup>In nursing home 2, we conducted 2 independent robot-assisted group activity sessions.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Interrater Agreement</title>
        <p>Agreement between the 2 raters was high for engagement and positive and negative affect. Engagement had an ICC score of 0.83 (95% CI 0.65-0.92). Negative affect reached an ICC of 0.84 (95% CI 0.67-0.93), and positive affect had an ICC of 0.90 (95% CI 0.79-0.96). Individual items, specifically adjectives that belonged to negative affect, received a rather weak ICC. These include the items “ashamed” (ICC 0.37, 95% CI –0.32 to 0.70) and “afraid” (ICC 0.39, 95% CI –1.07 to 0.52).</p>
      </sec>
      <sec>
        <title>Engagement</title>
        <p>As <xref ref-type="table" rid="table4">Table 4</xref> demonstrates, the results show that the engagement of the participants in the robot-assisted group activity was high (mean 4.19, SD 0.47; median 4.0). The direction of gaze was measured as almost very high (mean 4.65, SD 0.49; median 5.0); posture and body expression (mean 4.03, SD 0.71; median 4.0) and activity (mean 3.90, SD 0.65; median 4.0) were also rated as high.</p>
      </sec>
      <sec>
        <title>Mood</title>
        <p>Overall, no negative affect could be observed (mean 1.13, SD 0.20; median 1.0). The mean value of positive affect was 3.22 (SD 0.55; median 3.2), which indicates the observer perceived a good mood during the sessions. Adjectives of the positive affect such as interested (mean 4.13, SD 0.56; median 4.0), alert (mean 4.39, SD 0.67; median 4.0), inspired (mean 3.87, SD 0.96; median 4.0), attentive (mean 4.19, SD 1.05; median 4.0), and active (mean 4.16, SD 0.64; median 4.0) received high ratings around the value 4, while enthusiastic (mean 2.42, SD 1.03; median 2.0), proud (mean 1.23, SD 0.43; median 1.0), and determined (mean 1.94, SD 0.77; median 2.0) were observed to be very low or low within the group of participants. <xref ref-type="table" rid="table4">Table 4</xref> shows the detailed results.</p>
        <table-wrap position="float" id="table4">
          <label>Table 4</label>
          <caption>
            <p>Interrater agreement as well as the mean (SD) and median values for the study variables engagement and mood (ie, positive and negative affect) observed during robot-assisted group activities.<sup>a</sup></p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="230"/>
            <col width="400"/>
            <col width="200"/>
            <col width="140"/>
            <thead>
              <tr valign="top">
                <td colspan="2">
                  <break/>
                </td>
                <td>Interrater agreement (intraclass correlation coefficient)</td>
                <td>Mean (SD)</td>
                <td>Median</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="2">
                  <bold>Engagement</bold>
                </td>
                <td>0.831</td>
                <td>4.19 (0.47)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Direction of gaze</td>
                <td>0.661</td>
                <td>4.65 (0.49)</td>
                <td>5.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Posture and body expression</td>
                <td>0.883</td>
                <td>4.03 (0.71)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Activity</td>
                <td>0.811</td>
                <td>3.90 (0.65)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td colspan="2">
                  <bold>Positive affect</bold>
                </td>
                <td>0.902</td>
                <td>3.22 (0.55)</td>
                <td>3.2</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Interested</td>
                <td>0.842</td>
                <td>4.13 (0.56)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Excited</td>
                <td>0.825</td>
                <td>2.61 (0.92)</td>
                <td>3.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Enthusiastic</td>
                <td>0.840</td>
                <td>2.42 (1.03)</td>
                <td>2.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Proud</td>
                <td>0.680</td>
                <td>1.23 (0.43)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Alert</td>
                <td>0.750</td>
                <td>4.39 (0.67)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Inspired</td>
                <td>0.884</td>
                <td>3.87 (0.96)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Attentive</td>
                <td>0.901</td>
                <td>4.19 (1.05)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Determined</td>
                <td>0.766</td>
                <td>1.94 (0.77)</td>
                <td>2.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Active</td>
                <td>0.722</td>
                <td>4.16 (0.64)</td>
                <td>4.0</td>
              </tr>
              <tr valign="top">
                <td colspan="2">
                  <bold>Negative affect</bold>
                </td>
                <td>0.840</td>
                <td>1.13 (0.20)</td>
                <td>1.1</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Distressed</td>
                <td>0.842</td>
                <td>1.29 (0.59)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Upset</td>
                <td>0.768</td>
                <td>1.16 (0.52)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Scared</td>
                <td>0.659</td>
                <td>1.06 (0.43)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Hostile</td>
                <td>0.491</td>
                <td>1.10 (0.48)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Irritable</td>
                <td>0.649</td>
                <td>1.06 (0.25)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Ashamed</td>
                <td>0.365</td>
                <td>1.16 (0.37)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Nervous</td>
                <td>0.804</td>
                <td>1.29 (0.53)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Jittery</td>
                <td>0.665</td>
                <td>1.23 (0.43)</td>
                <td>1.0</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Afraid</td>
                <td>0.390</td>
                <td>1.13 (0.51)</td>
                <td>1.0</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table4fn1">
              <p><sup>a</sup>The items “strong” and “guilty” were not analyzed.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Additional Observations</title>
        <p>Although we did not collect this information systematically, we observed that more residents participated in the robot-assisted activity sessions than expected by the nursing home staff and the research team. The different types of robot-assisted exercises (ie, singing, storytelling, gymnastics) promoted a variety of cognitive and physical stimulations as would a human instructor. Further, when watching the video recordings, we noted that the nursing home staff took time to assist and support participants individually during the robot-assisted activity session. Conversations took place between the residents and the nursing staff, and it seemed that the robot conducting all the instructions allowed more time for personal care.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <p>In robot-assisted group activity sessions for older adults in nursing homes, their engagement and mood (ie, positive affect) can be regarded as preconditions to achieve the intended positive effects of physical and cognitive stimulation. Our observational pilot study in 4 nursing homes shows that residents actively engage in the leisure activities demonstrated and guided by a humanlike social robot. Overall, the engagement of the older adults in gymnastics exercises, singing with the robot, or listening to the robot telling stories was high. Engagement in the group activity was measured using 3 variables: direction of gaze, posture and body expression, and activity that the robot demonstrated. Almost all participants in the robot-assisted activity sessions kept their gaze directed toward the robot, and most had an active alert posture and actively imitated the movements demonstrated by the robot. We observed a positive mood in the groups during the robot-assisted activity sessions. Overall, the items measuring positive affect received high ratings, and the mood in the groups was mainly interested, alert, inspired, and attentive. The results of our study extend and complement existing laboratory studies as well as studies applied in areas other than the nursing home [<xref ref-type="bibr" rid="ref13">13</xref>] by systematically using observational data to gain a better understanding of the ways in which residents engage in and experience robot-assisted group activities. From a methodological point of view, the participatory observation with video recording provided new insights. The systematic coding of video clips using structured observation systems for both study variables allowed us to reliably show whether participants engage with a positive mood. 
Further, the observation system developed for this study complements existing instruments for measuring engagement and positive and negative affect by focusing on group level measures and the behavior toward a humanlike social robot in a group activity.</p>
      <p>In contrast, other instruments [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref34">34</xref>] focus on engagement-related behavior, wherein older adults directly interacted in a one-on-one setting with the robot and not within a group activity. During a group activity, it is common for older adults, especially for those with physical limitations and early signs of dementia, to express behavior less consistently and clearly. By observing at group level, it was possible to assess the engagement and the mood of the individual in the situation and context of the group, and the sometimes subtle cues to emotion could be reliably detected by the trained raters. This is important as the technical recognition system still needs to be greatly improved [<xref ref-type="bibr" rid="ref36">36</xref>]. The added value of the instrument is that it allows for monitoring engagement and mood in a group of participants with limited self-report capabilities and thus broadens the insights gained with the existing instruments. In combination with the initial findings from other field studies [<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref21">21</xref>,<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref25">25</xref>,<xref ref-type="bibr" rid="ref43">43</xref>] specifically studying the fostering of well-being in a nursing home setting, our results show the potential of such activity sessions to make a valuable contribution. For example, a memory study program with a humanlike social robot for older adults for cognitive training in a nursing home showed positive trends [<xref ref-type="bibr" rid="ref44">44</xref>] and could be adapted for fun group activities.</p>
      <p>As limitations, the following aspects should be mentioned. First, participants attended the robot-assisted group activity sessions voluntarily and were generally informed about the content beforehand; so, when they joined the session, they may have had a positive attitude toward robots, which could therefore have introduced a bias toward a more positive affect. Moreover, although the reliability of the observation system could be shown with high ICC values for most items measuring affect, some mood items had to be excluded due to difficulties in distinguishing them through observation during short interactions in pretests and some items still have rather low ICC values (eg, ashamed, afraid). This indicates that negative affect was more difficult to assess, which needs to be reflected critically when interpreting our findings. This result matches the findings of a study that measured emotions of individuals with severe intellectual disabilities where positive emotions were also found to be more observable than negative ones [<xref ref-type="bibr" rid="ref45">45</xref>]. Thus, the investigation of negative affect while participating in robot-assisted activities might be an interesting focus of future studies. Second, we did not collect data as to whether the participants had mild or severe dementia. Although the analysis at the group level allowed consideration of situational factors and the constraints of the individuals were included by the raters, there may be differences in engagement and mood expression depending on the level of dementia as previous research shows [<xref ref-type="bibr" rid="ref46">46</xref>]. Third, each robot-assisted activity was only performed once per group. Thus, we were unable to assess sequence effects or analyze which activities are the most engaging or which activities might tire participants more quickly. Moreover, in this study, we did not have the opportunity to study engagement and mood over a long period of time. 
Future research is needed for this [<xref ref-type="bibr" rid="ref47">47</xref>]. Thus, novelty effects cannot be excluded. Interestingly, some studies [<xref ref-type="bibr" rid="ref23">23</xref>] over longer periods of time did not report an attractiveness loss of the robot, but they did mention loss of interest due to usability problems with the robots. The so-called novelty effect [<xref ref-type="bibr" rid="ref48">48</xref>] theoretically predicts “a decrease in the engagement with a stimulus after its initial novelty has worn off.” Usually, it is seen as a bias that has to be overcome (eg, by repeated interaction with the robot). An experiment with well-controlled repeated interactions showed that perceptions were positively influenced when participants interacted with the robot [<xref ref-type="bibr" rid="ref48">48</xref>] and reported that a consistently positive interaction was already determined in the first 2 minutes of the conversation with the robot and remained stable over the subsequent sessions. In contrast, perceived threat and discomfort were the dimensions that changed the most during the interactions and decreased until the last session of the experiment [<xref ref-type="bibr" rid="ref48">48</xref>]. With this in mind, we assume that the engagement and positive mood observed in the initial interaction as in our study are likely to be maintained in a relatively stable manner. Fourth, our pilot study investigates engagement and mood across different exercises within a robot-assisted activity session. In terms of effects on health, a larger study should assess which type of exercise receives the greatest engagement and positive affect, and it is of interest to continuously record the exercises. This allows for a more precise analysis of behavior during exercises, such as fatigue, and facilitates better comparisons of participation across the different exercises. 
This knowledge would inform future software development and implementation of social robots. Finally, we did not investigate the practicability of the NAO robot for nursing staff or how a robot-assisted group activity can be successfully implemented in a nursing home. Research following a human-centered design approach [<xref ref-type="bibr" rid="ref49">49</xref>] and an improved understanding of the sustainable integration of social robots into the leisure activities of older adults receiving care are crucial.</p>
      <p>Based on the positive findings of our study, questions arise about other application areas for robot-assisted group activities. Group sessions with social robots generate a form of enthusiasm, which is why they may be particularly suitable for group activities with vulnerable groups such as children, older adults, or people with disabilities. For all these potential target groups, interactions need to be designed in a way that results in maximum benefit and does no harm. In the context of the shortage of skilled nursing staff, social robots bring the potential to conduct a leisure group activity where caregivers do not need to be continuously present, thereby enabling older adults to be physically and cognitively engaged with less care effort and with fun. Moreover, if the social robot demonstrates exercises, nursing staff have more time for individual care as well as for personal conversations with the older adults. The literature shows that engagement and mood are prerequisites for health effects to be achieved [<xref ref-type="bibr" rid="ref26">26</xref>]. Although the generalizability of our results must be established by future research, we found that older adults engage in robot-assisted group activities and that most of them were in a good mood during the session—interested, alert, inspired, and attentive. Therefore, the positive results on engagement and mood provide clear indications that humanlike social robots can improve the cognitive and physical abilities of older adults. Compared to other technologies, robots, with their ability to communicate in a humanlike manner, are particularly well suited to supporting individuals physically and psychologically. Further development of this new technology of social robots is thus worthwhile in terms of promoting the quality of life of older adults in nursing homes.</p>
    </sec>
  </body>
  <back>
    <app-group/>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">ICC</term>
          <def>
            <p>intraclass correlation coefficient</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">PANAS</term>
          <def>
            <p>Positive and Negative Affect Schedule</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>We would like to thank all the participants in this study and care professionals working in the nursing home facilities enrolled in our study. We would like to specially thank Gabriela Bohler from Smart Companion who provided the software for this study. This study was part of the strategic initiative “Robo-Lab FHNW” funded by the University of Applied Sciences and Arts Northwestern Switzerland (FHNW) from 2018 to 2020. The Swiss Innovation Agency Innosuisse provided additional funding for this study (Innovationsscheck 33808.1).</p>
    </ack>
    <fn-group>
      <fn fn-type="con">
        <p>All authors contributed to the study conception and design. AT and AU prepared the material, collected the data, and performed the analysis. AT, HS, and TM prepared the first draft of the manuscript. All authors read and approved the final paper.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gheysen</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Poppe</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>DeSmet</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Swinnen</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Cardon</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>De Bourdeaudhuij</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Chastin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Fias</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Physical activity to improve cognition in older adults: can physical activity programs enriched with cognitive challenges enhance the effects? A systematic review and meta-analysis</article-title>
          <source>Int J Behav Nutr Phys Act</source>
          <year>2018</year>
          <month>07</month>
          <day>04</day>
          <volume>15</volume>
          <issue>1</issue>
          <fpage>63</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://ijbnpa.biomedcentral.com/articles/10.1186/s12966-018-0697-x"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12966-018-0697-x</pub-id>
          <pub-id pub-id-type="medline">29973193</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12966-018-0697-x</pub-id>
          <pub-id pub-id-type="pmcid">PMC6032764</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Anzalone</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Boucenna</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ivaldi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chetouani</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Evaluating the engagement with social robots</article-title>
          <source>Int J of Soc Robotics</source>
          <year>2015</year>
          <month>4</month>
          <day>17</day>
          <volume>7</volume>
          <issue>4</issue>
          <fpage>465</fpage>
          <pub-id pub-id-type="doi">10.1007/s12369-015-0298-7</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Broadbent</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Stafford</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>MacDonald</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Acceptance of healthcare robots for the older population: review and future directions</article-title>
          <source>Int J of Soc Robotics</source>
          <year>2009</year>
          <month>10</month>
          <day>3</day>
          <volume>1</volume>
          <issue>4</issue>
          <fpage>319</fpage>
          <lpage>330</lpage>
          <pub-id pub-id-type="doi">10.1007/s12369-009-0030-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Broekens</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Heerink</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rosendal</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Assistive social robots in elderly care: A review</article-title>
          <source>Gerontechnology</source>
          <year>2009</year>
          <volume>8</volume>
          <fpage>A</fpage>
          <pub-id pub-id-type="doi">10.4017/gt.2009.08.02.002.00</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Abdi</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Al-Hindawi</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ng</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Vizcaychipi</surname>
              <given-names>MP</given-names>
            </name>
          </person-group>
          <article-title>Scoping review on the use of socially assistive robot technology in elderly care</article-title>
          <source>BMJ Open</source>
          <year>2018</year>
          <month>02</month>
          <day>12</day>
          <volume>8</volume>
          <issue>2</issue>
          <fpage>e018815</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmjopen.bmj.com/lookup/pmidlookup?view=long&#38;pmid=29440212"/>
          </comment>
          <pub-id pub-id-type="doi">10.1136/bmjopen-2017-018815</pub-id>
          <pub-id pub-id-type="medline">29440212</pub-id>
          <pub-id pub-id-type="pii">bmjopen-2017-018815</pub-id>
          <pub-id pub-id-type="pmcid">PMC5829664</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vandemeulebroucke</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>de Casterlé</surname>
              <given-names>Bernadette Dierckx</given-names>
            </name>
            <name name-style="western">
              <surname>Gastmans</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>How do older adults experience and perceive socially assistive robots in aged care: a systematic review of qualitative evidence</article-title>
          <source>Aging Ment Health</source>
          <year>2018</year>
          <month>02</month>
          <volume>22</volume>
          <issue>2</issue>
          <fpage>149</fpage>
          <lpage>167</lpage>
          <pub-id pub-id-type="doi">10.1080/13607863.2017.1286455</pub-id>
          <pub-id pub-id-type="medline">28282732</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pu</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Moyle</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Todorovic</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>The effectiveness of social robots for older adults: a systematic review and meta-analysis of randomized controlled studies</article-title>
          <source>Gerontologist</source>
          <year>2019</year>
          <month>01</month>
          <day>09</day>
          <volume>59</volume>
          <issue>1</issue>
          <fpage>e37</fpage>
          <lpage>e51</lpage>
          <pub-id pub-id-type="doi">10.1093/geront/gny046</pub-id>
          <pub-id pub-id-type="medline">29897445</pub-id>
          <pub-id pub-id-type="pii">5036100</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Scoglio</surname>
              <given-names>AA</given-names>
            </name>
            <name name-style="western">
              <surname>Reilly</surname>
              <given-names>ED</given-names>
            </name>
            <name name-style="western">
              <surname>Gorman</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Drebing</surname>
              <given-names>CE</given-names>
            </name>
          </person-group>
          <article-title>Use of social robots in mental health and well-being research: systematic review</article-title>
          <source>J Med Internet Res</source>
          <year>2019</year>
          <month>07</month>
          <day>24</day>
          <volume>21</volume>
          <issue>7</issue>
          <fpage>e13322</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2019/7/e13322/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/13322</pub-id>
          <pub-id pub-id-type="medline">31342908</pub-id>
          <pub-id pub-id-type="pii">v21i7e13322</pub-id>
          <pub-id pub-id-type="pmcid">PMC6685125</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="web">
          <source>PARO Therapeutic Robot</source>
          <access-date>2021-11-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://www.parorobots.com/">http://www.parorobots.com/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Henschel</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Laban</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Cross</surname>
              <given-names>ES</given-names>
            </name>
          </person-group>
          <article-title>What makes a robot social? a review of social robots from science fiction to a home or hospital near you</article-title>
          <source>Curr Robot Rep</source>
          <year>2021</year>
          <volume>2</volume>
          <issue>1</issue>
          <fpage>9</fpage>
          <lpage>19</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/34977592"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s43154-020-00035-0</pub-id>
          <pub-id pub-id-type="medline">34977592</pub-id>
          <pub-id pub-id-type="pii">35</pub-id>
          <pub-id pub-id-type="pmcid">PMC7860159</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lorenz</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Weiss</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Hirche</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Synchrony and reciprocity: key mechanisms for social companion robots in therapy and care</article-title>
          <source>Int J of Soc Robotics</source>
          <year>2015</year>
          <month>11</month>
          <day>2</day>
          <volume>8</volume>
          <issue>1</issue>
          <fpage>125</fpage>
          <lpage>143</lpage>
          <pub-id pub-id-type="doi">10.1007/s12369-015-0325-8</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Valentí Soler</surname>
              <given-names>Meritxell</given-names>
            </name>
            <name name-style="western">
              <surname>Agüera-Ortiz</surname>
              <given-names>Luis</given-names>
            </name>
            <name name-style="western">
              <surname>Olazarán Rodríguez</surname>
              <given-names>Javier</given-names>
            </name>
            <name name-style="western">
              <surname>Mendoza Rebolledo</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Pérez Muñoz</surname>
              <given-names>Almudena</given-names>
            </name>
            <name name-style="western">
              <surname>Rodríguez Pérez</surname>
              <given-names>Irene</given-names>
            </name>
            <name name-style="western">
              <surname>Osa Ruiz</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Barrios Sánchez</surname>
              <given-names>Ana</given-names>
            </name>
            <name name-style="western">
              <surname>Herrero Cano</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Carrasco Chillón</surname>
              <given-names>Laura</given-names>
            </name>
            <name name-style="western">
              <surname>Felipe Ruiz</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>López Alvarez</surname>
              <given-names>Jorge</given-names>
            </name>
            <name name-style="western">
              <surname>León Salas</surname>
              <given-names>Beatriz</given-names>
            </name>
            <name name-style="western">
              <surname>Cañas Plaza</surname>
              <given-names>José M</given-names>
            </name>
            <name name-style="western">
              <surname>Martín Rico</surname>
              <given-names>Francisco</given-names>
            </name>
            <name name-style="western">
              <surname>Abella Dago</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Martínez Martín</surname>
              <given-names>Pablo</given-names>
            </name>
          </person-group>
          <article-title>Social robots in advanced dementia</article-title>
          <source>Front Aging Neurosci</source>
          <year>2015</year>
          <volume>7</volume>
          <fpage>133</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/26388764"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fnagi.2015.00133</pub-id>
          <pub-id pub-id-type="medline">26388764</pub-id>
          <pub-id pub-id-type="pmcid">PMC4558428</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chu</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Khosla</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Khaksar</surname>
              <given-names>SMS</given-names>
            </name>
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Service innovation through social robot engagement to improve dementia care quality</article-title>
          <source>Assist Technol</source>
          <year>2017</year>
          <volume>29</volume>
          <issue>1</issue>
          <fpage>8</fpage>
          <lpage>18</lpage>
          <pub-id pub-id-type="doi">10.1080/10400435.2016.1171807</pub-id>
          <pub-id pub-id-type="medline">27064692</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pandey</surname>
              <given-names>AK</given-names>
            </name>
            <name name-style="western">
              <surname>Gelin</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>A mass-produced sociable humanoid robot: Pepper-the first machine of its kind</article-title>
          <source>IEEE Robot. Automat. Mag</source>
          <year>2018</year>
          <month>9</month>
          <volume>25</volume>
          <issue>3</issue>
          <fpage>40</fpage>
          <lpage>48</lpage>
          <pub-id pub-id-type="doi">10.1109/mra.2018.2833157</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Costa</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Steffgen</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Rodríguez</surname>
              <given-names>LF</given-names>
            </name>
            <name name-style="western">
              <surname>Nazarikhorram</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ziafati</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Socially assistive robots for teaching emotional abilities to children with autism spectrum disorder</article-title>
          <year>2017</year>
          <conf-name>Conference on Human-Robot Interaction (HRI2017)</conf-name>
          <conf-date>March 6-9</conf-date>
          <conf-loc>Vienna</conf-loc>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dinesen</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Hansen</surname>
              <given-names>HK</given-names>
            </name>
            <name name-style="western">
              <surname>Grønborg</surname>
              <given-names>Gry Bruun</given-names>
            </name>
            <name name-style="western">
              <surname>Dyrvig</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Leisted</surname>
              <given-names>SD</given-names>
            </name>
            <name name-style="western">
              <surname>Stenstrup</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Skov Schacksen</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Oestergaard</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>Use of a social robot (LOVOT) for persons with dementia: exploratory study</article-title>
          <source>JMIR Rehabil Assist Technol</source>
          <year>2022</year>
          <month>08</month>
          <day>01</day>
          <volume>9</volume>
          <issue>3</issue>
          <fpage>e36505</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://rehab.jmir.org/2022/3/e36505/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/36505</pub-id>
          <pub-id pub-id-type="medline">35916689</pub-id>
          <pub-id pub-id-type="pii">v9i3e36505</pub-id>
          <pub-id pub-id-type="pmcid">PMC9379791</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>van Dam</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Gielissen</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Reijnders</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>van der Poel</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Boon</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Experiences of persons with executive dysfunction in disability care using a social robot to execute daily tasks and increase the feeling of independence: multiple-case study</article-title>
          <source>JMIR Rehabil Assist Technol</source>
          <year>2022</year>
          <month>11</month>
          <day>03</day>
          <volume>9</volume>
          <issue>4</issue>
          <fpage>e41313</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://rehab.jmir.org/2022/4/e41313/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/41313</pub-id>
          <pub-id pub-id-type="medline">36326800</pub-id>
          <pub-id pub-id-type="pii">v9i4e41313</pub-id>
          <pub-id pub-id-type="pmcid">PMC9672999</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Randall</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Kamino</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Joshi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Hsu</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Tsui</surname>
              <given-names>KM</given-names>
            </name>
            <name name-style="western">
              <surname>Šabanović</surname>
              <given-names>Selma</given-names>
            </name>
          </person-group>
          <article-title>Understanding the connection among Ikigai, well-being, and home robot acceptance in Japanese older adults: mixed methods study</article-title>
          <source>JMIR Aging</source>
          <year>2023</year>
          <month>10</month>
          <day>04</day>
          <volume>6</volume>
          <fpage>e45442</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://aging.jmir.org/2023//e45442/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/45442</pub-id>
          <pub-id pub-id-type="medline">37792460</pub-id>
          <pub-id pub-id-type="pii">v6i1e45442</pub-id>
          <pub-id pub-id-type="pmcid">PMC10585446</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Crowell</surname>
              <given-names>CR</given-names>
            </name>
            <name name-style="western">
              <surname>Deska</surname>
              <given-names>JC</given-names>
            </name>
            <name name-style="western">
              <surname>Villano</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Zenk</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Roddy</surname>
              <given-names>John T</given-names>
            </name>
          </person-group>
          <article-title>Anthropomorphism of robots: study of appearance and agency</article-title>
          <source>JMIR Hum Factors</source>
          <year>2019</year>
          <month>05</month>
          <day>10</day>
          <volume>6</volume>
          <issue>2</issue>
          <fpage>e12629</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://humanfactors.jmir.org/2019/2/e12629/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/12629</pub-id>
          <pub-id pub-id-type="medline">31094323</pub-id>
          <pub-id pub-id-type="pii">v6i2e12629</pub-id>
          <pub-id pub-id-type="pmcid">PMC6533876</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Sung</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Moyle</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Assessing engagement in people with dementia: a new approach to assessment using video analysis</article-title>
          <source>Arch Psychiatr Nurs</source>
          <year>2015</year>
          <month>12</month>
          <volume>29</volume>
          <issue>6</issue>
          <fpage>377</fpage>
          <lpage>382</lpage>
          <pub-id pub-id-type="doi">10.1016/j.apnu.2015.06.019</pub-id>
          <pub-id pub-id-type="medline">26577550</pub-id>
          <pub-id pub-id-type="pii">S0883-9417(15)00195-8</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bäck</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Mäkelä</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kallio</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Robot-guided exercise program for the rehabilitation of older nursing home residents</article-title>
          <source>Annals of Long-Term Care</source>
          <access-date>2021-08-18</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://researchportal.tuni.fi/en/publications/robot-guided-exercise-program-for-the-rehabilitation-of-older-nur">https://researchportal.tuni.fi/en/publications/robot-guided-exercise-program-for-the-rehabilitation-of-older-nur</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cruz-Sandoval</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Morales-Tellez</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sandoval</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Favela</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>A social robot as therapy facilitator in interventions to deal with dementia-related behavioral symptoms</article-title>
          <year>2020</year>
          <conf-name>ACM/IEEE International Conference on Human-Robot Interaction (HRI2020)</conf-name>
          <conf-date>March 23-26</conf-date>
          <conf-loc>Cambridge</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3319502.3374840</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Huisman</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kort</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Two-year use of care robot Zora in Dutch nursing homes: an evaluation study</article-title>
          <source>Healthcare (Basel)</source>
          <year>2019</year>
          <month>02</month>
          <day>19</day>
          <volume>7</volume>
          <issue>1</issue>
          <fpage>31</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=healthcare7010031"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/healthcare7010031</pub-id>
          <pub-id pub-id-type="medline">30791489</pub-id>
          <pub-id pub-id-type="pii">healthcare7010031</pub-id>
          <pub-id pub-id-type="pmcid">PMC6473570</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Nomura</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Kanda</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Yamada</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Suzuki</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>The effects of assistive walking robots for health care support on older persons: a preliminary field experiment in an elder care facility</article-title>
          <source>Intel Serv Robotics</source>
          <year>2021</year>
          <month>01</month>
          <day>10</day>
          <volume>14</volume>
          <issue>1</issue>
          <fpage>25</fpage>
          <lpage>32</lpage>
          <pub-id pub-id-type="doi">10.1007/s11370-020-00345-4</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Martín</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Agüero</surname>
              <given-names>CE</given-names>
            </name>
            <name name-style="western">
              <surname>Cañas</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Valenti</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Martínez-Martín</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Robotherapy with dementia patients</article-title>
          <source>International Journal of Advanced Robotic Systems</source>
          <year>2013</year>
          <month>01</month>
          <day>01</day>
          <volume>10</volume>
          <issue>1</issue>
          <fpage>10</fpage>
          <pub-id pub-id-type="doi">10.5772/54765</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kolanowski</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Buettner</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Litaker</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Yu</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Factors that relate to activity engagement in nursing home residents</article-title>
          <source>Am J Alzheimers Dis Other Demen</source>
          <year>2006</year>
          <volume>21</volume>
          <issue>1</issue>
          <fpage>15</fpage>
          <lpage>22</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/10.1177/153331750602100109?url_ver=Z39.88-2003&#38;rfr_id=ori:rid:crossref.org&#38;rfr_dat=cr_pub%20%200pubmed"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/153331750602100109</pub-id>
          <pub-id pub-id-type="medline">16526585</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Perugia</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>van Berkel</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Díaz-Boladeras</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Català-Mallofré</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Rauterberg</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Barakova</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Understanding engagement in dementia through behavior: the Ethographic and Laban-inspired coding system of engagement (ELICSE) and the evidence-based model of engagement-related behavior (EMODEB)</article-title>
          <source>Front Psychol</source>
          <year>2018</year>
          <volume>9</volume>
          <fpage>2</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29881360"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fpsyg.2018.00690</pub-id>
          <pub-id pub-id-type="medline">29881360</pub-id>
          <pub-id pub-id-type="pmcid">PMC5976786</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Belpaeme</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Kennedy</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ramachandran</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Scassellati</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Tanaka</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Social robots for education: A review</article-title>
          <source>Sci Robot</source>
          <year>2018</year>
          <month>08</month>
          <day>15</day>
          <volume>3</volume>
          <issue>21</issue>
          <fpage>eaat5954</fpage>
          <pub-id pub-id-type="doi">10.1126/scirobotics.aat5954</pub-id>
          <pub-id pub-id-type="medline">33141719</pub-id>
          <pub-id pub-id-type="pii">3/21/eaat5954</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cohen-Mansfield</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Marx</surname>
              <given-names>MS</given-names>
            </name>
            <name name-style="western">
              <surname>Freedman</surname>
              <given-names>LS</given-names>
            </name>
            <name name-style="western">
              <surname>Murad</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Regier</surname>
              <given-names>NG</given-names>
            </name>
            <name name-style="western">
              <surname>Thein</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Dakheel-Ali</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>The comprehensive process model of engagement</article-title>
          <source>The American Journal of Geriatric Psychiatry</source>
          <year>2011</year>
          <month>10</month>
          <volume>19</volume>
          <issue>10</issue>
          <fpage>859</fpage>
          <lpage>870</lpage>
          <pub-id pub-id-type="doi">10.1097/jgp.0b013e318202bf5b</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Serholt</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Barendregt</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Robots tutoring children: longitudinal evaluation of social engagement in child-robot interaction</article-title>
          <year>2016</year>
          <conf-name>9th Nordic Conference on Human-Computer Interaction</conf-name>
          <conf-date>October 23-27</conf-date>
          <conf-loc>Gothenburg</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2971485.2971536</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ahmad</surname>
              <given-names>MI</given-names>
            </name>
            <name name-style="western">
              <surname>Mubin</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Orlando</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Adaptive social robot for sustaining social engagement during long-term children–robot interaction</article-title>
          <source>International Journal of Human–Computer Interaction</source>
          <year>2017</year>
          <month>03</month>
          <day>03</day>
          <volume>33</volume>
          <issue>12</issue>
          <fpage>943</fpage>
          <lpage>962</lpage>
          <pub-id pub-id-type="doi">10.1080/10447318.2017.1300750</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Simut</surname>
              <given-names>RE</given-names>
            </name>
            <name name-style="western">
              <surname>Vanderfaeillie</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Peca</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Van de Perre</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Vanderborght</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Children with autism spectrum disorders make a fruit salad with PROBO, the social robot: an interaction study</article-title>
          <source>J Autism Dev Disord</source>
          <year>2016</year>
          <month>01</month>
          <volume>46</volume>
          <issue>1</issue>
          <fpage>113</fpage>
          <lpage>126</lpage>
          <pub-id pub-id-type="doi">10.1007/s10803-015-2556-9</pub-id>
          <pub-id pub-id-type="medline">26304031</pub-id>
          <pub-id pub-id-type="pii">10.1007/s10803-015-2556-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shahid</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Krahmer</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Swerts</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Child–robot interaction across cultures: How does playing a game with a social robot compare to playing a game alone or with a friend?</article-title>
          <source>Computers in Human Behavior</source>
          <year>2014</year>
          <month>11</month>
          <volume>40</volume>
          <fpage>86</fpage>
          <lpage>100</lpage>
          <pub-id pub-id-type="doi">10.1016/j.chb.2014.07.043</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cohen-Mansfield</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Dakheel-Ali</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Marx</surname>
              <given-names>MS</given-names>
            </name>
          </person-group>
          <article-title>Engagement in persons with dementia: the concept and its measurement</article-title>
          <source>The American Journal of Geriatric Psychiatry</source>
          <year>2009</year>
          <month>04</month>
          <volume>17</volume>
          <issue>4</issue>
          <fpage>299</fpage>
          <lpage>307</lpage>
          <pub-id pub-id-type="doi">10.1097/jgp.0b013e31818f3a52</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Watson</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Clark</surname>
              <given-names>LA</given-names>
            </name>
            <name name-style="western">
              <surname>Tellegen</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Development and validation of brief measures of positive and negative affect: The PANAS scales</article-title>
          <source>Journal of Personality and Social Psychology</source>
          <year>1988</year>
          <volume>54</volume>
          <issue>6</issue>
          <fpage>1063</fpage>
          <lpage>1070</lpage>
          <pub-id pub-id-type="doi">10.1037/0022-3514.54.6.1063</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="web">
          <article-title>YEOSUITE software</article-title>
          <source>Avatarion Technology AG</source>
          <access-date>2021-09-20</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.avatarion.ch/de/yeosuite/">https://www.avatarion.ch/de/yeosuite/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="web">
          <article-title>Social robots 4 you</article-title>
          <source>Smart Companion</source>
          <access-date>2021-08-21</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.smart-companion.ch/">https://www.smart-companion.ch/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gillespie</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <source>Manufacturing Knowledge: A History of the Hawthorne Experiments (Studies in Economic History and Policy: USA in the Twentieth Century)</source>
          <year>1993</year>
          <publisher-loc>Cambridge, UK</publisher-loc>
          <publisher-name>Cambridge University Press</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ehrhardt</surname>
              <given-names>KJ</given-names>
            </name>
            <name name-style="western">
              <surname>Findeisen</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Marinello</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Reinartz-Wenzel</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Systematische verhaltensbeobachtung von aufmerksamkeit im unterricht: zur prüfung von objektivität und zuverlässigkeit (Systematic behavior observation of attention in class: Proof of objectivity and reliability)</article-title>
          <source>Fachportal-Paedagogik</source>
          <year>1981</year>
          <access-date>2023-11-29</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.fachportal-paedagogik.de/literatur/vollanzeige.html?FId=143768">https://www.fachportal-paedagogik.de/literatur/vollanzeige.html?FId=143768</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Breyer</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Bluemke</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Deutsche version der positive and negative affect schedule PANAS (GESIS panel)</article-title>
          <source>Zusammenstellung sozialwissenschaftlicher Items und Skalen (ZIS)</source>
          <year>2014</year>
          <fpage>1</fpage>
          <lpage>23</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.gesis.org/"/>
          </comment>
          <pub-id pub-id-type="doi">10.6102/zis242</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Reisenzein</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Junge</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Studtmann</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Huber</surname>
              <given-names>O</given-names>
            </name>
          </person-group>
          <article-title>Observational approaches to the measurement of emotions</article-title>
          <source>International Handbook of Emotions in Education</source>
          <year>2013</year>
          <publisher-loc>New York</publisher-loc>
          <publisher-name>Routledge Chapman &#38; Hall</publisher-name>
          <fpage>580</fpage>
          <lpage>606</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Landis</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Koch</surname>
              <given-names>GG</given-names>
            </name>
          </person-group>
          <article-title>The measurement of observer agreement for categorical data</article-title>
          <source>Biometrics</source>
          <year>1977</year>
          <month>03</month>
          <volume>33</volume>
          <issue>1</issue>
          <fpage>159</fpage>
          <pub-id pub-id-type="doi">10.2307/2529310</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cruz-Sandoval</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Penaloza</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Favela</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Castro-Coronel</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Towards social robots that support exercise therapies for persons with dementia</article-title>
          <year>2018</year>
          <conf-name>Proceedings of the 2018 ACM International Joint Conference and 2018 International Symposium on Pervasive and Ubiquitous Computing and Wearable Computers</conf-name>
          <conf-date>October 8-12</conf-date>
          <conf-loc>Singapore</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3267305.3267539</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pino</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Palestra</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Trevino</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>De Carolis</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>The humanoid robot NAO as trainer in a memory program for elderly people with mild cognitive impairment</article-title>
          <source>Int J Soc Robotics</source>
          <year>2019</year>
          <month>02</month>
          <day>27</day>
          <volume>12</volume>
          <issue>1</issue>
          <fpage>21</fpage>
          <lpage>33</lpage>
          <pub-id pub-id-type="doi">10.1007/s12369-019-00533-y</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vos</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>De Cock</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Petry</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Van Den Noortgate</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Maes</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Investigating the relationship between observed mood and emotions in people with severe and profound intellectual disabilities</article-title>
          <source>J Intellect Disabil Res</source>
          <year>2013</year>
          <month>05</month>
          <volume>57</volume>
          <issue>5</issue>
          <fpage>440</fpage>
          <lpage>451</lpage>
          <pub-id pub-id-type="doi">10.1111/jir.12021</pub-id>
          <pub-id pub-id-type="medline">23419208</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Obayashi</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kodate</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Masuyama</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Measuring the impact of age, gender and dementia on communication-robot interventions in residential care homes</article-title>
          <source>Geriatr Gerontol Int</source>
          <year>2020</year>
          <month>04</month>
          <volume>20</volume>
          <issue>4</issue>
          <fpage>373</fpage>
          <lpage>378</lpage>
          <pub-id pub-id-type="doi">10.1111/ggi.13890</pub-id>
          <pub-id pub-id-type="medline">32077237</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mahmoudi Asl</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Molinari Ulate</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Franco Martin</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>van der Roest</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Methodologies used to study the feasibility, usability, efficacy, and effectiveness of social robots for elderly adults: scoping review</article-title>
          <source>J Med Internet Res</source>
          <year>2022</year>
          <month>08</month>
          <day>01</day>
          <volume>24</volume>
          <issue>8</issue>
          <fpage>e37434</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2022/8/e37434/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/37434</pub-id>
          <pub-id pub-id-type="medline">35916695</pub-id>
          <pub-id pub-id-type="pii">v24i8e37434</pub-id>
          <pub-id pub-id-type="pmcid">PMC9379790</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Paetzel</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Perugia</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Castellano</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>The persistence of first impressions: the effect of repeated interactions on the perception of a social robot</article-title>
          <year>2020</year>
          <conf-name>15th ACM/IEEE International Conference on Human-Robot Interaction (HRI)</conf-name>
          <conf-date>March 23-26</conf-date>
          <conf-loc>Cambridge, United Kingdom</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3319502.3374786</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="web">
          <article-title>Ergonomics of human-system interaction. Part 210: Human-centred design for interactive systems</article-title>
          <source>ISO</source>
          <access-date>2021-09-21</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.iso.org/cms/render/live/en/sites/isoorg/contents/data/standard/07/75/77520.html">https://www.iso.org/cms/render/live/en/sites/isoorg/contents/data/standard/07/75/77520.html</ext-link>
          </comment>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
