2020
|
| Laura Gonzalez; Salam Daher; Greg Welch Neurological Assessment Using a Physical-Virtual Patient (PVP) Journal Article In: Simulation & Gaming, pp. 1–17, 2020. @article{Gonzalez2020aa,
title = {Neurological Assessment Using a Physical-Virtual Patient (PVP)},
author = {Laura Gonzalez and Salam Daher and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/08/Gonzalez2020aa.pdf},
year = {2020},
date = {2020-08-12},
journal = {Simulation & Gaming},
pages = {1--17},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Austin Erickson; Nahal Norouzi; Kangsoo Kim; Ryan Schubert; Jonathan Jules; Joseph J. LaViola Jr.; Gerd Bruder; Gregory F. Welch Sharing gaze rays for visual target identification tasks in collaborative augmented reality Journal Article In: Journal on Multimodal User Interfaces: Special Issue on Multimodal Interfaces and Communication Cues for Remote Collaboration, vol. 14, no. 4, pp. 353-371, 2020, ISSN: 1783-8738. @article{EricksonNorouzi2020,
title = {Sharing gaze rays for visual target identification tasks in collaborative augmented reality},
author = {Austin Erickson and Nahal Norouzi and Kangsoo Kim and Ryan Schubert and Jonathan Jules and Joseph J. LaViola Jr. and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/07/Erickson2020_Article_SharingGazeRaysForVisualTarget.pdf},
doi = {10.1007/s12193-020-00330-2},
issn = {1783-8738},
year = {2020},
date = {2020-07-09},
urldate = {2020-07-09},
journal = {Journal on Multimodal User Interfaces: Special Issue on Multimodal Interfaces and Communication Cues for Remote Collaboration},
volume = {14},
number = {4},
pages = {353-371},
abstract = {Augmented reality (AR) technologies provide a shared platform for users to collaborate in a physical context involving both real and virtual content. To enhance the quality of interaction between AR users, researchers have proposed augmenting users’ interpersonal space with embodied cues such as their gaze direction. While beneficial in achieving improved interpersonal spatial communication, such shared gaze environments suffer from multiple types of errors related to eye tracking and networking that can reduce objective performance and subjective experience. In this paper, we present a human-subjects study to understand the impact of accuracy, precision, latency, and dropout-based errors on users’ performance when using shared gaze cues to identify a target among a crowd of people. We simulated varying amounts of errors and the target distances and measured participants’ objective performance through their response time and error rate, and their subjective experience and cognitive load through questionnaires. We found significant differences suggesting that the simulated error levels had stronger effects on participants’ performance than target distance, with accuracy and latency having a high impact on participants’ error rate. We also observed that participants assessed their own performance as lower than it objectively was. We discuss implications for practical shared gaze applications and we present a multi-user prototype system.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
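The four simulated error types named in the abstract above (accuracy, precision, latency, and dropout) lend themselves to a compact simulation. The following is a minimal, hypothetical sketch, not taken from the paper, of how such errors might be injected into a tracked gaze direction before it is shared with a remote collaborator; the class and parameter names are illustrative assumptions.

import collections, math, random

def perturb(direction, angle_deg, rng):
    # Tilt a unit gaze direction by angle_deg toward a random perpendicular axis.
    x, y, z = direction
    r = (rng.random() + 0.001, rng.random() + 0.002, rng.random())
    ax = (y * r[2] - z * r[1], z * r[0] - x * r[2], x * r[1] - y * r[0])
    n = math.sqrt(sum(c * c for c in ax)) or 1.0
    ax = tuple(c / n for c in ax)
    a = math.radians(angle_deg)
    v = tuple(d * math.cos(a) + p * math.sin(a) for d, p in zip(direction, ax))
    n = math.sqrt(sum(c * c for c in v))
    return tuple(c / n for c in v)

class SimulatedGazeErrors:
    # Applies an accuracy offset, precision jitter, latency, and dropout to gaze samples.
    def __init__(self, accuracy_deg, precision_deg, latency_frames, dropout_prob, seed=0):
        self.accuracy_deg = accuracy_deg
        self.precision_deg = precision_deg
        self.dropout_prob = dropout_prob
        self.delay = collections.deque(maxlen=latency_frames + 1)
        self.rng = random.Random(seed)
        self.last_shown = None

    def step(self, true_direction):
        d = perturb(true_direction, self.accuracy_deg, self.rng)           # systematic offset
        d = perturb(d, self.rng.gauss(0.0, self.precision_deg), self.rng)  # frame-to-frame jitter
        self.delay.append(d)
        if self.rng.random() < self.dropout_prob:
            return self.last_shown  # dropped update: the previously shown ray persists (None on first frames)
        self.last_shown = self.delay[0]  # oldest buffered sample, i.e. latency_frames behind
        return self.last_shown

A study harness could sweep these four parameters per trial to approximate the error levels compared in the experiment.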
|
| Arup Kumar Ghosh; Charles E. Hughes; Pamela J. Wisniewski Circle of Trust: A New Approach to Mobile Online Safety for Teens and Parents Proceedings Article In: Proceedings of CHI Conference on Human Factors in Computing Systems, pp. 618:1-14, 2020. @inproceedings{Ghosh2020cot,
title = {Circle of Trust: A New Approach to Mobile Online Safety for Teens and Parents},
author = {Arup Kumar Ghosh and Charles E. Hughes and Pamela J. Wisniewski },
doi = {10.1145/3313831.3376747},
year = {2020},
date = {2020-04-25},
booktitle = {Proceedings of CHI Conference on Human Factors in Computing Systems},
pages = {618:1-14},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Salam Daher; Jason Hochreiter; Ryan Schubert; Laura Gonzalez; Juan Cendan; Mindi Anderson; Desiree A Diaz; Gregory F. Welch The Physical-Virtual Patient Simulator: A Physical Human Form with Virtual Appearance and Behavior Journal Article In: Simulation in Healthcare, vol. 15, no. 2, pp. 115–121, 2020, (see erratum at DOI: 10.1097/SIH.0000000000000481). @article{Daher2020aa,
title = {The Physical-Virtual Patient Simulator: A Physical Human Form with Virtual Appearance and Behavior},
author = {Salam Daher and Jason Hochreiter and Ryan Schubert and Laura Gonzalez and Juan Cendan and Mindi Anderson and Desiree A Diaz and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/06/Daher2020aa1.pdf
https://journals.lww.com/simulationinhealthcare/Fulltext/2020/04000/The_Physical_Virtual_Patient_Simulator__A_Physical.9.aspx
https://journals.lww.com/simulationinhealthcare/Fulltext/2020/06000/Erratum_to_the_Physical_Virtual_Patient_Simulator_.12.aspx},
doi = {10.1097/SIH.0000000000000409},
year = {2020},
date = {2020-04-01},
journal = {Simulation in Healthcare},
volume = {15},
number = {2},
pages = {115--121},
note = {see erratum at DOI: 10.1097/SIH.0000000000000481},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Zubin Choudhary; Kangsoo Kim; Ryan Schubert; Gerd Bruder; Gregory F. Welch Virtual Big Heads: Analysis of Human Perception and Comfort of Head Scales in Social Virtual Reality Proceedings Article In: Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces (IEEE VR), pp. 425-433, Atlanta, Georgia, 2020. @inproceedings{Choudhary2020vbh,
title = {Virtual Big Heads: Analysis of Human Perception and Comfort of Head Scales in Social Virtual Reality},
author = {Zubin Choudhary and Kangsoo Kim and Ryan Schubert and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/IEEEVR2020_BigHead.pdf
https://www.youtube.com/watch?v=14289nufYf0, YouTube Presentation},
doi = {10.1109/VR46266.2020.00-41},
year = {2020},
date = {2020-03-23},
booktitle = {Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces (IEEE VR)},
pages = {425-433},
address = {Atlanta, Georgia},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Austin Erickson; Gerd Bruder; Pamela J. Wisniewski; Greg Welch Examining Whether Secondary Effects of Temperature-Associated Virtual Stimuli Influence Subjective Perception of Duration Proceedings Article In: Proceedings of IEEE International Conference on Virtual Reality and 3D User Interfaces (IEEE VR), pp. 493-499, Atlanta, Georgia, 2020. @inproceedings{Erickson2020b,
title = {Examining Whether Secondary Effects of Temperature-Associated Virtual Stimuli Influence Subjective Perception of Duration},
author = {Austin Erickson and Gerd Bruder and Pamela J. Wisniewski and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/TimePerception_VR2020.pdf
https://www.youtube.com/watch?v=kG2M-cbjS3s&t=1s, YouTube Presentation},
doi = {10.1109/VR46266.2020.00-34},
year = {2020},
date = {2020-03-23},
urldate = {2020-03-23},
booktitle = {Proceedings of IEEE International Conference on Virtual Reality and 3D User Interfaces (IEEE VR)},
pages = {493-499},
address = {Atlanta, Georgia},
abstract = {Past work in augmented reality has shown that temperature-associated AR stimuli can induce warming and cooling sensations in the user, and prior work in psychology suggests that a person's body temperature can influence that person's sense of subjective perception of duration. In this paper, we present a user study to evaluate the relationship between temperature-associated virtual stimuli presented on an AR-HMD and the user's sense of subjective perception of duration and temperature. In particular, we investigate two independent variables: the apparent temperature of the virtual stimuli presented to the participant, which could be hot or cold, and the location of the stimuli, which could be in direct contact with the user, in indirect contact with the user, or both in direct and indirect contact simultaneously. We investigate how these variables affect the users' perception of duration and perception of body and environment temperature by having participants make prospective time estimations while observing the virtual stimulus and answering subjective questions regarding their body and environment temperatures. Our work confirms that temperature-associated virtual stimuli are capable of having significant effects on the users' perception of temperature, and highlights a possible limitation in the current augmented reality technology in that no secondary effects on the users' perception of duration were observed.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Austin Erickson; Kangsoo Kim; Gerd Bruder; Greg Welch Effects of Dark Mode Graphics on Visual Acuity and Fatigue with Virtual Reality Head-Mounted Displays Proceedings Article In: Proceedings of IEEE International Conference on Virtual Reality and 3D User Interfaces (IEEE VR), pp. 434-442, Atlanta, Georgia, 2020. @inproceedings{Erickson2020,
title = {Effects of Dark Mode Graphics on Visual Acuity and Fatigue with Virtual Reality Head-Mounted Displays},
author = {Austin Erickson and Kangsoo Kim and Gerd Bruder and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/VR2020_DarkMode2_0.pdf
https://www.youtube.com/watch?v=wePUk0xTLA0&t=5s, YouTube Presentation},
doi = {10.1109/VR46266.2020.00-40},
year = {2020},
date = {2020-03-23},
urldate = {2020-03-23},
booktitle = {Proceedings of IEEE International Conference on Virtual Reality and 3D User Interfaces (IEEE VR)},
pages = {434-442},
address = {Atlanta, Georgia},
abstract = {Current virtual reality (VR) head-mounted displays (HMDs) are characterized by a low angular resolution that makes it difficult to make out details, leading to reduced legibility of text and increased visual fatigue. Light-on-dark graphics modes, so-called ``dark mode'' graphics, are becoming more and more popular over a wide range of display technologies, and have been correlated with increased visual comfort and acuity, specifically when working in low-light environments, which suggests that they might provide significant advantages for VR HMDs.
In this paper, we present a human-subject study investigating the correlations between the color mode and the ambient lighting with respect to visual acuity and fatigue on VR HMDs.
We compare two color schemes, characterized by light letters on a dark background (dark mode), or dark letters on a light background (light mode), and show that the dark background in dark mode provides a significant advantage in terms of reduced visual fatigue and increased visual acuity in dim virtual environments on current HMDs. Based on our results, we discuss guidelines for user interfaces and applications.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Kangsoo Kim; Celso M. de Melo; Nahal Norouzi; Gerd Bruder; Gregory F. Welch Reducing Task Load with an Embodied Intelligent Virtual Assistant for Improved Performance in Collaborative Decision Making Proceedings Article In: Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces (IEEE VR), pp. 529-538, Atlanta, Georgia, 2020. @inproceedings{Kim2020rtl,
title = {Reducing Task Load with an Embodied Intelligent Virtual Assistant for Improved Performance in Collaborative Decision Making},
author = {Kangsoo Kim and Celso M. de Melo and Nahal Norouzi and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/IEEEVR2020_ARDesertSurvival.pdf
https://www.youtube.com/watch?v=G_iZ_asjp3I&t=6s, YouTube Presentation},
doi = {10.1109/VR46266.2020.00-30},
year = {2020},
date = {2020-03-23},
booktitle = {Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces (IEEE VR)},
pages = {529-538},
address = {Atlanta, Georgia},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Jacob Stuart; Ileri Akinnola; Frank Guido-Sanz; Mindi Anderson; Desiree Diaz; Greg Welch; Benjamin Lok [Poster] Applying Stress Management Techniques in Augmented Reality: Stress Induction and Reduction in Healthcare Providers during Virtual Triage Simulation Proceedings Article In: Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 171-172, 2020. @inproceedings{Stuart2019asm,
title = {[Poster] Applying Stress Management Techniques in Augmented Reality: Stress Induction and Reduction in Healthcare Providers during Virtual Triage Simulation},
author = {Jacob Stuart and Ileri Akinnola and Frank Guido-Sanz and Mindi Anderson and Desiree Diaz and Greg Welch and Benjamin Lok},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/07/09090656.pdf},
doi = {10.1109/VRW50115.2020.00037},
year = {2020},
date = {2020-03-22},
booktitle = {Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {171-172},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Kangsoo Kim; Nahal Norouzi; Austin Erickson [Tutorial] Developing Embodied Interactive Virtual Characters for Human-Subjects Studies Presentation 22.03.2020. @misc{Kim2020dei,
title = {[Tutorial] Developing Embodied Interactive Virtual Characters for Human-Subjects Studies},
author = {Kangsoo Kim and Nahal Norouzi and Austin Erickson},
url = {https://www.youtube.com/watch?v=UgT_-LVrQlc&list=PLMvKdHzC3SyacMfUj3qqd-pIjKmjtmwnz
https://sreal.ucf.edu/ieee-vr-2020-tutorial-developing-embodied-interactive-virtual-characters-for-human-subjects-studies/},
year = {2020},
date = {2020-03-22},
urldate = {2020-03-22},
booktitle = {IEEE International Conference on Virtual Reality and 3D User Interfaces (IEEE VR)},
keywords = {},
pubstate = {published},
tppubtype = {presentation}
}
|
| Nahal Norouzi Augmented Reality Animals: Are They Our Future Companions? Presentation 22.03.2020, (IEEE VR 2020 Doctoral Consortium). @misc{Norouzi2020,
title = {Augmented Reality Animals: Are They Our Future Companions?},
author = {Nahal Norouzi },
url = {https://sreal.ucf.edu/wp-content/uploads/2020/03/vr20c-sub1054-cam-i5.pdf},
year = {2020},
date = {2020-03-22},
abstract = {Previous research in the field of human-animal interaction has captured the multitude of benefits of this relationship on different aspects of human health. Existing limitations for accompanying pets/animals in some public spaces, allergies, and inability to provide adequate care for animals/pets limit the possible benefits of this relationship. However, the increased popularity of augmented reality and virtual reality devices and the introduction of new social behaviors since their utilization offer the opportunity of using such platforms for the realization of virtual animals and investigation of their influences on human perception and behavior.
In this paper, two prior experiments are presented, which were designed to provide a better understanding of the requirements of virtual animals in augmented reality as companions and investigate some of their capabilities in the provision of support. Through these findings, future research directions are identified and discussed.
},
note = {IEEE VR 2020 Doctoral Consortium},
keywords = {},
pubstate = {published},
tppubtype = {presentation}
}
|
| Austin Erickson; Nahal Norouzi; Kangsoo Kim; Joseph J. LaViola Jr.; Gerd Bruder; Gregory F. Welch Effects of Depth Information on Visual Target Identification Task Performance in Shared Gaze Environments Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 26, no. 5, pp. 1934-1944, 2020, ISSN: 1077-2626, (Presented at IEEE VR 2020). @article{Erickson2020c,
title = {Effects of Depth Information on Visual Target Identification Task Performance in Shared Gaze Environments},
author = {Austin Erickson and Nahal Norouzi and Kangsoo Kim and Joseph J. LaViola Jr. and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/shared_gaze_2_FINAL.pdf
https://www.youtube.com/watch?v=JQO_iosY62Y&t=6s, YouTube Presentation},
doi = {10.1109/TVCG.2020.2973054},
issn = {1077-2626},
year = {2020},
date = {2020-02-13},
urldate = {2020-02-13},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {26},
number = {5},
pages = {1934-1944},
abstract = {Human gaze awareness is important for social and collaborative interactions. Recent technological advances in augmented reality (AR) displays and sensors provide us with the means to extend collaborative spaces with real-time dynamic AR indicators of one's gaze, for example via three-dimensional cursors or rays emanating from a partner's head. However, such gaze cues are only as useful as the quality of the underlying gaze estimation and the accuracy of the display mechanism. Depending on the type of the visualization, and the characteristics of the errors, AR gaze cues could either enhance or interfere with collaborations. In this paper, we present two human-subject studies in which we investigate the influence of angular and depth errors, target distance, and the type of gaze visualization on participants' performance and subjective evaluation during a collaborative task with a virtual human partner, where participants identified targets within a dynamically walking crowd. First, our results show that there is a significant difference in performance for the two gaze visualizations ray and cursor in conditions with simulated angular and depth errors: the ray visualization provided significantly faster response times and fewer errors compared to the cursor visualization. Second, our results show that under optimal conditions, among four different gaze visualization methods, a ray without depth information provides the worst performance and is rated lowest, while a combination of a ray and cursor with depth information is rated highest. We discuss the subjective and objective performance thresholds and provide guidelines for practitioners in this field.},
note = {Presented at IEEE VR 2020},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
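To make the distinction between the two visualizations concrete, here is an illustrative sketch assuming a simple origin/direction/depth gaze representation (an assumption for illustration, not the authors' implementation): a ray conveys direction only, while a cursor additionally uses the estimated gaze depth to place a point in 3D, so depth errors displace the cursor along the ray.

from dataclasses import dataclass

@dataclass
class GazeSample:
    origin: tuple      # eye position in world coordinates (x, y, z)
    direction: tuple   # unit gaze direction
    depth: float       # estimated fixation depth in meters (possibly noisy)

def gaze_ray(sample, length=10.0):
    # Ray visualization: a segment from the origin along the gaze direction;
    # it conveys no depth, so the endpoint length is arbitrary.
    end = tuple(o + length * d for o, d in zip(sample.origin, sample.direction))
    return sample.origin, end

def gaze_cursor(sample):
    # Cursor visualization: a single 3D point at the estimated fixation depth;
    # an error in depth slides the cursor along the ray.
    return tuple(o + sample.depth * d for o, d in zip(sample.origin, sample.direction))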
|
| Andrei State; Herman Towles; Tyler Johnson; Ryan Schubert; Brendan Walters; Greg Welch; Henry Fuchs The A-Desk: A Unified Workspace of the Future Journal Article In: IEEE Computer Graphics and Applications, vol. 40, no. 1, pp. 56-71, 2020, ISSN: 1558-1756. @article{State2020aa,
title = {The A-Desk: A Unified Workspace of the Future},
author = {Andrei State and Herman Towles and Tyler Johnson and Ryan Schubert and Brendan Walters and Greg Welch and Henry Fuchs},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/01/State2020aa-1.pdf},
doi = {10.1109/MCG.2019.2951273},
issn = {1558-1756},
year = {2020},
date = {2020-01-01},
journal = {IEEE Computer Graphics and Applications},
volume = {40},
number = {1},
pages = {56-71},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
2019
|
| Myungho Lee; Nahal Norouzi; Gerd Bruder; Pamela J. Wisniewski; Gregory F. Welch Mixed Reality Tabletop Gameplay: Social Interaction with a Virtual Human Capable of Physical Influence Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 24, no. 8, pp. 1-12, 2019, ISSN: 1077-2626. @article{Lee2020,
title = {Mixed Reality Tabletop Gameplay: Social Interaction with a Virtual Human Capable of Physical Influence},
author = {Myungho Lee and Nahal Norouzi and Gerd Bruder and Pamela J. Wisniewski and Gregory F. Welch },
url = {https://sreal.ucf.edu/wp-content/uploads/2019/12/TVCG_Physical_Virtual_Table_2019.pdf},
doi = {10.1109/TVCG.2019.2959575},
issn = {1077-2626},
year = {2019},
date = {2019-12-18},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {24},
number = {8},
pages = {1-12},
abstract = {In this paper, we investigate the effects of the physical influence of a virtual human (VH) in the context of face-to-face interaction in a mixed reality environment. In Experiment 1, participants played a tabletop game with a VH, in which each player takes a turn and moves their own token along the designated spots on the shared table. We compared two conditions as follows: the VH in the virtual condition moves a virtual token that can only be seen through augmented reality (AR) glasses, while the VH in the physical condition moves a physical token as the participants do; therefore the VH’s token can be seen even in the periphery of the AR glasses. For the physical condition, we designed an actuator system underneath the table. The actuator moves a magnet under the table which then moves the VH’s physical token over the surface of the table. Our results indicate that participants felt higher co-presence with the VH in the physical condition, and participants assessed the VH as a more physical entity compared to the VH in the virtual condition. We further observed transference effects when participants attributed the VH’s ability to move physical objects to other elements in the real world. Also, the VH’s physical influence improved participants’ overall experience with the VH. In Experiment 2, we further looked into the question how the physical-virtual latency in movements affected the perceived plausibility of the VH’s interaction with the real world. Our results indicate that a slight temporal difference between the physical token reacting to the virtual hand’s movement increased the perceived realism and causality of the mixed reality interaction. We discuss potential explanations for the findings and implications for future shared mixed reality tabletop setups.
},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
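The latency manipulation in Experiment 2 can be pictured as replaying the virtual hand's position with a configurable delay before driving the under-table magnet. The sketch below is hypothetical; the actuator interface and all names are assumptions rather than the authors' control code.

import collections, time

class DelayedTokenDriver:
    def __init__(self, actuator, delay_s=0.2):
        self.actuator = actuator             # assumed object exposing move_to(x, y)
        self.delay_s = delay_s               # physical reaction lag behind the virtual hand
        self.history = collections.deque()   # buffered (timestamp, x, y) samples

    def update(self, hand_xy, now=None):
        # Record the virtual hand position, then replay the sample from delay_s seconds ago.
        now = time.monotonic() if now is None else now
        self.history.append((now, hand_xy[0], hand_xy[1]))
        target = None
        while self.history and self.history[0][0] <= now - self.delay_s:
            _, x, y = self.history.popleft()
            target = (x, y)
        if target is not None:
            self.actuator.move_to(*target)   # magnet (and token) follow with the chosen delay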
|
| Kangsoo Kim; Nahal Norouzi; Tiffany Losekamp; Gerd Bruder; Mindi Anderson; Gregory Welch Effects of Patient Care Assistant Embodiment and Computer Mediation on User Experience Proceedings Article In: Proceedings of the IEEE International Conference on Artificial Intelligence & Virtual Reality (AIVR), pp. 17-24, IEEE, 2019. @inproceedings{Kim2019epc,
title = {Effects of Patient Care Assistant Embodiment and Computer Mediation on User Experience},
author = {Kangsoo Kim and Nahal Norouzi and Tiffany Losekamp and Gerd Bruder and Mindi Anderson and Gregory Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/11/AIVR2019_Caregiver.pdf},
doi = {10.1109/AIVR46125.2019.00013},
year = {2019},
date = {2019-12-09},
booktitle = {Proceedings of the IEEE International Conference on Artificial Intelligence & Virtual Reality (AIVR)},
pages = {17-24},
publisher = {IEEE},
abstract = {Providers of patient care environments are facing an increasing demand for technological solutions that can facilitate increased patient satisfaction while being cost effective and practically feasible. Recent developments with respect to smart hospital room setups and smart home care environments have an immense potential to leverage advances in technologies such as Intelligent Virtual Agents, Internet of Things devices, and Augmented Reality to enable novel forms of patient interaction with caregivers and their environment.
In this paper, we present a human-subjects study in which we compared four types of simulated patient care environments for a range of typical tasks. In particular, we tested two forms of caregiver mediation with a real person or a virtual agent, and we compared two forms of caregiver embodiment with disembodied verbal or embodied interaction. Our results show that, as expected, a real caregiver provides the optimal user experience, but an embodied virtual assistant is also a viable option for patient care environments, providing significantly higher social presence and engagement than voice-only interaction. We discuss the implications in the field of patient care and digital assistants.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Alyssa Tanaka; Brian Stensrud; Greg Welch; Francisco Guido-Sanz; Lee Sciarini; Henry Phillips The Development and Implementation of Speech Understanding for Medical Handoff Training Proceedings Article In: Proceedings of 2019 Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC 2019), Orlando, Florida, U.S.A., 2019. @inproceedings{Tanaka2019aa,
title = {The Development and Implementation of Speech Understanding for Medical Handoff Training},
author = {Alyssa Tanaka and Brian Stensrud and Greg Welch and Francisco Guido-Sanz and Lee Sciarini and Henry Phillips},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/12/Tanaka2019aa.pdf},
year = {2019},
date = {2019-12-01},
booktitle = {Proceedings of 2019 Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC 2019)},
address = {Orlando, Florida, U.S.A.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Nahal Norouzi; Austin Erickson; Kangsoo Kim; Ryan Schubert; Joseph J. LaViola Jr.; Gerd Bruder; Gregory F. Welch Effects of Shared Gaze Parameters on Visual Target Identification Task Performance in Augmented Reality Proceedings Article In: Proceedings of the ACM Symposium on Spatial User Interaction (SUI), pp. 12:1-12:11, ACM, 2019, ISBN: 978-1-4503-6975-6/19/10, (Best Paper Award). @inproceedings{Norouzi2019esg,
title = {Effects of Shared Gaze Parameters on Visual Target Identification Task Performance in Augmented Reality},
author = {Nahal Norouzi and Austin Erickson and Kangsoo Kim and Ryan Schubert and Joseph J. LaViola Jr. and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/10/a12-norouzi.pdf},
doi = {10.1145/3357251.3357587},
isbn = {978-1-4503-6975-6/19/10},
year = {2019},
date = {2019-10-19},
urldate = {2019-10-19},
booktitle = {Proceedings of the ACM Symposium on Spatial User Interaction (SUI)},
pages = {12:1-12:11},
publisher = {ACM},
abstract = {Augmented reality (AR) technologies provide a shared platform for users to collaborate in a physical context involving both real and virtual content. To enhance the quality of interaction between AR users, researchers have proposed augmenting users' interpersonal space with embodied cues such as their gaze direction. While beneficial in achieving improved interpersonal spatial communication, such shared gaze environments suffer from multiple types of errors related to eye tracking and networking that can reduce objective performance and subjective experience.
In this paper, we conducted a human-subject study to understand the impact of accuracy, precision, latency, and dropout-based errors on users' performance when using shared gaze cues to identify a target among a crowd of people. We simulated varying amounts of errors and the target distances and measured participants' objective performance through their response time and error rate, and their subjective experience and cognitive load through questionnaires. We found some significant differences suggesting that the simulated error levels had stronger effects on participants' performance than target distance, with accuracy and latency having a high impact on participants' error rate. We also observed that participants assessed their own performance as lower than it objectively was, and we discuss implications for practical shared gaze applications.},
note = {Best Paper Award},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Austin Erickson; Ryan Schubert; Kangsoo Kim; Gerd Bruder; Greg Welch Is It Cold in Here or Is It Just Me? Analysis of Augmented Reality Temperature Visualization for Computer-Mediated Thermoception Proceedings Article In: Proceedings of the IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pp. 319-327, IEEE, 2019, ISBN: 978-1-7281-4765-9. @inproceedings{Erickson2019iic,
title = {Is It Cold in Here or Is It Just Me? Analysis of Augmented Reality Temperature Visualization for Computer-Mediated Thermoception},
author = {Austin Erickson and Ryan Schubert and Kangsoo Kim and Gerd Bruder and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/10/Erickson2019IIC.pdf},
doi = {10.1109/ISMAR.2019.00046},
isbn = {978-1-7281-4765-9},
year = {2019},
date = {2019-10-19},
urldate = {2019-10-19},
booktitle = {Proceedings of the IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
pages = {319-327},
publisher = {IEEE},
abstract = {Modern augmented reality (AR) head-mounted displays comprise a multitude of sensors that allow them to sense the environment around them. We have extended these capabilities by mounting two heat-wavelength infrared cameras to a Microsoft HoloLens, facilitating the acquisition of thermal data and enabling stereoscopic thermal overlays in the user’s augmented view. The ability to visualize live thermal information opens several avenues of investigation on how that thermal awareness may affect a user’s thermoception. We present a human-subject study, in which we simulated different temperature shifts using either heat vision overlays or 3D AR virtual effects associated with thermal cause-effect relationships (e.g., flames burn and ice cools). We further investigated differences in estimated temperatures when the stimuli were applied to either the user’s body or their environment. Our analysis showed significant effects and first trends for the AR virtual effects and heat vision, respectively, on participants’ temperature estimates for their body and the environment though with different strengths and characteristics, which we discuss in this paper. },
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
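As a rough illustration of the overlay described above, the following sketch maps per-pixel temperatures from a thermal camera frame to a blue-to-red false color with partial transparency; the value ranges and function names are assumptions for illustration, not details from the paper.

def temperature_to_rgba(temp_c, t_min=15.0, t_max=40.0, alpha=0.6):
    # Normalize the temperature into [0, 1] and interpolate blue (cold) -> red (hot).
    t = (temp_c - t_min) / (t_max - t_min)
    t = max(0.0, min(1.0, t))
    return (t, 0.0, 1.0 - t, alpha)

def thermal_overlay(thermal_frame):
    # thermal_frame: 2D list of temperatures in degrees Celsius from the IR camera.
    return [[temperature_to_rgba(temp) for temp in row] for row in thermal_frame]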
|
| Kendra Richards; Nikhil Mahalanobis; Kangsoo Kim; Ryan Schubert; Myungho Lee; Salam Daher; Nahal Norouzi; Jason Hochreiter; Gerd Bruder; Gregory F. Welch Analysis of Peripheral Vision and Vibrotactile Feedback During Proximal Search Tasks with Dynamic Virtual Entities in Augmented Reality Proceedings Article In: Proceedings of the ACM Symposium on Spatial User Interaction (SUI), pp. 3:1-3:9, ACM, 2019, ISBN: 978-1-4503-6975-6/19/10. @inproceedings{Richards2019b,
title = {Analysis of Peripheral Vision and Vibrotactile Feedback During Proximal Search Tasks with Dynamic Virtual Entities in Augmented Reality},
author = {Kendra Richards and Nikhil Mahalanobis and Kangsoo Kim and Ryan Schubert and Myungho Lee and Salam Daher and Nahal Norouzi and Jason Hochreiter and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/10/Richards2019b.pdf},
doi = {10.1145/3357251.3357585},
isbn = {978-1-4503-6975-6/19/10},
year = {2019},
date = {2019-10-19},
booktitle = {Proceedings of the ACM Symposium on Spatial User Interaction (SUI)},
pages = {3:1-3:9},
publisher = {ACM},
abstract = {A primary goal of augmented reality (AR) is to seamlessly embed virtual content into a real environment. There are many factors that can affect the perceived physicality and co-presence of virtual entities, including the hardware capabilities, the fidelity of the virtual behaviors, and sensory feedback associated with the interactions. In this paper, we present a study investigating participants' perceptions and behaviors during a time-limited search task in close proximity with virtual entities in AR. In particular, we analyze the effects of (i) visual conflicts in the periphery of an optical see-through head-mounted display, a Microsoft HoloLens, (ii) overall lighting in the physical environment, and (iii) multimodal feedback based on vibrotactile transducers mounted on a physical platform. Our results show significant benefits of vibrotactile feedback and reduced peripheral lighting for spatial and social presence, and engagement. We discuss implications of these effects for AR applications.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Kangsoo Kim; Austin Erickson; Alexis Lambert; Gerd Bruder; Gregory F. Welch Effects of Dark Mode on Visual Fatigue and Acuity in Optical See-Through Head-Mounted Displays Proceedings Article In: Proceedings of the ACM Symposium on Spatial User Interaction (SUI), pp. 9:1-9:9, ACM, 2019, ISBN: 978-1-4503-6975-6/19/10. @inproceedings{Kim2019edm,
title = {Effects of Dark Mode on Visual Fatigue and Acuity in Optical See-Through Head-Mounted Displays},
author = {Kangsoo Kim and Austin Erickson and Alexis Lambert and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/10/Kim2019edm.pdf},
doi = {10.1145/3357251.3357584},
isbn = {978-1-4503-6975-6/19/10},
year = {2019},
date = {2019-10-19},
urldate = {2019-10-19},
booktitle = {Proceedings of the ACM Symposium on Spatial User Interaction (SUI)},
pages = {9:1-9:9},
publisher = {ACM},
abstract = {Light-on-dark color schemes, so-called "Dark Mode," are becoming more and more popular over a wide range of display technologies and application fields. Many people who have to look at computer screens for hours at a time, such as computer programmers and computer graphics artists, indicate a preference for switching colors on a computer screen from dark text on a light background to light text on a dark background due to perceived advantages related to visual comfort and acuity, specifically when working in low-light environments.
In this paper, we investigate the effects of dark mode color schemes in the field of optical see-through head-mounted displays (OST-HMDs), where the characteristic "additive" light model implies that bright graphics are visible but dark graphics are transparent. We describe a human-subject study in which we evaluated a normal and inverted color mode in front of different physical backgrounds and among different lighting conditions. Our results show that dark mode graphics on OST-HMDs have significant benefits for visual acuity, fatigue, and usability, while user preferences depend largely on the lighting in the physical environment. We discuss the implications of these effects on user interfaces and applications.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
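The "additive" light model mentioned in the abstract can be illustrated with a toy computation; this is a simplification for intuition, not the display's actual rendering pipeline. The light reaching the eye is roughly the real-world background plus the emitted pixel, so a black pixel adds nothing (and appears transparent) while bright pixels remain visible.

def perceived_color(background_rgb, display_rgb):
    # Additive model: clamp the sum of real-world light and emitted display light to [0, 1].
    return tuple(min(1.0, b + d) for b, d in zip(background_rgb, display_rgb))

bright_room = (0.8, 0.8, 0.8)
print(perceived_color(bright_room, (0.0, 0.0, 0.0)))  # "black" text disappears: (0.8, 0.8, 0.8)
print(perceived_color(bright_room, (1.0, 1.0, 1.0)))  # bright text saturates: (1.0, 1.0, 1.0)

In dark mode the text is the bright, visible part and the dark background simply drops out, which is consistent with the benefits reported above.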
|