2020
|
| Austin Erickson; Nahal Norouzi; Kangsoo Kim; Joseph J. LaViola Jr.; Gerd Bruder; Gregory F. Welch Effects of Depth Information on Visual Target Identification Task Performance in Shared Gaze Environments Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 26, no. 5, pp. 1934-1944, 2020, ISSN: 1077-2626, (Presented at IEEE VR 2020). @article{Erickson2020c,
title = {Effects of Depth Information on Visual Target Identification Task Performance in Shared Gaze Environments},
author = {Austin Erickson and Nahal Norouzi and Kangsoo Kim and Joseph J. LaViola Jr. and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/02/shared_gaze_2_FINAL.pdf
https://www.youtube.com/watch?v=JQO_iosY62Y&t=6s, YouTube Presentation},
doi = {10.1109/TVCG.2020.2973054},
issn = {1077-2626},
year = {2020},
date = {2020-02-13},
urldate = {2020-02-13},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {26},
number = {5},
pages = {1934--1944},
abstract = {Human gaze awareness is important for social and collaborative interactions. Recent technological advances in augmented reality (AR) displays and sensors provide us with the means to extend collaborative spaces with real-time dynamic AR indicators of one's gaze, for example via three-dimensional cursors or rays emanating from a partner's head. However, such gaze cues are only as useful as the quality of the underlying gaze estimation and the accuracy of the display mechanism. Depending on the type of the visualization, and the characteristics of the errors, AR gaze cues could either enhance or interfere with collaborations. In this paper, we present two human-subject studies in which we investigate the influence of angular and depth errors, target distance, and the type of gaze visualization on participants' performance and subjective evaluation during a collaborative task with a virtual human partner, where participants identified targets within a dynamically walking crowd. First, our results show that there is a significant difference in performance for the two gaze visualizations ray and cursor in conditions with simulated angular and depth errors: the ray visualization provided significantly faster response times and fewer errors compared to the cursor visualization. Second, our results show that under optimal conditions, among four different gaze visualization methods, a ray without depth information provides the worst performance and is rated lowest, while a combination of a ray and cursor with depth information is rated highest. We discuss the subjective and objective performance thresholds and provide guidelines for practitioners in this field.},
note = {Presented at IEEE VR 2020},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Human gaze awareness is important for social and collaborative interactions. Recent technological advances in augmented reality (AR) displays and sensors provide us with the means to extend collaborative spaces with real-time dynamic AR indicators of one's gaze, for example via three-dimensional cursors or rays emanating from a partner's head. However, such gaze cues are only as useful as the quality of the underlying gaze estimation and the accuracy of the display mechanism. Depending on the type of the visualization, and the characteristics of the errors, AR gaze cues could either enhance or interfere with collaborations. In this paper, we present two human-subject studies in which we investigate the influence of angular and depth errors, target distance, and the type of gaze visualization on participants' performance and subjective evaluation during a collaborative task with a virtual human partner, where participants identified targets within a dynamically walking crowd. First, our results show that there is a significant difference in performance for the two gaze visualizations ray and cursor in conditions with simulated angular and depth errors: the ray visualization provided significantly faster response times and fewer errors compared to the cursor visualization. Second, our results show that under optimal conditions, among four different gaze visualization methods, a ray without depth information provides the worst performance and is rated lowest, while a combination of a ray and cursor with depth information is rated highest. We discuss the subjective and objective performance thresholds and provide guidelines for practitioners in this field. |
| Andrei State; Herman Towles; Tyler Johnson; Ryan Schubert; Brendan Walters; Greg Welch; Henry Fuchs The A-Desk: A Unified Workspace of the Future Journal Article In: IEEE Computer Graphics and Applications, vol. 40, no. 1, pp. 56-71, 2020, ISSN: 1558-1756. @article{State2020aa,
title = {The {A-Desk}: A Unified Workspace of the Future},
author = {Andrei State and Herman Towles and Tyler Johnson and Ryan Schubert and Brendan Walters and Greg Welch and Henry Fuchs},
url = {https://sreal.ucf.edu/wp-content/uploads/2020/01/State2020aa-1.pdf},
doi = {10.1109/MCG.2019.2951273},
issn = {1558-1756},
year = {2020},
date = {2020-01-01},
journal = {IEEE Computer Graphics and Applications},
volume = {40},
number = {1},
pages = {56--71},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
2019
|
| Myungho Lee; Nahal Norouzi; Gerd Bruder; Pamela J. Wisniewski; Gregory F. Welch Mixed Reality Tabletop Gameplay: Social Interaction with a Virtual Human Capable of Physical Influence Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 24, no. 8, pp. 1-12, 2019, ISSN: 1077-2626. @article{Lee2020,
title = {Mixed Reality Tabletop Gameplay: Social Interaction with a Virtual Human Capable of Physical Influence},
author = {Myungho Lee and Nahal Norouzi and Gerd Bruder and Pamela J. Wisniewski and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/12/TVCG_Physical_Virtual_Table_2019.pdf},
doi = {10.1109/TVCG.2019.2959575},
issn = {1077-2626},
year = {2019},
date = {2019-12-18},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {24},
number = {8},
pages = {1--12},
abstract = {In this paper, we investigate the effects of the physical influence of a virtual human (VH) in the context of face-to-face interaction in a mixed reality environment. In Experiment 1, participants played a tabletop game with a VH, in which each player takes a turn and moves their own token along the designated spots on the shared table. We compared two conditions as follows: the VH in the virtual condition moves a virtual token that can only be seen through augmented reality (AR) glasses, while the VH in the physical condition moves a physical token as the participants do; therefore the VH’s token can be seen even in the periphery of the AR glasses. For the physical condition, we designed an actuator system underneath the table. The actuator moves a magnet under the table which then moves the VH’s physical token over the surface of the table. Our results indicate that participants felt higher co-presence with the VH in the physical condition, and participants assessed the VH as a more physical entity compared to the VH in the virtual condition. We further observed transference effects when participants attributed the VH’s ability to move physical objects to other elements in the real world. Also, the VH’s physical influence improved participants’ overall experience with the VH. In Experiment 2, we further looked into the question how the physical-virtual latency in movements affected the perceived plausibility of the VH’s interaction with the real world. Our results indicate that a slight temporal difference between the physical token reacting to the virtual hand’s movement increased the perceived realism and causality of the mixed reality interaction. We discuss potential explanations for the findings and implications for future shared mixed reality tabletop setups.
},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
In this paper, we investigate the effects of the physical influence of a virtual human (VH) in the context of face-to-face interaction in a mixed reality environment. In Experiment 1, participants played a tabletop game with a VH, in which each player takes a turn and moves their own token along the designated spots on the shared table. We compared two conditions as follows: the VH in the virtual condition moves a virtual token that can only be seen through augmented reality (AR) glasses, while the VH in the physical condition moves a physical token as the participants do; therefore the VH’s token can be seen even in the periphery of the AR glasses. For the physical condition, we designed an actuator system underneath the table. The actuator moves a magnet under the table which then moves the VH’s physical token over the surface of the table. Our results indicate that participants felt higher co-presence with the VH in the physical condition, and participants assessed the VH as a more physical entity compared to the VH in the virtual condition. We further observed transference effects when participants attributed the VH’s ability to move physical objects to other elements in the real world. Also, the VH’s physical influence improved participants’ overall experience with the VH. In Experiment 2, we further looked into the question how the physical-virtual latency in movements affected the perceived plausibility of the VH’s interaction with the real world. Our results indicate that a slight temporal difference between the physical token reacting to the virtual hand’s movement increased the perceived realism and causality of the mixed reality interaction. We discuss potential explanations for the findings and implications for future shared mixed reality tabletop setups.
|
| Susanne Schmidt; Gerd Bruder; Frank Steinicke Effects of Virtual Agent and Object Representation on Experiencing Exhibited Artifacts Journal Article In: Elsevier Computers and Graphics, vol. 83, pp. 1-10, 2019. @article{Schmidt2019,
title = {Effects of Virtual Agent and Object Representation on Experiencing Exhibited Artifacts},
author = {Susanne Schmidt and Gerd Bruder and Frank Steinicke},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/07/Schmidt2019.pdf},
doi = {10.1016/j.cag.2019.06.002},
year = {2019},
date = {2019-10-01},
journal = {Elsevier Computers and Graphics},
volume = {83},
pages = {1--10},
abstract = {With the emergence of speech-controlled virtual agents (VAs) in consumer devices such as Amazon’s Echo or Apple’s HomePod, we have seen a large public interest in related technologies. While most of the current interactive conversational VAs appear in the form of voice-only assistants, other representations showing, for example, a contextually related or generic humanoid body are possible. In our previous work, we analyzed the effectiveness of different forms of VAs in the context of a virtual reality (VR) exhibition space. We found positive evidence that agent embodiment induces a higher sense of spatial and social presence. The results also suggest that both embodied and thematically related audio-visual representations of VAs positively affect the overall user experience. We extend this work by further analyzing the effects of the physicality of the agent’s environment (i.e., virtual vs. real). The results of the follow-up study indicate some benefits of virtual environments, e.g., regarding user engagement and learning of visual facts. We also evaluate some interaction effects between the representations of the virtual agent and its surrounding and discuss implications on the design of exhibition spaces.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
With the emergence of speech-controlled virtual agents (VAs) in consumer devices such as Amazon’s Echo or Apple’s HomePod, we have seen a large public interest in related technologies. While most of the current interactive conversational VAs appear in the form of voice-only assistants, other representations showing, for example, a contextually related or generic humanoid body are possible. In our previous work, we analyzed the effectiveness of different forms of VAs in the context of a virtual reality (VR) exhibition space. We found positive evidence that agent embodiment induces a higher sense of spatial and social presence. The results also suggest that both embodied and thematically related audio-visual representations of VAs positively affect the overall user experience. We extend this work by further analyzing the effects of the physicality of the agent’s environment (i.e., virtual vs. real). The results of the follow-up study indicate some benefits of virtual environments, e.g., regarding user engagement and learning of visual facts. We also evaluate some interaction effects between the representations of the virtual agent and its surrounding and discuss implications on the design of exhibition spaces. |
| Lisa Dieker; Carrie Straub; Michael Hynes; Charles Hughes; Caitlyn Bukathy; Taylor Bousfield; Samantha Mrstik Using Virtual Rehearsal in a Simulator to Impact the Performance of Science Teachers Journal Article In: International Journal of Gaming and Computer-Mediated Simulations, vol. 11, no. 4, pp. 1-20, 2019. @article{Dieker2019uvr,
title = {Using Virtual Rehearsal in a Simulator to Impact the Performance of Science Teachers},
author = {Lisa Dieker and Carrie Straub and Michael Hynes and Charles Hughes and Caitlyn Bukathy and Taylor Bousfield and Samantha Mrstik},
doi = {10.4018/IJGCMS.2019100101},
year = {2019},
date = {2019-10-01},
journal = {International Journal of Gaming and Computer-Mediated Simulations},
volume = {11},
number = {4},
pages = {1--20},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Kangsoo Kim; Ryan Schubert; Jason Hochreiter; Gerd Bruder; Gregory Welch Blowing in the Wind: Increasing Social Presence with a Virtual Human via Environmental Airflow Interaction in Mixed Reality Journal Article In: Elsevier Computers and Graphics, vol. 83, no. October 2019, pp. 23-32, 2019. @article{Kim2019blow,
title = {Blowing in the Wind: Increasing Social Presence with a Virtual Human via Environmental Airflow Interaction in Mixed Reality},
author = {Kangsoo Kim and Ryan Schubert and Jason Hochreiter and Gerd Bruder and Gregory Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/06/ELSEVIER_C_G2019_Special_BlowWindinMR_ICAT_EGVE2018_20190606_reduced.pdf},
doi = {10.1016/j.cag.2019.06.006},
year = {2019},
date = {2019-07-05},
journal = {Elsevier Computers and Graphics},
volume = {83},
number = {October 2019},
pages = {23--32},
abstract = {In this paper, we describe two human-subject studies in which we explored and investigated the effects of subtle multimodal interaction on social presence with a virtual human (VH) in mixed reality (MR). In the studies, participants interacted with a VH, which was co-located with them across a table, with two different platforms: a projection based MR environment and an optical see-through head-mounted display (OST-HMD) based MR environment. While the two studies were not intended to be directly comparable, the second study with an OST-HMD was carefully designed based on the insights and lessons learned from the first projection-based study. For both studies, we compared two levels of gradually increased multimodal interaction: (i) virtual objects being affected by real airflow (e.g., as commonly experienced with fans during warm weather), and (ii) a VH showing awareness of this airflow. We hypothesized that our two levels of treatment would increase the sense of being together with the VH gradually, i.e., participants would report higher social presence with airflow influence than without it, and the social presence would be even higher when the VH showed awareness of the airflow. We observed an increased social presence in the second study when both physical–virtual interaction via airflow and VH awareness behaviors were present, but we observed no clear difference in participant-reported social presence with the VH in the first study. As the considered environmental factors are incidental to the direct interaction with the real human, i.e., they are not significant or necessary for the interaction task, they can provide a reasonably generalizable approach to increase social presence in HMD-based MR environments beyond the specific scenario and environment described here.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
In this paper, we describe two human-subject studies in which we explored and investigated the effects of subtle multimodal interaction on social presence with a virtual human (VH) in mixed reality (MR). In the studies, participants interacted with a VH, which was co-located with them across a table, with two different platforms: a projection based MR environment and an optical see-through head-mounted display (OST-HMD) based MR environment. While the two studies were not intended to be directly comparable, the second study with an OST-HMD was carefully designed based on the insights and lessons learned from the first projection-based study. For both studies, we compared two levels of gradually increased multimodal interaction: (i) virtual objects being affected by real airflow (e.g., as commonly experienced with fans during warm weather), and (ii) a VH showing awareness of this airflow. We hypothesized that our two levels of treatment would increase the sense of being together with the VH gradually, i.e., participants would report higher social presence with airflow influence than without it, and the social presence would be even higher when the VH showed awareness of the airflow. We observed an increased social presence in the second study when both physical–virtual interaction via airflow and VH awareness behaviors were present, but we observed no clear difference in participant-reported social presence with the VH in the first study. As the considered environmental factors are incidental to the direct interaction with the real human, i.e., they are not significant or necessary for the interaction task, they can provide a reasonably generalizable approach to increase social presence in HMD-based MR environments beyond the specific scenario and environment described here. |
| Nahal Norouzi; Luke Bölling; Gerd Bruder; Gregory F. Welch Augmented Rotations in Virtual Reality for Users with a Reduced Range of Head Movement Journal Article In: Journal of Rehabilitation and Assistive Technologies Engineering, vol. 6, pp. 1-9, 2019. @article{Norouzi2019c,
title = {Augmented Rotations in Virtual Reality for Users with a Reduced Range of Head Movement},
author = {Nahal Norouzi and Luke Bölling and Gerd Bruder and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2019/05/RATE2019_AugmentedRotations.pdf},
doi = {10.1177/2055668319841309},
year = {2019},
date = {2019-05-21},
journal = {Journal of Rehabilitation and Assistive Technologies Engineering},
volume = {6},
pages = {1--9},
abstract = {Introduction: A large body of research in the field of virtual reality (VR) is focused on making user interfaces more natural and intuitive by leveraging natural body movements to explore a virtual environment. For example, head-tracked user interfaces allow users to naturally look around a virtual space by moving their head. However, such approaches may not be appropriate for users with temporary or permanent limitations of their head movement.
Methods: In this paper, we present techniques that allow these users to get virtual benefits from a reduced range of physical movements. Specifically, we describe two techniques that augment virtual rotations relative to physical movement thresholds.
Results: We describe how each of the two techniques can be implemented with either a head tracker or an eye tracker, e.g., in cases when no physical head rotations are possible.
Conclusions: We discuss their differences and limitations and we provide guidelines for the practical use of such augmented user interfaces.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Introduction: A large body of research in the field of virtual reality (VR) is focused on making user interfaces more natural and intuitive by leveraging natural body movements to explore a virtual environment. For example, head-tracked user interfaces allow users to naturally look around a virtual space by moving their head. However, such approaches may not be appropriate for users with temporary or permanent limitations of their head movement.
Methods: In this paper, we present techniques that allow these users to get virtual benefits from a reduced range of physical movements. Specifically, we describe two techniques that augment virtual rotations relative to physical movement thresholds.
Results: We describe how each of the two techniques can be implemented with either a head tracker or an eye tracker,e.g., in cases when no physical head rotations are possible.
Conclusions: We discuss their differences and limitations and we provide guidelines for the practical use of such augmented user interfaces. |
| Mark Roman Miller; Hanseul Jun; Fernanda Herrera; Jacob Yu Villa; Greg Welch; Jeremy N Bailenson Social Interaction in Augmented Reality Journal Article In: PLOS ONE, vol. 14, no. 5, pp. 1-26, 2019. @article{Miller2019,
title = {Social Interaction in Augmented Reality},
author = {Mark Roman Miller and Hanseul Jun and Fernanda Herrera and Jacob Yu Villa and Greg Welch and Jeremy N Bailenson},
url = {https://doi.org/10.1371/journal.pone.0216290
https://sreal.ucf.edu/wp-content/uploads/2019/05/Miller2019.pdf},
doi = {10.1371/journal.pone.0216290},
year = {2019},
date = {2019-05-01},
journal = {PLOS ONE},
volume = {14},
number = {5},
pages = {1--26},
publisher = {Public Library of Science},
abstract = {There have been decades of research on the usability and educational value of augmented reality. However, less is known about how augmented reality affects social interactions. The current paper presents three studies that test the social psychological effects of augmented reality. Study 1 examined participants’ task performance in the presence of embodied agents and replicated the typical pattern of social facilitation and inhibition. Participants performed a simple task better, but a hard task worse, in the presence of an agent compared to when participants complete the tasks alone. Study 2 examined nonverbal behavior. Participants met an agent sitting in one of two chairs and were asked to choose one of the chairs to sit on. Participants wearing the headset never sat directly on the agent when given the choice of two seats, and while approaching, most of the participants chose the rotation direction to avoid turning their heads away from the agent. A separate group of participants chose a seat after removing the augmented reality headset, and the majority still avoided the seat previously occupied by the agent. Study 3 examined the social costs of using an augmented reality headset with others who are not using a headset. Participants talked in dyads, and augmented reality users reported less social connection to their partner compared to those not using augmented reality. Overall, these studies provide evidence suggesting that task performance, nonverbal behavior, and social connectedness are significantly affected by the presence or absence of virtual content.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
There have been decades of research on the usability and educational value of augmented reality. However, less is known about how augmented reality affects social interactions. The current paper presents three studies that test the social psychological effects of augmented reality. Study 1 examined participants’ task performance in the presence of embodied agents and replicated the typical pattern of social facilitation and inhibition. Participants performed a simple task better, but a hard task worse, in the presence of an agent compared to when participants complete the tasks alone. Study 2 examined nonverbal behavior. Participants met an agent sitting in one of two chairs and were asked to choose one of the chairs to sit on. Participants wearing the headset never sat directly on the agent when given the choice of two seats, and while approaching, most of the participants chose the rotation direction to avoid turning their heads away from the agent. A separate group of participants chose a seat after removing the augmented reality headset, and the majority still avoided the seat previously occupied by the agent. Study 3 examined the social costs of using an augmented reality headset with others who are not using a headset. Participants talked in dyads, and augmented reality users reported less social connection to their partner compared to those not using augmented reality. Overall, these studies provide evidence suggesting that task performance, nonverbal behavior, and social connectedness are significantly affected by the presence or absence of virtual content. |
| Myungho Lee; Gerd Bruder; Greg Welch The Virtual Pole: Exploring Human Responses to Fear of Heights in Immersive Virtual Environments Journal Article In: Journal of Virtual Reality and Broadcasting, vol. 14(2017), no. 6, 2019, ISSN: 1860-2037. @article{Lee2018b,
  title     = {The Virtual Pole: Exploring Human Responses to Fear of Heights in Immersive Virtual Environments},
  author    = {Myungho Lee and Gerd Bruder and Greg Welch},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2019/06/Lee2019b.pdf},
  doi       = {10.20385/1860-2037/14.2017.6},
  issn      = {1860-2037},
  year      = {2019},
  date      = {2019-02-04},
  journal   = {Journal of Virtual Reality and Broadcasting},
  volume    = {14(2017)},
  number    = {6},
  abstract  = {Measuring how effective immersive virtual environments (IVEs) are in reproducing sensations as in similar situations in the real world is an important task for many application fields. In this paper, we present an experimental setup which we call the virtual pole, where we evaluated human responses to fear of heights. We conducted experiments where we analyzed correlations between subjective and physiological anxiety measures and the participant’s view direction. Our results show that the view direction plays an important role in subjective and physiological anxiety in an IVE due to the limited field of view, and that the subjective and physiological anxiety measures monotonically increase with the increasing height. In addition, we also found that participants recollected the virtual content they saw at the top more accurately compared to that at the medium height. We discuss the results and provide guidelines for simulations aimed at evoking fear of heights responses in IVEs.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Measuring how effective immersive virtual environments (IVEs) are in reproducing sensations as in similar situations in the real world is an important task for many application fields. In this paper, we present an experimental setup which we call the virtual pole, where we evaluated human responses to fear of heights. We conducted experiments where we analyzed correlations between subjective and physiological anxiety measures and the participant’s view direction. Our results show that the view direction plays an important role in subjective and physiological anxiety in an IVE due to the limited field of view, and that the subjective and physiological anxiety measures monotonically increase with the increasing height. In addition, we also found that participants recollected the virtual content they saw at the top more accurately compared to that at the medium height. We discuss the results and provide guidelines for simulations aimed at evoking fear of heights responses in IVEs. |
2018
|
| Catherine S. Oh; Jeremy N. Bailenson; Gregory F. Welch A Systematic Review of Social Presence: Definition, Antecedents, and Implications Journal Article In: Frontiers in Robotics and AI, vol. 5, no. 114, 2018, ISSN: 2296-9144. @article{Oh2018,
title = {A Systematic Review of Social Presence: Definition, Antecedents, and Implications},
author = {Catherine S. Oh and Jeremy N. Bailenson and Gregory F. Welch},
editor = {Doron Friedman},
url = {https://sreal.ucf.edu/wp-content/uploads/2018/10/Oh2018.pdf},
doi = {10.3389/frobt.2018.00114},
issn = {2296-9144},
year = {2018},
date = {2018-10-15},
journal = {Frontiers in Robotics and AI},
volume = {5},
number = {114},
abstract = {Social presence, or the feeling of being there with a “real” person, is a crucial component of interactions that take place in virtual reality. This paper reviews the concept, antecedents, and implications of social presence, with a focus on the literature regarding the predictors of social presence. The article begins by exploring the concept of social presence, distinguishing it from two other dimensions of presence—telepresence and self-presence. After establishing the definition of social presence, the article offers a systematic review of 222 separate findings identified from 150 studies that investigate the factors (i.e., immersive qualities, contextual differences, and individual psychological traits) that predict social presence. Finally, the paper discusses the implications of heightened social presence and when it does and does not enhance one’s experience in a virtual environment.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Social presence, or the feeling of being there with a “real” person, is a crucial component of interactions that take place in virtual reality. This paper reviews the concept, antecedents, and implications of social presence, with a focus on the literature regarding the predictors of social presence. The article begins by exploring the concept of social presence, distinguishing it from two other dimensions of presence—telepresence and self-presence. After establishing the definition of social presence, the article offers a systematic review of 222 separate findings identified from 150 studies that investigate the factors (i.e., immersive qualities, contextual differences, and individual psychological traits) that predict social presence. Finally, the paper discusses the implications of heightened social presence and when it does and does not enhance one’s experience in a virtual environment. |
| Kangsoo Kim; Mark Billinghurst; Gerd Bruder; Henry Been-Lirn Duh; Gregory F. Welch Revisiting Trends in Augmented Reality Research: A Review of the 2nd Decade of ISMAR (2008–2017) Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 24, no. 11, pp. 2947-2962, 2018, ISSN: 1077-2626. @article{Kim2018b,
title = {Revisiting Trends in Augmented Reality Research: A Review of the 2nd Decade of {ISMAR} (2008–2017)},
author = {Kangsoo Kim and Mark Billinghurst and Gerd Bruder and Henry Been-Lirn Duh and Gregory F. Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2018/08/Kim2018b.pdf},
doi = {10.1109/TVCG.2018.2868591},
issn = {1077-2626},
year = {2018},
date = {2018-09-06},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {24},
number = {11},
pages = {2947--2962},
abstract = {In 2008, Zhou et al. presented a survey paper summarizing the previous ten years of ISMAR publications, which provided invaluable insights into the research challenges and trends associated with that time period. Ten years later, we review the research that has been presented at ISMAR conferences since the survey of Zhou et al., at a time when both academia and the AR industry are enjoying dramatic technological changes. Here we consider the research results and trends of the last decade of ISMAR by carefully reviewing the ISMAR publications from the period of 2008–2017, in the context of the first ten years. The numbers of papers for different research topics and their impacts by citations were analyzed while reviewing them—which reveals that there is a sharp increase in AR evaluation and rendering research. Based on this review we offer some observations related to potential future research areas or trends, which could be helpful to AR researchers and industry members looking ahead.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
In 2008, Zhou et al. presented a survey paper summarizing the previous ten years of ISMAR publications, which provided invaluable insights into the research challenges and trends associated with that time period. Ten years later, we review the research that has been presented at ISMAR conferences since the survey of Zhou et al., at a time when both academia and the AR industry are enjoying dramatic technological changes. Here we consider the research results and trends of the last decade of ISMAR by carefully reviewing the ISMAR publications from the period of 2008–2017, in the context of the first ten years. The numbers of papers for different research topics and their impacts by citations were analyzed while reviewing them—which reveals that there is a sharp increase in AR evaluation and rendering research. Based on this review we offer some observations related to potential future research areas or trends, which could be helpful to AR researchers and industry members looking ahead. |
| Eike Langbehn; Frank Steinicke; Markus Lappe; Gregory F. Welch; Gerd Bruder In the Blink of an Eye – Leveraging Blink-Induced Suppression for Imperceptible Position and Orientation Redirection in Virtual Reality Journal Article In: ACM Transactions on Graphics (TOG), Special Issue on ACM SIGGRAPH 2018, vol. 37, no. 4, pp. 1-11, 2018. @article{Langbehn2018,
title = {In the Blink of an Eye – Leveraging Blink-Induced Suppression for Imperceptible Position and Orientation Redirection in Virtual Reality},
author = {Eike Langbehn and Frank Steinicke and Markus Lappe and Gregory F. Welch and Gerd Bruder},
url = {https://sreal.ucf.edu/wp-content/uploads/2018/05/Langbehn2018.pdf},
doi = {10.1145/3197517.3201335},
year = {2018},
date = {2018-08-01},
journal = {ACM Transactions on Graphics (TOG), Special Issue on ACM SIGGRAPH 2018},
volume = {37},
number = {4},
pages = {1--11},
chapter = {66},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Roghayeh Barmaki; Charles E. Hughes Embodiment Analytics of Practicing Teachers in a Virtual Rehearsal Environment Journal Article In: Journal of Computer Assisted Learning, vol. 34, no. 4, pp. 387-396, 2018. @article{Barmaki2018,
title = {Embodiment Analytics of Practicing Teachers in a Virtual Rehearsal Environment},
author = {Roghayeh Barmaki and Charles E. Hughes},
doi = {10.1111/jcal.12268},
year = {2018},
date = {2018-05-21},
journal = {Journal of Computer Assisted Learning},
volume = {34},
number = {4},
pages = {387--396},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Ahmad Abualsamid; Charles E. Hughes Using a Mobile App to Reduce Off-Task Behaviors in Classrooms: A Pilot Study Journal Article In: Journal on Technology and Persons with Disabilities, vol. 6, pp. 378-384, 2018. @article{Abualsamid2018,
title = {Using a Mobile App to Reduce Off-Task Behaviors in Classrooms: A Pilot Study},
author = {Ahmad Abualsamid and Charles E. Hughes},
url = {http://hdl.handle.net/10211.3/203008},
year = {2018},
date = {2018-05-17},
journal = {Journal on Technology and Persons with Disabilities},
volume = {6},
pages = {378--384},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Myungho Lee; Gerd Bruder; Tobias Höllerer; Greg Welch Effects of Unaugmented Periphery and Vibrotactile Feedback on Proxemics with Virtual Humans in AR Journal Article In: IEEE Transactions on Visualization and Computer Graphics, vol. 24, no. 4, pp. 1525-1534, 2018. @article{Lee2018,
title = {Effects of Unaugmented Periphery and Vibrotactile Feedback on Proxemics with Virtual Humans in {AR}},
author = {Myungho Lee and Gerd Bruder and Tobias Höllerer and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2018/04/Lee2018.pdf},
doi = {10.1109/TVCG.2018.2794074},
year = {2018},
date = {2018-02-23},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {24},
number = {4},
pages = {1525--1534},
abstract = {In this paper, we investigate factors and issues related to human locomotion behavior and proxemics in the presence of a real or virtual human in augmented reality (AR). First, we discuss a unique issue with current-state optical see-through head-mounted displays. Second, we discuss the limited multimodal feedback provided by virtual humans in AR, present a potential improvement based on vibrotactile feedback induced via the floor to compensate for the limited augmented visual field, and report results showing that benefits of such vibrations are less visible in objective locomotion behavior than in subjective estimates of co-presence. Third, we investigate and document significant differences in the effects that real and virtual humans have on locomotion behavior in AR. We discuss potential explanations for these effects and analyze effects of different types of behaviors that such real or virtual humans may exhibit in the presence of an observer.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
In this paper, we investigate factors and issues related to human locomotion behavior and proxemics in the presence of a real or virtual human in augmented reality (AR). First, we discuss a unique issue with current-state optical see-through head-mounted displays. Second, we discuss the limited multimodal feedback provided by virtual humans in AR, present a potential improvement based on vibrotactile feedback induced via the floor to compensate for the limited augmented visual field, and report results showing that benefits of such vibrations are less visible in objective locomotion behavior than in subjective estimates of co-presence. Third, we investigate and document significant differences in the effects that real and virtual humans have on locomotion behavior in AR. We discuss potential explanations for these effects and analyze effects of different types of behaviors that such real or virtual humans may exhibit in the presence of an observer. |
2017
|
| Kangsoo Kim; Divine Maloney; Gerd Bruder; Jeremy Bailenson; Greg Welch The effects of virtual human's spatial and behavioral coherence with physical objects on social presence in AR Journal Article In: Computer Animation and Virtual Worlds, vol. 28, no. 3-4, pp. e1771, 2017. @article{Kim2017b,
title = {The effects of virtual human's spatial and behavioral coherence with physical objects on social presence in {AR}},
author = {Kangsoo Kim and Divine Maloney and Gerd Bruder and Jeremy Bailenson and Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/12/Kim2017b.pdf},
doi = {10.1002/cav.1771},
year = {2017},
date = {2017-05-21},
journal = {Computer Animation and Virtual Worlds},
volume = {28},
number = {3-4},
pages = {e1771},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Ryan Schubert; Gerd Bruder; Greg Welch Mitigating Perceptual Error in Synthetic Animatronics using Visual Feature Flow Journal Article In: Journal of Vision: Abstract Issue 2017, vol. 17, no. 10, pp. 331, 2017. @article{Schubert2017,
  title     = {Mitigating Perceptual Error in Synthetic Animatronics using Visual Feature Flow},
  author    = {Ryan Schubert and Gerd Bruder and Greg Welch},
  doi       = {10.1167/17.10.331},
  year      = {2017},
  date      = {2017-05-01},
  journal   = {Journal of Vision: Abstract Issue 2017},
  volume    = {17},
  number    = {10},
  pages     = {331},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
|
| Myungho Lee; Gerd Bruder; Greg Welch Effect of Vibrotactile Feedback through the Floor on Social Presence in an Immersive Virtual Environment Journal Article In: Journal of Vision: Abstract Issue 2017, vol. 17, no. 10, pp. 357, 2017. @article{Lee2017,
  title     = {Effect of Vibrotactile Feedback through the Floor on Social Presence in an Immersive Virtual Environment},
  author    = {Myungho Lee and Gerd Bruder and Greg Welch},
  doi       = {10.1167/17.10.357},
  year      = {2017},
  date      = {2017-05-01},
  journal   = {Journal of Vision: Abstract Issue 2017},
  volume    = {17},
  number    = {10},
  pages     = {357},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
|
| Lisa Dieker; Charles E. Hughes; Michael Hynes; Carrie Straub Using simulated virtual environments to improve teacher performance Journal Article In: School University Partnerships (Journal of the National Association for Professional Development Schools): Special Issue: Technology to Enhance PDS, vol. 10, no. 3, pp. 62-81, 2017. @article{Dieker2017,
title = {Using simulated virtual environments to improve teacher performance},
author = {Lisa Dieker and Charles E. Hughes and Michael Hynes and Carrie Straub},
url = {http://napds.org/wp-content/uploads/2017/07/Using-Simulated-Virtual-Environments-to-Improve-Teacher-Performance-.pdf},
year = {2017},
date = {2017-01-01},
journal = {School University Partnerships (Journal of the National Association for Professional Development Schools): Special Issue: Technology to Enhance PDS},
volume = {10},
number = {3},
pages = {62--81},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Kangsoo Kim; Arjun Nagendran; Jeremy N. Bailenson; Andrew Raij; Gerd Bruder; Myungho Lee; Ryan Schubert; Xin Yan; Gregory F. Welch A Large-Scale Study of Surrogate Physicality and Gesturing on Human–Surrogate Interactions in a Public Space Journal Article In: Frontiers in Robotics and AI, vol. 4, pp. 1-20, 2017, ISSN: 2296-9144. @article{Kim2017,
title = {A Large-Scale Study of Surrogate Physicality and Gesturing on Human–Surrogate Interactions in a Public Space},
author = {Kangsoo Kim and Arjun Nagendran and Jeremy N. Bailenson and Andrew Raij and Gerd Bruder and Myungho Lee and Ryan Schubert and Xin Yan and Gregory F. Welch},
url = {http://journal.frontiersin.org/article/10.3389/frobt.2017.00032
https://sreal.ucf.edu/wp-content/uploads/2017/07/Kim2017.pdf},
doi = {10.3389/frobt.2017.00032},
issn = {2296-9144},
year = {2017},
date = {2017-01-01},
journal = {Frontiers in Robotics and AI},
volume = {4},
pages = {1--20},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|