2000
|
 | Aditi Majumder; Zhu He; Herman Towles; Greg Welch Achieving color uniformity across multi-projector displays Proceedings Article In: Proceedings of the conference on Visualization '00, pp. 117–124, IEEE Computer Society Press, Salt Lake City, Utah, United States, 2000, ISBN: 1-58113-309-X. @inproceedings{Majumder2000,
  title     = {Achieving color uniformity across multi-projector displays},
  author    = {Aditi Majumder and Zhu He and Herman Towles and Greg Welch},
  url       = {http://portal.acm.org/citation.cfm?id=375213.375227
https://sreal.ucf.edu/wp-content/uploads/2017/02/Majumder2000.pdf},
  isbn      = {1-58113-309-X},
  year      = {2000},
  date      = {2000-01-01},
  booktitle = {Proceedings of the conference on Visualization '00},
  pages     = {117--124},
  publisher = {IEEE Computer Society Press},
  address   = {Salt Lake City, Utah, United States},
  series    = {VIS '00},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
|
| Ruigang Yang; Greg Welch Automatic Display Surface Estimation using Everyday Imagery Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR00-015, 2000. @techreport{YangR2000,
  title       = {Automatic Display Surface Estimation using Everyday Imagery},
  author      = {Ruigang Yang and Greg Welch},
  year        = {2000},
  date        = {2000-01-01},
  number      = {TR00-015},
  institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
|
 | Greg Welch; Henry Fuchs; Ramesh Raskar; Michael Brown; Herman Towles Projected Imagery In Your Office in the Future Journal Article In: IEEE Computer Graphics and Applications, vol. 20, no. 4, pp. 62-67, 2000. @article{Welch2000a,
title = {Projected Imagery In Your Office in the Future},
author = {Greg Welch and Henry Fuchs and Ramesh Raskar and Michael Brown and Herman Towles},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch2000a.pdf},
year = {2000},
date = {2000-01-01},
journal = {IEEE Computer Graphics and Applications},
volume = {20},
number = {4},
pages = {62--67},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
1999
|
 | Greg Welch; Gary Bishop; Leandra Vicci; Stephen Brumback; Kurtis Keller; D'nardo Colucci The HiBall Tracker: High-Performance Wide-Area Tracking for Virtual and Augmented Environments Proceedings Article In: Proceedings of the ACM Symposium on Virtual Reality Software and Technology (VRST), pp. 1–11, Association of Computing Machinery ACM Press, Addison-Wesley Publishing Company, 1999, (Best Paper designation.). @inproceedings{Welch1999,
title = {The HiBall Tracker: High-Performance Wide-Area Tracking for Virtual and Augmented Environments},
author = {Greg Welch and Gary Bishop and Leandra Vicci and Stephen Brumback and Kurtis Keller and D'nardo Colucci},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1999.pdf},
doi = {10.1145/323663.323664},
year = {1999},
date = {1999-12-01},
booktitle = {Proceedings of the ACM Symposium on Virtual Reality Software and Technology (VRST)},
pages = {1--11},
publisher = {ACM Press, Addison-Wesley Publishing Company},
organization = {Association for Computing Machinery},
note = {Best Paper designation.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
 | Ramesh Raskar; Michael Brown; Ruigang Yang; Wei-Chao Chen; Greg Welch; Herman Towles; Brent Seales; Henry Fuchs Multi-Projector Displays Using Camera-Based Registration Book Section In: Proceedings of the Conference on Visualization 99: Celebrating Ten Years, pp. 161-168, San Francisco, CA, USA (October 24 - 29), 1999. @incollection{Raskar1999a,
title = {Multi-Projector Displays Using Camera-Based Registration},
author = {Ramesh Raskar and Michael Brown and Ruigang Yang and Wei-Chao Chen and Greg Welch and Herman Towles and Brent Seales and Henry Fuchs},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1999a.pdf},
year = {1999},
date = {1999-01-01},
booktitle = {Proceedings of the Conference on Visualization 99: Celebrating Ten Years},
pages = {161--168},
address = {San Francisco, CA, USA (October 24 - 29)},
series = {IEEE Visualization},
abstract = {Conventional projector-based display systems are typically designed around precise and regular configurations of projectors and display surfaces. While this results in rendering simplicity and speed, it also means painstaking construction and ongoing maintenance. In previously published work, we introduced a vision of projector-based displays constructed from a collection of casually-arranged projectors and display surfaces.\par In this paper, we present flexible yet practical methods for realizing this vision, enabling low-cost mega-pixel display systems with large physical dimensions, higher resolution, or both. The techniques afford new opportunities to build personal 3D visualization systems in offices, conference rooms, theaters, or even your living room. As a demonstration of the simplicity and effectiveness of the methods that we continue to perfect, we show in the included video that a 10-year old child can construct and calibrate a two-camera, two-projector, head-tracked display system, all in about 15 minutes.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Conventional projector-based display systems are typically designed around precise and regular configurations of projectors and display surfaces. While this results in rendering simplicity and speed, it also means painstaking construction and ongoing maintenance. In previously published work, we introduced a vision of projector-based displays constructed from a collection of casually-arranged projectors and display surfaces. In this paper, we present flexible yet practical methods for realizing this vision, enabling low-cost mega-pixel display systems with large physical dimensions, higher resolution, or both. The techniques afford new opportunities to build personal 3D visualization systems in offices, conference rooms, theaters, or even your living room. As a demonstration of the simplicity and effectiveness of the methods that we continue to perfect, we show in the included video that a 10-year old child can construct and calibrate a two-camera, two-projector, head-tracked display system, all in about 15 minutes. |
 | Ramesh Raskar; Greg Welch; Wei-Chao Chen Table-Top Spatially-Augmented Reality: Bringing Physical Models to Life with Projected Imagery Proceedings Article In: IWAR '99: Proceedings of the 2nd IEEE and ACM International Workshop on Augmented Reality, pp. 64, IEEE Computer Society, Washington, DC, USA, 1999, ISBN: 0-7695-0359-4. @inproceedings{Raskar1999aa,
  title     = {Table-Top Spatially-Augmented Reality: Bringing Physical Models to Life with Projected Imagery},
  author    = {Ramesh Raskar and Greg Welch and Wei-Chao Chen},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1999aa.pdf},
  isbn      = {0-7695-0359-4},
  year      = {1999},
  date      = {1999-01-01},
  booktitle = {IWAR '99: Proceedings of the 2nd IEEE and ACM International Workshop on Augmented Reality},
  pages     = {64},
  publisher = {IEEE Computer Society},
  address   = {Washington, DC, USA},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
|
 | Brent Seales; Greg Welch; Chris Jaynes Real-Time Depth Warping for 3-D Scene Reconstruction Proceedings Article In: 1999 IEEE Aerospace Conference, Snowmass at Aspen, CO USA, 1999. @inproceedings{Seales1999,
  title     = {Real-Time Depth Warping for 3-D Scene Reconstruction},
  author    = {Brent Seales and Greg Welch and Chris Jaynes},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Seales1999.pdf},
  year      = {1999},
  date      = {1999-01-01},
  booktitle = {1999 IEEE Aerospace Conference},
  address   = {Snowmass at Aspen, CO USA},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
|
| Greg Welch; Matthew Cutts; Gary Bishop A Self-Contained Wide-Area Tracker using Sensor Fusion Unpublished 1999, (Submission to SIGGRAPH 1999 (ID papers_1566)). @unpublished{Welch1999aa,
  title     = {A Self-Contained Wide-Area Tracker using Sensor Fusion},
  author    = {Greg Welch and Matthew Cutts and Gary Bishop},
  year      = {1999},
  date      = {1999-01-01},
  note      = {Submission to SIGGRAPH 1999 (ID papers_1566)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|
 | Ramesh Raskar; Greg Welch; Wei-Chao Chen Table-Top Spatially-Augmented Reality: Bringing Physical Models to Life with Projected Imagery Book Section In: Second International Workshop on Augmented Reality (IWAR'99), pp. 64-71, San Francisco, CA, USA, 1999. @incollection{Raskar1999b,
title = {Table-Top Spatially-Augmented Reality: Bringing Physical Models to Life with Projected Imagery},
author = {Ramesh Raskar and Greg Welch and Wei-Chao Chen},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1999b.pdf},
year = {1999},
date = {1999-01-01},
booktitle = {Second International Workshop on Augmented Reality (IWAR'99)},
pages = {64--71},
address = {San Francisco, CA, USA},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
|
1998
|
 | Ronald T. Azuma; Bruce R. Hoff; Howard E. Neely, III; Ronald Sarfaty; Michael J. Daily; Gary Bishop; Vernon Chi; Greg Welch; Ulrich Neumann; Suya You; Rich Nichols; Jim Cannon Making Augmented Reality Work Outdoors Requires Hybrid Tracking Proceedings Article In: First International Workshop on Augmented Reality, pp. 219-224, San Francisco, CA, USA, 1998. @inproceedings{Azuma1998,
title = {Making Augmented Reality Work Outdoors Requires Hybrid Tracking},
author = {Ronald T. Azuma and Bruce R. Hoff and Neely, III, Howard E. and Ronald Sarfaty and Michael J. Daily and Gary Bishop and Vernon Chi and Greg Welch and Ulrich Neumann and Suya You and Rich Nichols and Jim Cannon},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Azuma1998.pdf},
year = {1998},
date = {1998-01-01},
booktitle = {First International Workshop on Augmented Reality},
pages = {219--224},
address = {San Francisco, CA, USA},
abstract = {Developing Augmented Reality systems that work outdoors, rather than indoors in constrained environments, will open new application areas and motivate the construction of new, more general tracking approaches. Accurate tracking outdoors is difficult because we have little control over the environment and fewer resources available compared to an indoor application. This position paper examines the individual tracking technologies available and concludes that for the near term, a hybrid solution is the only viable approach. The distortion measured from an electronic compass and tilt sensor is discussed.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Developing Augmented Reality systems that work outdoors, rather than indoors in constrained environments, will open new application areas and motivate the construction of new, more general tracking approaches. Accurate tracking outdoors is difficult because we have little control over the environment and fewer resources available compared to an indoor application. This position paper examines the individual tracking technologies available and concludes that for the near term, a hybrid solution is the only viable approach. The distortion measured from an electronic compass and tilt sensor is discussed. |
 | Ramesh Raskar; Gregory Welch; Matthew Cutts; Adam Lake; Lev Stesin; Henry Fuchs The Office of the Future: A Unified Approach to Image-Based Modeling and Spatially Immersive Displays Book Section In: Cohen, Michael F. (Ed.): Computer Graphics, pp. 179-188, ACM Press, Addison-Wesley, Orlando, FL, USA (July 19 - 24), 1998. @incollection{Raskar1998a,
title = {The Office of the Future: A Unified Approach to Image-Based Modeling and Spatially Immersive Displays},
author = {Ramesh Raskar and Gregory Welch and Matthew Cutts and Adam Lake and Lev Stesin and Henry Fuchs},
editor = {Michael F. Cohen},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998a.pdf},
doi = {10.1145/280814.280861},
year = {1998},
date = {1998-01-01},
booktitle = {Computer Graphics},
pages = {179--188},
publisher = {ACM Press, Addison-Wesley},
address = {Orlando, FL, USA (July 19 - 24)},
edition = {SIGGRAPH Conference Proceedings},
series = {Annual Conference on Computer Graphics \& Interactive Techniques},
abstract = {We introduce ideas, proposed technologies, and initial results for an office of the future that is based on a unified application of computer vision and computer graphics in a system that combines and builds upon the notions of the CAVE, tiled display systems, and image-based modeling. The basic idea is to use real-time computer vision techniques to dynamically extract per-pixel depth and reflectance information for the visible surfaces in the office including walls, furniture, objects, and people, and then to either project images on the surfaces, render images of the surfaces, or interpret changes in the surfaces. In the first case, one could designate every-day (potentially irregular) real surfaces in the office to be used as spatially immersive display surfaces, and then project high-resolution graphics and text onto those surfaces. In the second case, one could transmit the dynamic image-based models over a network for display at a remote site. Finally, one could interpret dynamic changes in the surfaces for the purposes of tracking, interaction, or augmented reality applications.\par To accomplish the simultaneous capture and display we envision an office of the future where the ceiling lights are replaced by computer controlled cameras and smart projectors that are used to capture dynamic image-based models with imperceptible structured light techniques, and to display high-resolution images on designated display surfaces. By doing both simultaneously on the designated display surfaces, one can dynamically adjust or autocalibrate for geometric, intensity, and resolution variations resulting from irregular or changing display surfaces, or overlapped projector images.\par Our current approach to dynamic image-based modeling is to use an optimized structured light scheme that can capture per-pixel depth and reflectance at interactive rates. Our system implementation is not yet imperceptible, but we can demonstrate the approach in the laboratory. 
Our approach to rendering on the designated (potentially irregular) display surfaces is to employ a two-pass projective texture scheme to generate images that when projected onto the surfaces appear correct to a moving head-tracked observer. We present here an initial implementation of the overall vision, in an office-like setting, and preliminary demonstrations of our dynamic modeling and display techniques.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
We introduce ideas, proposed technologies, and initial results for an office of the future that is based on a unified application of computer vision and computer graphics in a system that combines and builds upon the notions of the CAVE, tiled display systems, and image-based modeling. The basic idea is to use real-time computer vision techniques to dynamically extract per-pixel depth and reflectance information for the visible surfaces in the office including walls, furniture, objects, and people, and then to either project images on the surfaces, render images of the surfaces, or interpret changes in the surfaces. In the first case, one could designate every-day (potentially irregular) real surfaces in the office to be used as spatially immersive display surfaces, and then project high-resolution graphics and text onto those surfaces. In the second case, one could transmit the dynamic image-based models over a network for display at a remote site. Finally, one could interpret dynamic changes in the surfaces for the purposes of tracking, interaction, or augmented reality applications. To accomplish the simultaneous capture and display we envision an office of the future where the ceiling lights are replaced by computer controlled cameras and smart projectors that are used to capture dynamic image-based models with imperceptible structured light techniques, and to display high-resolution images on designated display surfaces. By doing both simultaneously on the designated display surfaces, one can dynamically adjust or autocalibrate for geometric, intensity, and resolution variations resulting from irregular or changing display surfaces, or overlapped projector images. Our current approach to dynamic image-based modeling is to use an optimized structured light scheme that can capture per-pixel depth and reflectance at interactive rates. Our system implementation is not yet imperceptible, but we can demonstrate the approach in the laboratory. 
Our approach to rendering on the designated (potentially irregular) display surfaces is to employ a two-pass projective texture scheme to generate images that when projected onto the surfaces appear correct to a moving head-tracked observer. We present here an initial implementation of the overall vision, in an office-like setting, and preliminary demonstrations of our dynamic modeling and display techniques. |
| Ramesh Raskar; Greg Welch; Henry Fuchs Spatially Augmented Reality Proceedings Article In: First IEEE Workshop on Augmented Reality (IWAR '98), pp. 11–20, 1998. @inproceedings{Raskar1998aa,
title = {Spatially Augmented Reality},
author = {Ramesh Raskar and Greg Welch and Henry Fuchs},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998d-IWAR_SAR.pdf},
year = {1998},
date = {1998-01-01},
booktitle = {First IEEE Workshop on Augmented Reality (IWAR '98)},
pages = {11--20},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
 | Ramesh Raskar; Greg Welch; Henry Fuchs Seamless Projection Overlaps Using Warping and Intensity Blending Proceedings Article In: Fourth International Conference on Virtual Systems and Multimedia (VSMM), Gifu, Japan, 1998. @inproceedings{Raskar1998b,
  title     = {Seamless Projection Overlaps Using Warping and Intensity Blending},
  author    = {Ramesh Raskar and Greg Welch and Henry Fuchs},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998b.pdf},
  year      = {1998},
  date      = {1998-01-01},
  booktitle = {Fourth International Conference on Virtual Systems and Multimedia (VSMM)},
  address   = {Gifu, Japan},
  abstract  = {High-resolution Spatially Immersive Displays (SID) generally involve wide field of view (WFOV) image generation using multiple projectors. This paper describes a robust calibration and rendering method for projector based seamless displays using a video camera. It solves the basic problem of registering and blending overlap of two projections at a time. It is applicable even when the displays are not flat walls or projection axes are not orthogonal to the displays. Projectors' intrinsic or extrinsic parameters are not required.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
High-resolution Spatially Immersive Displays (SID) generally involve wide field of view (WFOV) image generation using multiple projectors. This paper describes a robust calibration and rendering method for projector based seamless displays using a video camera. It solves the basic problem of registering and blending overlap of two projections at a time. It is applicable even when the displays are not flat walls or projection axes are not orthogonal to the displays. Projectors' intrinsic or extrinsic parameters are not required. |
 | Ramesh Raskar; Matthew Cutts; Greg Welch; Wolfgang Stüerzlinger Efficient Image Generation for Multiprojector and Multisurface Displays Book Section In: Drettakis, George; Max, Nelson (Ed.): Proceedings of the Eurographics Workshop in Vienna, Austria, pp. 139-144, Springer Verlag, Vienna, Austria (June 29 - July 1), 1998, (ISBN 3-211-83213-0). @incollection{Raskar1998c,
title = {Efficient Image Generation for Multiprojector and Multisurface Displays},
author = {Ramesh Raskar and Matthew Cutts and Greg Welch and Wolfgang St{\"u}erzlinger},
editor = {George Drettakis and Nelson Max},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998c.pdf},
isbn = {3-211-83213-0},
year = {1998},
date = {1998-01-01},
booktitle = {Proceedings of the Eurographics Workshop in Vienna, Austria},
pages = {139--144},
publisher = {Springer Verlag},
address = {Vienna, Austria (June 29 - July 1)},
edition = {Rendering Techniques 98},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
|
| Ramesh Raskar; Greg Welch; Henry Fuchs Spatially Augmented Reality Book Section In: Behringer, Reinhold; Klinker, Gudrun; Mizell, David (Ed.): Augmented Reality: Placing Artificial Objects in Real Scenes — Proceedings of the First IEEE Workshop on Augmented Reality (IWAR'98), pp. 63-72, A.K. Peters Ltd., San Francisco, CA, USA (November 1, 1998), 1998, (Long Lasting Impact Paper Award; ISBN 1-56881-098-9). @incollection{Raskar1998d,
title = {Spatially Augmented Reality},
author = {Ramesh Raskar and Greg Welch and Henry Fuchs},
editor = {Reinhold Behringer and Gudrun Klinker and David Mizell},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998d-IWAR_SAR.pdf},
isbn = {1-56881-098-9},
year = {1998},
date = {1998-01-01},
booktitle = {Augmented Reality: Placing Artificial Objects in Real Scenes --- Proceedings of the First IEEE Workshop on Augmented Reality (IWAR'98)},
pages = {63--72},
publisher = {A.K. Peters Ltd.},
address = {San Francisco, CA, USA (November 1, 1998)},
note = {Long Lasting Impact Paper Award.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
|
 | Ramesh Raskar; Henry Fuchs; Gregory Welch; Adam Lake; Matthew Cutts 3D Talking Heads: Image Based Modeling at Interactive Rates using Structured Light Projection Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR98-017, 1998. @techreport{Raskar1998e,
  title       = {3D Talking Heads: Image Based Modeling at Interactive Rates using Structured Light Projection},
  author      = {Ramesh Raskar and Henry Fuchs and Gregory Welch and Adam Lake and Matthew Cutts},
  url         = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998e.pdf},
  year        = {1998},
  date        = {1998-01-01},
  number      = {TR98-017},
  institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
|
1997
|
 | Greg Welch; Gary Bishop SCAAT: Incremental Tracking with Incomplete Information Book Section In: Whitted, Turner (Ed.): Computer Graphics, pp. 333–344, ACM Press, Addison-Wesley, Los Angeles, CA, USA (August 3--8), 1997. @incollection{Welch1997,
title = {SCAAT: Incremental Tracking with Incomplete Information},
author = {Greg Welch and Gary Bishop},
editor = {Turner Whitted},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1997.pdf},
year = {1997},
date = {1997-01-01},
booktitle = {Computer Graphics},
pages = {333--344},
publisher = {ACM Press, Addison-Wesley},
address = {Los Angeles, CA, USA (August 3--8)},
edition = {SIGGRAPH 97 Conference Proceedings},
series = {Annual Conference on Computer Graphics \& Interactive Techniques},
abstract = {We present a promising new mathematical method for tracking a user's pose (position and orientation) for interactive computer graphics. The method, which is applicable to a wide variety of both commercial and experimental systems, improves accuracy by properly assimilating sequential observations, filtering sensor measurements, and by concurrently autocalibrating source and sensor devices. It facilitates user motion prediction, multisensor data fusion, and higher report rates with lower latency than previous methods.\par Tracking systems determine the user's pose by measuring signals from low-level hardware sensors. For reasons of physics and economics, most systems make multiple sequential measurements which are then combined to produce a single tracker report. For example, commercial magnetic trackers using the SPASYN (Space Synchro) system sequentially measure three magnetic vectors and then combine them mathematically to produce a report of the sensor pose.\par Our new approach produces tracker reports as each new low-level sensor measurement is made rather than waiting to form a complete collection of observations. Because single observations under-constrain the mathematical solution, we refer to our approach as single-constraint-at-a-time or SCAAT tracking. The key is that the single observations provide some information about the user's state, and thus can be used to incrementally improve a previous estimate. We recursively apply this principle, incorporating new sensor data as soon as it is measured. With this approach we are able to generate estimates more frequently, with less latency, and with improved accuracy. We present results from both an actual implementation, and from extensive simulations.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
We present a promising new mathematical method for tracking a user's pose (position and orientation) for interactive computer graphics. The method, which is applicable to a wide variety of both commercial and experimental systems, improves accuracy by properly assimilating sequential observations, filtering sensor measurements, and by concurrently autocalibrating source and sensor devices. It facilitates user motion prediction, multisensor data fusion, and higher report rates with lower latency than previous methods. Tracking systems determine the user's pose by measuring signals from low-level hardware sensors. For reasons of physics and economics, most systems make multiple sequential measurements which are then combined to produce a single tracker report. For example, commercial magnetic trackers using the SPASYN (Space Synchro) system sequentially measure three magnetic vectors and then combine them mathematically to produce a report of the sensor pose. Our new approach produces tracker reports as each new low-level sensor measurement is made rather than waiting to form a complete collection of observations. Because single observations under-constrain the mathematical solution, we refer to our approach as single-constraint-at-a-time or SCAAT tracking. The key is that the single observations provide some information about the user's state, and thus can be used to incrementally improve a previous estimate. We recursively apply this principle, incorporating new sensor data as soon as it is measured. With this approach we are able to generate estimates more frequently, with less latency, and with improved accuracy. We present results from both an actual implementation, and from extensive simulations. |
1996
|
 | Gregory Francis Welch SCAAT: Incremental Tracking with Incomplete Information PhD Thesis University of North Carolina at Chapel Hill, 1996. @phdthesis{Welch1996,
  title     = {SCAAT: Incremental Tracking with Incomplete Information},
  author    = {Gregory Francis Welch},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1996.pdf},
  year      = {1996},
  date      = {1996-01-01},
  address   = {Chapel Hill, NC},
  school    = {University of North Carolina at Chapel Hill},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {phdthesis}
}
|
| Greg Welch; Gary Bishop One-Step-at-a-Time Tracking Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR96-021, 1996. @techreport{Welch1996b,
title = {One-Step-at-a-Time Tracking},
author = {Greg Welch and Gary Bishop},
year = {1996},
date = {1996-01-01},
number = {TR96-021},
institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
|
1995
|
 | Greg Welch Hybrid Self-Tracker: An Inertial/Optical Hybrid Three-Dimensional Tracking System Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR95-048, 1995. @techreport{Welch1995aa,
  title       = {Hybrid Self-Tracker: An Inertial/Optical Hybrid Three-Dimensional Tracking System},
  author      = {Greg Welch},
  url         = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1995aa.pdf},
  year        = {1995},
  date        = {1995-01-01},
  number      = {TR95-048},
  institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
|