1998
|
| Ramesh Raskar; Gregory Welch; Matthew Cutts; Adam Lake; Lev Stesin; Henry Fuchs The Office of the Future: A Unified Approach to Image-Based Modeling and Spatially Immersive Displays Book Section In: Cohen, Michael F. (Ed.): Computer Graphics, pp. 179-188, ACM Press, Addison-Wesley, Orlando, FL, USA (July 19 - 24), 1998. @incollection{Raskar1998a,
title = {The Office of the Future: A Unified Approach to Image-Based Modeling and Spatially Immersive Displays},
author = {Ramesh Raskar and Gregory Welch and Matthew Cutts and Adam Lake and Lev Stesin and Henry Fuchs},
editor = {Michael F. Cohen},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998a.pdf},
doi = {10.1145/280814.280861},
year = {1998},
date = {1998-01-01},
booktitle = {Computer Graphics},
pages = {179--188},
publisher = {ACM Press, Addison-Wesley},
address = {Orlando, FL, USA (July 19 - 24)},
edition = {SIGGRAPH Conference Proceedings},
series = {Annual Conference on Computer Graphics \& Interactive Techniques},
abstract = {We introduce ideas, proposed technologies, and initial results for an office of the future that is based on a unified application of computer vision and computer graphics in a system that combines and builds upon the notions of the CAVE, tiled display systems, and image-based modeling. The basic idea is to use real-time computer vision techniques to dynamically extract per-pixel depth and reflectance information for the visible surfaces in the office including walls, furniture, objects, and people, and then to either project images on the surfaces, render images of the surfaces, or interpret changes in the surfaces. In the first case, one could designate every-day (potentially irregular) real surfaces in the office to be used as spatially immersive display surfaces, and then project high-resolution graphics and text onto those surfaces. In the second case, one could transmit the dynamic image-based models over a network for display at a remote site. Finally, one could interpret dynamic changes in the surfaces for the purposes of tracking, interaction, or augmented reality applications.\par To accomplish the simultaneous capture and display we envision an office of the future where the ceiling lights are replaced by computer controlled cameras and smart projectors that are used to capture dynamic image-based models with imperceptible structured light techniques, and to display high-resolution images on designated display surfaces. By doing both simultaneously on the designated display surfaces, one can dynamically adjust or autocalibrate for geometric, intensity, and resolution variations resulting from irregular or changing display surfaces, or overlapped projector images.\par Our current approach to dynamic image-based modeling is to use an optimized structured light scheme that can capture per-pixel depth and reflectance at interactive rates. Our system implementation is not yet imperceptible, but we can demonstrate the approach in the laboratory. 
Our approach to rendering on the designated (potentially irregular) display surfaces is to employ a two-pass projective texture scheme to generate images that when projected onto the surfaces appear correct to a moving head-tracked observer. We present here an initial implementation of the overall vision, in an office-like setting, and preliminary demonstrations of our dynamic modeling and display techniques.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
We introduce ideas, proposed technologies, and initial results for an office of the future that is based on a unified application of computer vision and computer graphics in a system that combines and builds upon the notions of the CAVE, tiled display systems, and image-based modeling. The basic idea is to use real-time computer vision techniques to dynamically extract per-pixel depth and reflectance information for the visible surfaces in the office including walls, furniture, objects, and people, and then to either project images on the surfaces, render images of the surfaces, or interpret changes in the surfaces. In the first case, one could designate every-day (potentially irregular) real surfaces in the office to be used as spatially immersive display surfaces, and then project high-resolution graphics and text onto those surfaces. In the second case, one could transmit the dynamic image-based models over a network for display at a remote site. Finally, one could interpret dynamic changes in the surfaces for the purposes of tracking, interaction, or augmented reality applications. To accomplish the simultaneous capture and display we envision an office of the future where the ceiling lights are replaced by computer controlled cameras and smart projectors that are used to capture dynamic image-based models with imperceptible structured light techniques, and to display high-resolution images on designated display surfaces. By doing both simultaneously on the designated display surfaces, one can dynamically adjust or autocalibrate for geometric, intensity, and resolution variations resulting from irregular or changing display surfaces, or overlapped projector images. Our current approach to dynamic image-based modeling is to use an optimized structured light scheme that can capture per-pixel depth and reflectance at interactive rates. Our system implementation is not yet imperceptible, but we can demonstrate the approach in the laboratory. 
Our approach to rendering on the designated (potentially irregular) display surfaces is to employ a two-pass projective texture scheme to generate images that when projected onto the surfaces appear correct to a moving head-tracked observer. We present here an initial implementation of the overall vision, in an office-like setting, and preliminary demonstrations of our dynamic modeling and display techniques. |
| Ramesh Raskar; Greg Welch; Henry Fuchs Spatially Augmented Reality Proceedings Article In: First IEEE Workshop on Augmented Reality (IWAR '98), pp. 11–20, 1998. @inproceedings{Raskar1998aa,
title = {Spatially Augmented Reality},
author = {Ramesh Raskar and Greg Welch and Henry Fuchs},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998d-IWAR_SAR.pdf},
year = {1998},
date = {1998-01-01},
booktitle = {First IEEE Workshop on Augmented Reality (IWAR '98)},
pages = {11--20},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
|
| Ramesh Raskar; Greg Welch; Henry Fuchs Seamless Projection Overlaps Using Warping and Intensity Blending Proceedings Article In: Fourth International Conference on Virtual Systems and Multimedia (VSMM), Gifu, Japan, 1998. @inproceedings{Raskar1998b,
  author    = {Ramesh Raskar and Greg Welch and Henry Fuchs},
  title     = {Seamless Projection Overlaps Using Warping and Intensity Blending},
  booktitle = {Fourth International Conference on Virtual Systems and Multimedia (VSMM)},
  address   = {Gifu, Japan},
  year      = {1998},
  date      = {1998-01-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998b.pdf},
  abstract  = {High-resolution Spatially Immersive Displays (SID) generally involve wide field of view (WFOV) image generation using multiple projectors. This paper describes a robust calibration and rendering method for projector based seamless displays using a video camera. It solves the basic problem of registering and blending overlap of two projections at a time. It is applicable even when the displays are not flat walls or projection axes are not orthogonal to the displays. Projectors' intrinsic or extrinsic parameters are not required.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
High-resolution Spatially Immersive Displays (SID) generally involve wide field of view (WFOV) image generation using multiple projectors. This paper describes a robust calibration and rendering method for projector based seamless displays using a video camera. It solves the basic problem of registering and blending overlap of two projections at a time. It is applicable even when the displays are not flat walls or projection axes are not orthogonal to the displays. Projectors' intrinsic or extrinsic parameters are not required. |
| Ramesh Raskar; Matthew Cutts; Greg Welch; Wolfgang Stürzlinger Efficient Image Generation for Multiprojector and Multisurface Displays Book Section In: Drettakis, George; Max, Nelson (Ed.): Proceedings of the Eurographics Workshop in Vienna, Austria, pp. 139-144, Springer Verlag, Vienna, Austria (June 29 - July 1), 1998, (ISBN 3-211-83213-0). @incollection{Raskar1998c,
title = {Efficient Image Generation for Multiprojector and Multisurface Displays},
author = {Ramesh Raskar and Matthew Cutts and Greg Welch and Wolfgang St{\"u}rzlinger},
editor = {George Drettakis and Nelson Max},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998c.pdf},
year = {1998},
date = {1998-01-01},
booktitle = {Proceedings of the Eurographics Workshop in Vienna, Austria},
pages = {139--144},
publisher = {Springer Verlag},
address = {Vienna, Austria (June 29 - July 1)},
edition = {Rendering Techniques 98},
isbn = {3-211-83213-0},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
|
| Ramesh Raskar; Greg Welch; Henry Fuchs Spatially Augmented Reality Book Section In: Behringer, Reinhold; Klinker, Gudrun; Mizell, David (Ed.): Augmented Reality: Placing Artificial Objects in Real Scenes. Proceedings of the First IEEE Workshop on Augmented Reality (IWAR'98). Long lasting Impact Paper Award, pp. 63-72, A.K. Peters Ltd., San Francisco, CA, USA (November 1, 1998), 1998, (ISBN 1-56881-098-9). @incollection{Raskar1998d,
title = {Spatially Augmented Reality},
author = {Ramesh Raskar and Greg Welch and Henry Fuchs},
editor = {Reinhold Behringer and Gudrun Klinker and David Mizell},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998d-IWAR_SAR.pdf},
year = {1998},
date = {1998-01-01},
booktitle = {Augmented Reality: Placing Artificial Objects in Real Scenes. Proceedings of the First IEEE Workshop on Augmented Reality (IWAR'98)},
pages = {63--72},
publisher = {A.K. Peters Ltd.},
address = {San Francisco, CA, USA (November 1, 1998)},
isbn = {1-56881-098-9},
note = {Long lasting Impact Paper Award},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
|
| Ramesh Raskar; Henry Fuchs; Gregory Welch; Adam Lake; Matthew Cutts 3D Talking Heads: Image Based Modeling at Interactive Rates using Structured Light Projection Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR98-017, 1998. @techreport{Raskar1998e,
title = {{3D} Talking Heads: Image Based Modeling at Interactive Rates using Structured Light Projection},
author = {Ramesh Raskar and Henry Fuchs and Gregory Welch and Adam Lake and Matthew Cutts},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Raskar1998e.pdf},
year = {1998},
date = {1998-01-01},
number = {TR98-017},
institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
|
1997
|
| Greg Welch; Gary Bishop SCAAT: Incremental Tracking with Incomplete Information Book Section In: Whitted, Turner (Ed.): Computer Graphics, pp. 333–344, ACM Press, Addison-Wesley, Los Angeles, CA, USA (August 3--8), 1997. @incollection{Welch1997,
title = {{SCAAT}: Incremental Tracking with Incomplete Information},
author = {Greg Welch and Gary Bishop},
editor = {Turner Whitted},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1997.pdf},
year = {1997},
date = {1997-01-01},
booktitle = {Computer Graphics},
pages = {333--344},
publisher = {ACM Press, Addison-Wesley},
address = {Los Angeles, CA, USA (August 3--8)},
edition = {SIGGRAPH 97 Conference Proceedings},
series = {Annual Conference on Computer Graphics \& Interactive Techniques},
abstract = {We present a promising new mathematical method for tracking a user's pose (position and orientation) for interactive computer graphics. The method, which is applicable to a wide variety of both commercial and experimental systems, improves accuracy by properly assimilating sequential observations, filtering sensor measurements, and by concurrently autocalibrating source and sensor devices. It facilitates user motion prediction, multisensor data fusion, and higher report rates with lower latency than previous methods.\par Tracking systems determine the user's pose by measuring signals from low-level hardware sensors. For reasons of physics and economics, most systems make multiple sequential measurements which are then combined to produce a single tracker report. For example, commercial magnetic trackers using the SPASYN (Space Synchro) system sequentially measure three magnetic vectors and then combine them mathematically to produce a report of the sensor pose.\par Our new approach produces tracker reports as each new low-level sensor measurement is made rather than waiting to form a complete collection of observations. Because single observations under-constrain the mathematical solution, we refer to our approach as single-constraint-at-a-time or SCAAT tracking. The key is that the single observations provide some information about the user's state, and thus can be used to incrementally improve a previous estimate. We recursively apply this principle, incorporating new sensor data as soon as it is measured. With this approach we are able to generate estimates more frequently, with less latency, and with improved accuracy. We present results from both an actual implementation, and from extensive simulations.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
We present a promising new mathematical method for tracking a user's pose (position and orientation) for interactive computer graphics. The method, which is applicable to a wide variety of both commercial and experimental systems, improves accuracy by properly assimilating sequential observations, filtering sensor measurements, and by concurrently autocalibrating source and sensor devices. It facilitates user motion prediction, multisensor data fusion, and higher report rates with lower latency than previous methods. Tracking systems determine the user's pose by measuring signals from low-level hardware sensors. For reasons of physics and economics, most systems make multiple sequential measurements which are then combined to produce a single tracker report. For example, commercial magnetic trackers using the SPASYN (Space Synchro) system sequentially measure three magnetic vectors and then combine them mathematically to produce a report of the sensor pose. Our new approach produces tracker reports as each new low-level sensor measurement is made rather than waiting to form a complete collection of observations. Because single observations under-constrain the mathematical solution, we refer to our approach as single-constraint-at-a-time or SCAAT tracking. The key is that the single observations provide some information about the user's state, and thus can be used to incrementally improve a previous estimate. We recursively apply this principle, incorporating new sensor data as soon as it is measured. With this approach we are able to generate estimates more frequently, with less latency, and with improved accuracy. We present results from both an actual implementation, and from extensive simulations. |
1996
|
| Gregory Francis Welch SCAAT: Incremental Tracking with Incomplete Information PhD Thesis University of North Carolina at Chapel Hill, 1996. @phdthesis{Welch1996,
title = {{SCAAT}: Incremental Tracking with Incomplete Information},
author = {Gregory Francis Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1996.pdf},
year = {1996},
date = {1996-01-01},
address = {Chapel Hill, NC},
school = {University of North Carolina at Chapel Hill},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
|
| Greg Welch; Gary Bishop One-Step-at-a-Time Tracking Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR96-021, 1996. @techreport{Welch1996b,
title = {One-Step-at-a-Time Tracking},
author = {Greg Welch and Gary Bishop},
year = {1996},
date = {1996-01-01},
number = {TR96-021},
institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
|
1995
|
| Greg Welch Hybrid Self-Tracker: An Inertial/Optical Hybrid Three-Dimensional Tracking System Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR95-048, 1995. @techreport{Welch1995aa,
  author      = {Greg Welch},
  title       = {Hybrid Self-Tracker: An Inertial/Optical Hybrid Three-Dimensional Tracking System},
  institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
  number      = {TR95-048},
  year        = {1995},
  date        = {1995-01-01},
  url         = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1995aa.pdf},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
|
| Greg Welch A Survey of Power Management Techniques in Mobile Computing Operating Systems Journal Article In: ACM Operating Systems Review (SIGOPS-OSR), vol. 29, no. 4, pp. 47-56, 1995. @article{Welch1995ab,
title = {A Survey of Power Management Techniques in Mobile Computing Operating Systems},
author = {Greg Welch},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1995ab.pdf},
doi = {10.1145/219282.219293},
year = {1995},
date = {1995-01-01},
journal = {ACM Operating Systems Review (SIGOPS-OSR)},
volume = {29},
number = {4},
pages = {47--56},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
|
| Greg Welch; Gary Bishop An Introduction to the Kalman filter Technical Report University of North Carolina at Chapel Hill, Department of Computer Science no. TR95-041, 1995, (The article has also been translated into Chinese by Xuchen Yao, a student at The Institute of Acoustics of The Chinese Academy of Sciences. See also our Kalman filter web site at https://www.cs.unc.edu/~welch/kalman/index.html.). @techreport{Welch1995b,
title = {An Introduction to the {Kalman} Filter},
author = {Greg Welch and Gary Bishop},
url = {https://sreal.ucf.edu/wp-content/uploads/2017/02/kalman_intro.pdf},
year = {1995},
date = {1995-01-01},
number = {TR95-041},
institution = {University of North Carolina at Chapel Hill, Department of Computer Science},
note = {The article has also been translated into Chinese by Xuchen Yao, a student at The Institute of Acoustics of The Chinese Academy of Sciences. See also our Kalman filter web site at https://www.cs.unc.edu/\~{}welch/kalman/index.html.},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
|
1989
|
| Gregory F. Welch Versicom: Versatile Communications Software Unpublished 1989, (NASA Jet Propulsion Laboratory). @unpublished{Welch1989aa,
  author    = {Gregory F. Welch},
  title     = {Versicom: Versatile Communications Software},
  year      = {1989},
  date      = {1989-07-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1989aa.pdf},
  note      = {NASA Jet Propulsion Laboratory},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|
1986
|
| Gregory F. Welch; James P. Williams The Easy Chair: A Microprocessor-Controlled Wheelchair for Children With Muscular Disorders Unpublished 1986, (Purdue University, E.E.T. 490/491 Senior Design Project, Final Report). @unpublished{Welch1986aa,
  author    = {Gregory F. Welch and James P. Williams},
  title     = {The Easy Chair: A Microprocessor-Controlled Wheelchair for Children With Muscular Disorders},
  year      = {1986},
  date      = {1986-05-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1986_EasyChair-red.pdf},
  note      = {Purdue University, E.E.T. 490/491 Senior Design Project, Final Report},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|
| Gregory F. Welch The Infrared Touch-Pad Unpublished 1986, (Purdue University, E.E.T. 421 Report). @unpublished{Welch1986ab,
  author    = {Gregory F. Welch},
  title     = {The Infrared Touch-Pad},
  year      = {1986},
  date      = {1986-02-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1986ab.pdf},
  note      = {Purdue University, E.E.T. 421 Report},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|
1985
|
| Gregory F. Welch; James P. Williams The Easy Chair: A Microprocessor-Controlled Wheelchair for Children With Muscular Disorders Unpublished 1985, (Purdue University, E.E.T. 490/491 Senior Design Project, Preliminary Report). @unpublished{Welch1985aa,
  author    = {Gregory F. Welch and James P. Williams},
  title     = {The Easy Chair: A Microprocessor-Controlled Wheelchair for Children With Muscular Disorders},
  year      = {1985},
  date      = {1985-12-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Welch1985_EasyChair.pdf},
  note      = {Purdue University, E.E.T. 490/491 Senior Design Project, Preliminary Report},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|
| James P. Williams; Gregory F. Welch The Pressure Sensitive Touch-Pad Unpublished 1985, (Purdue University, E.E.T. 454 Project Report). @unpublished{Williams1985aa,
  author    = {James P. Williams and Gregory F. Welch},
  title     = {The Pressure Sensitive Touch-Pad},
  year      = {1985},
  date      = {1985-04-01},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2017/02/Williams1985aa.pdf},
  note      = {Purdue University, E.E.T. 454 Project Report},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {unpublished}
}
|