Dr. Gerd Bruder – Patents
2024
Greg Welch; Gerd Bruder; Salam Daher; Jason Hochreiter; Mindi Anderson; Laura Gonzalez; Desiree A. Diaz
Physical-Virtual Patient System Patent
US 12,008,917, 2024.
@patent{Welch2024ab,
  title         = {Physical-Virtual Patient System},
  author        = {Welch, Greg and Bruder, Gerd and Daher, Salam and Hochreiter, Jason and Anderson, Mindi and Gonzalez, Laura and Diaz, Desiree A.},
  url           = {https://sreal.ucf.edu/wp-content/uploads/2024/06/US12008917.pdf
https://ppubs.uspto.gov/dirsearch-public/print/downloadPdf/12008917},
  year          = {2024},
  date          = {2024-06-11},
  urldate       = {2024-06-11},
  number        = {US 12,008,917},
  abstract      = {The invention pertains to methods for monitoring the operational status of a home automation system through extrinsic visual and audible means. Initial training periods involve capturing image and audio data representative of nominal operation, which is then processed to identify operational indicators. Unsupervised machine learning models are trained with these indicators to construct a model of normalcy and identify expectation violations in the system's operational pattern. After meeting specific stopping criteria, real-time monitoring is initiated. When an expectation violation is detected, contrastive collages or sequences are generated comprising nominal and anomalous data. These are then transmitted to an end user, effectively conveying the context of the detected anomalies. Further features include providing deep links to smartphone applications for home automation configuration and the use of auditory scene analysis techniques. The invention provides a multi-modal approach to home automation monitoring, leveraging machine learning for robust anomaly detection.},
  internal-note = {NOTE(review): this abstract appears copied from US 11,954,900 (home-automation monitoring), not the Physical-Virtual Patient System -- verify against the issued US 12,008,917 patent text.},
  pubstate      = {published},
  tppubtype     = {patent}
}
Greg Welch; Gerd Bruder; Ryan Schubert; Austin Erickson
Audiovisual Detection of Expectation Violations in Disparate Home Automation Systems Patent
US 11,954,900, 2024.
@patent{Welch2024aa,
  title     = {Audiovisual Detection of Expectation Violations in Disparate Home Automation Systems},
  author    = {Welch, Greg and Bruder, Gerd and Schubert, Ryan and Erickson, Austin},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2024/04/US11954900.pdf
https://ppubs.uspto.gov/dirsearch-public/print/downloadPdf/11954900},
  year      = {2024},
  date      = {2024-04-09},
  urldate   = {2024-04-09},
  number    = {US 11,954,900},
  abstract  = {The invention pertains to methods for monitoring the operational status of a home automation system through extrinsic visual and audible means. Initial training periods involve capturing image and audio data representative of nominal operation, which is then processed to identify operational indicators. Unsupervised machine learning models are trained with these indicators to construct a model of normalcy and identify expectation violations in the system's operational pattern. After meeting specific stopping criteria, real-time monitoring is initiated. When an expectation violation is detected, contrastive collages or sequences are generated comprising nominal and anomalous data. These are then transmitted to an end user, effectively conveying the context of the detected anomalies. Further features include providing deep links to smartphone applications for home automation configuration and the use of auditory scene analysis techniques. The invention provides a multi-modal approach to home automation monitoring, leveraging machine learning for robust anomaly detection.},
  pubstate  = {published},
  tppubtype = {patent}
}
2023
Gerd Bruder; Greg Welch; Kangsoo Kim; Zubin Choudhary
Spatial positioning of targeted object magnification Patent
US 11,798,127, 2023.
@patent{Bruder2023aa,
  title     = {Spatial positioning of targeted object magnification},
  author    = {Bruder, Gerd and Welch, Greg and Kim, Kangsoo and Choudhary, Zubin},
  url       = {https://image-ppubs.uspto.gov/dirsearch-public/print/downloadPdf/11798127
https://sreal.ucf.edu/wp-content/uploads/2023/10/11798127.pdf},
  year      = {2023},
  date      = {2023-10-24},
  urldate   = {2023-10-24},
  number    = {US 11,798,127},
  abstract  = {One or more cameras capture objects at a higher resolution than the human eye can perceive. Objects are segmented from the background of the image and scaled to human perceptible size. The scaled-up objects are superimposed over the unscaled background. This is presented to a user via a display whereby the process selectively amplifies the size of the objects' spatially registered retinal projection while maintaining a natural (unmodified) view in the remainder of the visual field.},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Matthew Gottsacker; Nahal Norouzi; Gerd Bruder
Intelligent Digital Interruption Management Patent
US 11,729,448, 2023.
@patent{Welch2022ab,
  title     = {Intelligent Digital Interruption Management},
  author    = {Welch, Gregory and Gottsacker, Matthew and Norouzi, Nahal and Bruder, Gerd},
  url       = {https://image-ppubs.uspto.gov/dirsearch-public/print/downloadPdf/11729448
https://sreal.ucf.edu/wp-content/uploads/2023/08/US11729448.pdf},
  year      = {2023},
  date      = {2023-08-15},
  urldate   = {2023-08-15},
  number    = {US 11,729,448},
  abstract  = {The present invention is a system to manage interrupt notifications on an operating system based on the characteristics of content in which an end user is currently immersed or engaged. For example, relatively high bitrate video throughput is indicative of corresponding high information depth and more action occurring in the scene. For periods of high information depth, interrupt notifications are deferred until the information depth falls into a relative trough. Additional embodiments of the invention process scene transitions, technical cues, dialog and lyrics to release queued interrupt notification at optimal times. A vamping process is also provided when interrupt notification are released to keep the end user prescient to the background application in which they were engaged prior to the interrupt notification coming into focus.},
  pubstate  = {published},
  tppubtype = {patent}
}
Greg Welch; Joseph LaViola; Francisco Guido-Sanz; Gerd Bruder; Mindi Anderson; Ryan Schubert
Adaptive visual overlay for anatomical simulation Patent
US 11,557,216 B2, 2023.
@patent{Welch2023av,
  title     = {Adaptive visual overlay for anatomical simulation},
  author    = {Welch, Greg and LaViola, Joseph and Guido-Sanz, Francisco and Bruder, Gerd and Anderson, Mindi and Schubert, Ryan},
  url       = {https://image-ppubs.uspto.gov/dirsearch-public/print/downloadPdf/11557216
https://sreal.ucf.edu/wp-content/uploads/2023/11/11557216.pdf},
  year      = {2023},
  date      = {2023-01-17},
  urldate   = {2023-01-17},
  number    = {US 11,557,216 B2},
  abstract  = {An anatomical feature simulation unit is a physical device designed to help simulate an anatomical feature (e.g., a wound) on an object (e.g., a human being or human surrogate such as a medical manikin) for instructing a trainee to learn or practice treatment skills. For the trainee, the simulation looks like a real body part when viewed using an Augmented Reality (AR) system. Responsive to a change in the anatomic state of the object (e.g., bending a knee or raising of an arm) not only the spatial location and orientation of the anatomical feature stays locked on the object in the AR system, but the characteristics of the anatomical feature change based on the physiologic logic of changing said anatomical state (e.g., greater or less blood flow, opening or closing of a wound).},
  pubstate  = {published},
  tppubtype = {patent}
}
Greg Welch; Gerd Bruder; Ryan McMahan
Grammar Dependent Tactile Pattern Invocation Patent
US 11,550,470, 2023.
@patent{Welch2023aa,
  title     = {Grammar Dependent Tactile Pattern Invocation},
  author    = {Welch, Greg and Bruder, Gerd and McMahan, Ryan},
  url       = {https://ppubs.uspto.gov/pubwebapp/external.html?q=11550470
https://sreal.ucf.edu/wp-content/uploads/2023/01/US11550470.pdf},
  year      = {2023},
  date      = {2023-01-10},
  urldate   = {2023-01-10},
  number    = {US 11,550,470},
  abstract  = {A system for translating text streams of alphanumeric characters into preconfigured, haptic output. Text strings are parsed against a grammar index to locate assigned haptic or vibratory output. This may include speech-to-text, chat messaging, or text arrays of any kind. When a match is located, a standardized haptic output pattern is invoked through a haptic device. A device affordance module adapts the haptic output pattern to the capabilities of the target haptic device.},
  pubstate  = {published},
  tppubtype = {patent}
}
2022
Greg Welch; Gerd Bruder
Medical Monitoring Virtual Human with Situational Awareness Patent
US 11,535,261, 2022.
@patent{Welch2022ac,
  title     = {Medical Monitoring Virtual Human with Situational Awareness},
  author    = {Welch, Greg and Bruder, Gerd},
  url       = {https://image-ppubs.uspto.gov/dirsearch-public/print/downloadPdf/11535261},
  year      = {2022},
  date      = {2022-12-17},
  urldate   = {2022-12-17},
  number    = {US 11,535,261},
  abstract  = {Virtual humans exhibit behaviors associated with inputs and outputs of an autonomous control system for medical monitoring of patients. To foster the awareness and trust, the virtual humans exhibit situational awareness via apparent (e.g., rendered) behaviors based on inputs such as physiological vital signs. The virtual humans also exhibit situational control via apparent behaviors associated with outputs such as direct control of devices, functions of control, actions based on high-level goals, and the optional use of virtual versions of conventional physical controls. A dynamic virtual human who continually exhibits awareness of the system state and relevant contextual circumstances, along with the ability to directly control the system, is used to reduce negative feelings associated with the system such as uncertainty, concern, stress, or anxiety on the part of real human patients.},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Gerd Bruder
Augmentation of Relative Pose In Co-located Devices Patent
US 11,467,399, 2022.
@patent{Welch2022aa,
  title     = {Augmentation of Relative Pose In Co-located Devices},
  author    = {Welch, Gregory and Bruder, Gerd},
  url       = {https://ppubs.uspto.gov/pubwebapp/external.html?q=11467399
https://sreal.ucf.edu/wp-content/uploads/2022/10/US11467399.pdf},
  year      = {2022},
  date      = {2022-10-11},
  urldate   = {2022-10-11},
  number    = {US 11,467,399},
  abstract  = {This invention relates to tracking of hand-held devices and vehicles with respect to each other, in circumstances where there are two or more users or existent objects interacting in the same share space (co-location). It extends conventional global and body-relative approaches to ``cooperatively'' estimate the relative poses between all useful combinations of user-worn tracked devices such as HMDs and hand-held controllers worn (or held) by multiple users. Additionally, the invention provides for tracking of vehicles such as cars and unmanned aerial vehicles.},
  pubstate  = {published},
  tppubtype = {patent}
}
Gerd Bruder; Gregory Welch; Kangsoo Kim; Zubin Choudhary
Intelligent Object Magnification for Augmented Reality Displays Patent
US 11,410,270, 2022.
BibTeX | Links:
@patent{Bruder2022ph,
  title     = {Intelligent Object Magnification for Augmented Reality Displays},
  author    = {Bruder, Gerd and Welch, Gregory and Kim, Kangsoo and Choudhary, Zubin},
  url       = {https://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=11410270.PN.&OS=PN/11410270&RS=PN/11410270
https://sreal.ucf.edu/wp-content/uploads/2022/08/Bruder2022ph.pdf},
  year      = {2022},
  date      = {2022-08-09},
  urldate   = {2022-08-09},
  number    = {US 11,410,270},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Gerd Bruder; Ryan McMahan
Visual-Tactile Virtual Telepresence Patent
US 11,287,971, 2022.
BibTeX | Links:
@patent{US11287971,
  title     = {Visual-Tactile Virtual Telepresence},
  author    = {Welch, Gregory and Bruder, Gerd and McMahan, Ryan},
  url       = {https://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=11287971.PN.&OS=PN/11287971&RS=PN/11287971
https://sreal.ucf.edu/wp-content/uploads/2022/08/US11287971.pdf},
  year      = {2022},
  date      = {2022-03-29},
  urldate   = {2022-03-29},
  number    = {US 11,287,971},
  pubstate  = {published},
  tppubtype = {patent}
}
2021
Gregory Welch; Gerd Bruder
Autonomous systems human controller simulation Patent
US 11,148,671, 2021.
BibTeX | Links:
@patent{Welch2021oj,
  title     = {Autonomous systems human controller simulation},
  author    = {Welch, Gregory and Bruder, Gerd},
  url       = {https://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=11148671.PN.&OS=PN/11148671&RS=PN/11148671
https://sreal.ucf.edu/wp-content/uploads/2021/10/US11148671.pdf},
  year      = {2021},
  date      = {2021-10-19},
  urldate   = {2021-10-19},
  number    = {US 11,148,671},
  location  = {US},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Ryan P. McMahan; Gerd Bruder
Low Latency Tactile Telepresence Patent
US 11,106,357, 2021.
@patent{Welch2021bb,
  title     = {Low Latency Tactile Telepresence},
  author    = {Welch, Gregory and McMahan, Ryan P. and Bruder, Gerd},
  url       = {https://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&p=1&u=/netahtml/PTO/search-bool.html&r=1&f=G&l=50&co1=AND&d=PTXT&s1=11106357&OS=11106357&RS=11106357
https://sreal.ucf.edu/wp-content/uploads/2021/09/US11106357.pdf},
  year      = {2021},
  date      = {2021-08-31},
  urldate   = {2021-08-31},
  number    = {US 11,106,357},
  abstract  = {A system for remote tactile telepresence wherein an array of predefined touch gestures are abstracted into cataloged values and invoked either by pattern matching, by assigned name or visual indicia. A local and remote cache of the catalog reduces latency even for complicated gestures as only a gesture identifier needs to be transmitted to a haptic output destination. Additional embodiments translate gestures to different haptic device affordances. Tactile telepresence sessions are time-coded along with audiovisual content wherein playback is heard, seen, and felt. Another embodiment associates motion capture associated with the tactile profile so that remote, haptic recipients may see renderings of objects (e.g., hands) imparting vibrotactile sensations.},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Gerd Bruder
Relative Pose Data Augmentation of Tracked Devices in Virtual Environments Patent
US 11,042,028 B1, 2021.
@patent{Welch2021,
  title     = {Relative Pose Data Augmentation of Tracked Devices in Virtual Environments},
  author    = {Welch, Gregory and Bruder, Gerd},
  url       = {https://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&co1=AND&d=PTXT&s1=11042028&OS=11042028&RS=11042028
https://sreal.ucf.edu/wp-content/uploads/2021/09/Welch2021wa-002.pdf},
  year      = {2021},
  date      = {2021-06-22},
  urldate   = {2021-06-22},
  number    = {US 11,042,028 B1},
  abstract  = {This invention relates to tracking of user-worn and hand-held devices with respect to each other, in circumstances where there are two or more users interacting in the same share space. It extends conventional global and body-relative approaches to ``cooperatively'' estimate the relative poses between all useful combinations of user-worn tracked devices such as HMDs and hand-held controllers worn (or held) by multiple users. For example, a first user's HMD estimates its absolute global pose in the coordinate frame associated with the externally-mounted devices, as well as its relative pose with respect to all other HMDs, hand-held controllers, and other user held/worn tracked devices in the environment. In this way, all HMDs (or as many as appropriate) are tracked with respect to each other, all HMDs are tracked with respect to all hand-held controllers, and all hand-held controllers are tracked with respect to all other hand-held controllers.},
  pubstate  = {published},
  tppubtype = {patent}
}
2020
Gregory Welch; Joseph LaViola Jr.; Francisco Guido-Sanz; Gerd Bruder; Mindi Anderson; Ryan Schubert
Adaptive Visual Overlay Wound Simulation Patent
US 10,854,098 B1, 2020.
@patent{Welch2020c,
  title     = {Adaptive Visual Overlay Wound Simulation},
  author    = {Welch, Gregory and LaViola, Jr., Joseph and Guido-Sanz, Francisco and Bruder, Gerd and Anderson, Mindi and Schubert, Ryan},
  url       = {https://sreal.ucf.edu/wp-content/uploads/2020/12/US10854098.pdf
http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=/netahtml/PTO/srchnum.htm&r=1&f=G&l=50&s1=10,854,098},
  year      = {2020},
  date      = {2020-12-01},
  number    = {US 10,854,098 B1},
  abstract  = {A wound simulation unit is a physical device designed to help simulate a wound on an object (e.g., a human being or human surrogate such as a medical manikin) for instructing a trainee to learn or practice wound-related treatment skills. For the trainee, the simulation looks like a real wound when viewed using an Augmented Reality (AR) system. Responsive to a change in the anatomic state of the object (e.g., bending a knee or raising of an arm) not only the spatial location and orientation of the wound stays locked on the object in the AR system, but the characteristics of the wound change based on the physiologic logic of changing said anatomical state (e.g., greater or less blood flow, opening or closing of the wound).},
  pubstate  = {published},
  tppubtype = {patent}
}
Gregory Welch; Joseph LaViola Jr.; Francisco Guido-Sanz; Gerd Bruder; Mindi Anderson; Ryan Schubert
Multisensory Wound Simulation Patent
US 10,803,761 B2, 2020.
@patent{Welch2020b,
  title     = {Multisensory Wound Simulation},
  author    = {Welch, Gregory and LaViola, Jr., Joseph and Guido-Sanz, Francisco and Bruder, Gerd and Anderson, Mindi and Schubert, Ryan},
  url       = {http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=/netahtml/PTO/srchnum.htm&r=1&f=G&l=50&s1=10803761.PN.&OS=PN/10803761&RS=PN/10803761
https://sreal.ucf.edu/wp-content/uploads/2020/10/welch2020b.pdf},
  year      = {2020},
  date      = {2020-10-13},
  number    = {US 10,803,761 B2},
  abstract  = {A Tactile-Visual Wound (TVW) simulation unit is a physical device designed to help simulate a wound on a human being or human surrogate (e.g., a medical manikin) for instructing a trainee to learn or practice wound-related treatment skills. For the trainee, the TVW would feel (to the touch) like a real wound, look like a real wound when viewed using an Augmented Reality (AR) system, and appear to behave like a real wound when manipulated.},
  pubstate  = {published},
  tppubtype = {patent}
}