Combined Head-Eye Tracking for Immersive Virtual Reality. Huang, H., Allison, R. S., & Jenkin, M. In ICAT'2004 14th International Conference on Artificial Reality and Telexistence, Seoul, Korea, November 30th – December 2nd, 2004. Abstract: Real-time gaze tracking is a promising interaction technique for virtual environments. Immersive projection-based virtual reality systems such as the CAVE™ allow users a wide range of natural movements. Unfortunately, most head and eye movement measurement techniques are of limited use during free head and body motion. An improved head-eye tracking system is proposed and developed for use in immersive applications with free head motion. The system is based upon a head-mounted video-based eye tracking system and a hybrid ultrasound-inertial head tracking system. The system can measure the point of regard in a scene in real-time during relatively large head movements. The system will serve as a flexible testbed for evaluating novel gaze-contingent interaction techniques in virtual environments. The calibration of the head-eye tracking system is one of the most important issues that need to be addressed. In this paper, a simple view-based calibration method is proposed.
@inproceedings{Huang:2004kk,
	abstract = {Real-time gaze tracking is a promising interaction technique for virtual environments. Immersive projection-based virtual reality systems such as the CAVE{\texttrademark} allow users a wide range of natural movements. Unfortunately, most head and eye movement measurement techniques are of limited use during free head and body motion. An improved head-eye tracking system is proposed and developed for use in immersive applications with free head motion. The system is based upon a head-mounted video-based eye tracking system and a hybrid ultrasound-inertial head tracking system. The system can measure the point of regard in a scene in real-time during relatively large head movements. The system will serve as a flexible testbed for evaluating novel gaze-contingent interaction techniques in virtual environments. The calibration of the head-eye tracking system is one of the most important issues that need to be addressed. In this paper, a simple view-based calibration method is proposed.},
	address = {Seoul, Korea},
	author = {Huang, H. and Allison, R. S. and Jenkin, M. R. M.},
	booktitle = {ICAT'2004 14th International Conference on Artificial Reality and Telexistence},
	date-added = {2011-05-06 13:26:18 -0400},
	date-modified = {2011-05-18 15:45:14 -0400},
	keywords = {Eye Movements \& Tracking},
	month = nov,
	note = {November 30th--December 2nd, 2004},
	title = {Combined Head-Eye Tracking for Immersive Virtual Reality},
	url-1 = {https://percept.eecs.yorku.ca/papers/icat2004.pdf},
	url-2 = {http://www.vrsj.org/ic-at/https://percept.eecs.yorku.ca/papers/2004/S3-3.pdf},
	year = {2004}}
Downloads: 0
{"_id":{"_str":"51fbd288c5b22c3876001019"},"__v":5,"authorIDs":["5458111c2abc8e9f37000a4d","5e596c1656d60ade0100014f","vnY8GQ5AKXHNi7dqd"],"author_short":["Huang, H.","R.S., A.","Jenkin, M."],"bibbaseid":"huang-rs-jenkin-combinedheadeyetrackingforimmersivevirtualreality-2004","bibdata":{"bibtype":"inproceedings","type":"inproceedings","abstract":"Real-time gaze tracking is a promising interaction technique for virtual environments. Immersive projection-based virtual reality systems such as the CAVETM allow users a wide range of natural movements. Unfortunately, most head and eye movement measurement techniques are of limited use during free head and body motion. An improved head-eye tracking system is proposed and developed for use in immersive applications with free head motion. The system is based upon a head-mounted video-based eye tracking system and a hybrid ultrasound-inertial head tracking system. The system can measure the point of regard in a scene in real-time during relatively large head movements. The system will serve as a flexible testbed for evaluating novel gaze-contingent interaction techniques in virtual environments. The calibration of the head-eye tracking system is one of the most important issues that need to be addressed. 
In this paper, a simple view-based calibration method is proposed.","address":"Seoul, Korea","author":[{"propositions":[],"lastnames":["Huang"],"firstnames":["H."],"suffixes":[]},{"firstnames":["Allison"],"propositions":[],"lastnames":["R.S."],"suffixes":[]},{"propositions":[],"lastnames":["Jenkin"],"firstnames":["M.R.M"],"suffixes":[]}],"booktitle":"ICAT'2004 14th International Conference on Artificial Reality and Telexistance","date-added":"2011-05-06 13:26:18 -0400","date-modified":"2011-05-18 15:45:14 -0400","keywords":"Eye Movements & Tracking","month":"November 30th- December 2nd","title":"Combined Head - Eye Tracking for Immersive Virtual Reality","url-1":"https://percept.eecs.yorku.ca/papers/icat2004.pdf","url-2":"http://www.vrsj.org/ic-at/https://percept.eecs.yorku.ca/papers/2004/S3-3.pdf","year":"2004","bibtex":"@inproceedings{Huang:2004kk,\n\tabstract = {Real-time gaze tracking is a promising interaction technique for virtual environments. Immersive projection-based virtual reality systems such as the\nCAVETM allow users a wide range of natural movements. Unfortunately, most head and eye movement measurement techniques are of limited use during free\nhead and body motion. An improved head-eye tracking system is proposed and developed for use in immersive applications with free head motion. The system is based upon a head-mounted video-based eye tracking system and a hybrid ultrasound-inertial head tracking system. The system can measure the point of regard in a scene in real-time during relatively large head movements. The\nsystem will serve as a flexible testbed for evaluating novel gaze-contingent interaction techniques in virtual environments. The calibration of the head-eye tracking system is one of the most important issues that need to be addressed. In\nthis paper, a simple view-based calibration method is proposed.},\n\taddress = {Seoul, Korea},\n\tauthor = {Huang, H. and Allison R.S. 
and Jenkin, M.R.M},\n\tbooktitle = {ICAT'2004 14th International Conference on Artificial Reality and Telexistance},\n\tdate-added = {2011-05-06 13:26:18 -0400},\n\tdate-modified = {2011-05-18 15:45:14 -0400},\n\tkeywords = {Eye Movements & Tracking},\n\tmonth = {November 30th- December 2nd},\n\ttitle = {Combined Head - Eye Tracking for Immersive Virtual Reality},\n\turl-1 = {https://percept.eecs.yorku.ca/papers/icat2004.pdf},\n\turl-2 = {http://www.vrsj.org/ic-at/https://percept.eecs.yorku.ca/papers/2004/S3-3.pdf},\n\tyear = {2004}}\n\n\n\n","author_short":["Huang, H.","R.S., A.","Jenkin, M."],"key":"Huang:2004kk","id":"Huang:2004kk","bibbaseid":"huang-rs-jenkin-combinedheadeyetrackingforimmersivevirtualreality-2004","role":"author","urls":{"-1":"https://percept.eecs.yorku.ca/papers/icat2004.pdf","-2":"http://www.vrsj.org/ic-at/https://percept.eecs.yorku.ca/papers/2004/S3-3.pdf"},"keyword":["Eye Movements & Tracking"],"metadata":{"authorlinks":{"allison, r":"https://percept.eecs.yorku.ca/bibase%20pubs.shtml"}},"downloads":0},"bibtype":"inproceedings","biburl":"https://bibbase.org/network/files/ibWG96BS4w7ibooE9","downloads":0,"keywords":["eye movements & tracking"],"search_terms":["combined","head","eye","tracking","immersive","virtual","reality","huang","r.s.","jenkin"],"title":"Combined Head - Eye Tracking for Immersive Virtual Reality","title_words":["combined","head","eye","tracking","immersive","virtual","reality"],"year":2004,"dataSources":["kmmXSosvtyJQxBtzs","BPKPSXjrbMGteC59J","MpMK4SvZzj5Fww5vJ","YbBWRH5Fc7xRr8ghk","szZaibkmSiiQBFQG8","DoyrDTpJ7HHCtki3q","JaoxzeTFRfvwgLoCW","XKwRm5Lx8Z9bzSzaP","AELuRZBpnp7nRDaqw"]}