Sidenmark, Ludwig; Mardanbegi, Diako; Gomez, Argenis Ramirez; Clarke, Christopher; Gellersen, Hans
BimodalGaze: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement (Inproceedings)
In: ACM Symposium on Eye Tracking Research and Applications, Association for Computing Machinery, Stuttgart, Germany, 2020, ISBN: 9781450371339.
Abstract | Links | BibTeX | Tags: Eye tracking, Eye-head coordination, gaze interaction, Gaze interaction in 3D (VR/AR/MR & real world), Gaze-supported multimodal interaction, Refinement, Virtual reality
@inproceedings{3379155.3391312,
  title = {{BimodalGaze}: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement},
  author = {Ludwig Sidenmark and Diako Mardanbegi and Argenis Ramirez Gomez and Christopher Clarke and Hans Gellersen},
  url = {https://doi.org/10.1145/3379155.3391312},
  doi = {10.1145/3379155.3391312},
  isbn = {9781450371339},
  year = {2020},
  date = {2020-01-01},
  booktitle = {ACM Symposium on Eye Tracking Research and Applications},
  publisher = {Association for Computing Machinery},
  address = {Stuttgart, Germany},
  series = {ETRA '20 Full Papers},
  abstract = {Eye gaze is a fast and ergonomic modality for pointing but limited in precision and accuracy. In this work, we introduce BimodalGaze, a novel technique for seamless head-based refinement of a gaze cursor. The technique leverages eye-head coordination insights to separate natural from gestural head movement. This allows users to quickly shift their gaze to targets over larger fields of view with naturally combined eye-head movement, and to refine the cursor position with gestural head movement. In contrast to an existing baseline, head refinement is invoked automatically, and only if a target is not already acquired by the initial gaze shift. Study results show that users reliably achieve fine-grained target selection, but we observed a higher rate of initial selection errors affecting overall performance. An in-depth analysis of user performance provides insight into the classification of natural versus gestural head movement, for improvement of BimodalGaze and other potential applications.},
  keywords = {Eye tracking, Eye-head coordination, gaze interaction, Gaze interaction in 3D (VR/AR/MR & real world), Gaze-supported multimodal interaction, Refinement, Virtual reality},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Zhang, Guangtao; Hansen, John Paulin
Accessible Control of Telepresence Robots Based on Eye Tracking (Inproceedings)
In: Proceedings of the 11th ACM Symposium on Eye Tracking Research & Applications, Association for Computing Machinery, Denver, Colorado, 2019, ISBN: 9781450367097.
Abstract | Links | BibTeX | Tags: accessibility, assistive technology, Eye tracking, gaze interaction, Gaze-based assistive technologies, Gaze-controlled vehicles, human-robot interaction, telepresence robots
@inproceedings{10.1145/3314111.3322869,
  title = {Accessible Control of Telepresence Robots Based on Eye Tracking},
  author = {Guangtao Zhang and John Paulin Hansen},
  url = {https://doi.org/10.1145/3314111.3322869},
  doi = {10.1145/3314111.3322869},
  isbn = {9781450367097},
  year = {2019},
  date = {2019-01-01},
  booktitle = {Proceedings of the 11th ACM Symposium on Eye Tracking Research \& Applications},
  publisher = {Association for Computing Machinery},
  address = {Denver, Colorado},
  series = {ETRA '19},
  abstract = {Gaze may be a good alternative input modality for people with limited hand mobility. This accessible control based on eye tracking can be implemented into telepresence robots, which are widely used to promote remote social interaction and providing the feeling of presence. This extended abstract introduces a Ph.D. research project, which takes a two-phase approach towards investigating gaze-controlled telepresence robots. A system supporting gaze-controlled telepresence has been implemented. However, our current findings indicate that there were still serious challenges with regard to gaze-based driving. Potential improvements are discussed, and plans for future study are also presented.},
  keywords = {accessibility, assistive technology, Eye tracking, gaze interaction, Gaze-based assistive technologies, Gaze-controlled vehicles, human-robot interaction, telepresence robots},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Minakata, Katsumi; Thomsen, Martin; Hansen, John Paulin
Bicycles and Wheelchairs for Locomotion Control of a Simulated Telerobot Supported by Gaze- and Head-Interaction (Inproceedings)
In: Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference, pp. 371–378, Association for Computing Machinery, Corfu, Greece, 2018, ISBN: 9781450363907.
Abstract | Links | BibTeX | Tags: accessibility, Eye writing, gaze interaction, human-robot interaction, Telerobot, Virtual reality
@inproceedings{10.1145/3197768.3201573,
  title = {Bicycles and Wheelchairs for Locomotion Control of a Simulated Telerobot Supported by Gaze- and Head-Interaction},
  author = {Katsumi Minakata and Martin Thomsen and John Paulin Hansen},
  url = {https://doi.org/10.1145/3197768.3201573},
  doi = {10.1145/3197768.3201573},
  isbn = {9781450363907},
  year = {2018},
  date = {2018-01-01},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference},
  pages = {371--378},
  publisher = {Association for Computing Machinery},
  address = {Corfu, Greece},
  series = {PETRA '18},
  abstract = {We present an interface for control of a telerobot that supports field-of-view panning, mode selections and keyboard typing by head- and gaze-interaction. The utility of the interface was tested by 19 able-bodied participants controlling a virtual telerobot from a wheelchair mounted on rollers which measure its wheel rotations, and by 14 able-bodied participants controlling the telerobot with a exercise bike. Both groups tried the interface twice: with head- and with gaze-interaction. Comparing wheelchair and bike locomotion control, the wheelchair simulator was faster and more manoeuvrable. Comparing gaze- and head-interaction, the two input methods were preferred by an equal number of participants. However, participants made more errors typing with gaze than with head. We conclude that virtual reality is a viable way of specifying and testing interfaces for telerobots and an effective probe for eliciting peoples subjective experiences.},
  keywords = {accessibility, Eye writing, gaze interaction, human-robot interaction, Telerobot, Virtual reality},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Agustin, Javier San; Skovsgaard, Henrik; Hansen, John Paulin; Hansen, Dan Witzner
Low-cost gaze interaction: ready to deliver the promises (Incollection)
In: CHI'09 Extended Abstracts on Human Factors in Computing Systems, pp. 4453–4458, 2009.
BibTeX | Tags: eye typing, Eye writing, gaze interaction, Gaze interaction with ordinary computers, Gaze-based assistive technologies, low-cost gaze tracking, performance evaluation, universal access
@incollection{san2009low,
  author    = {Javier San Agustin and Henrik Skovsgaard and John Paulin Hansen and Dan Witzner Hansen},
  title     = {Low-cost gaze interaction: ready to deliver the promises},
  booktitle = {CHI'09 Extended Abstracts on Human Factors in Computing Systems},
  pages     = {4453--4458},
  year      = {2009},
  date      = {2009-01-01},
  keywords  = {eye typing, Eye writing, gaze interaction, Gaze interaction with ordinary computers, Gaze-based assistive technologies, low-cost gaze tracking, performance evaluation, universal access},
  pubstate  = {published},
  tppubtype = {incollection}
}