Porta, Marco; Dondi, Piercarlo; Pianetta, Alice; Cantoni, Virginio
SPEye: A Calibration-Free Gaze-Driven Text Entry Technique Based on Smooth Pursuit Journal Article
In: IEEE Transactions on Human-Machine Systems, vol. 52, no. 2, pp. 312–323, 2022.
@article{9619860,
title = {SPEye: A Calibration-Free Gaze-Driven Text Entry Technique Based on Smooth Pursuit},
author = {Marco Porta and Piercarlo Dondi and Alice Pianetta and Virginio Cantoni},
doi = {10.1109/THMS.2021.3123202},
year = {2022},
date = {2022-04-01},
urldate = {2021-01-01},
journal = {IEEE Transactions on Human-Machine Systems},
volume = {52},
number = {2},
pages = {312--323},
abstract = {Gaze-based text entry is undoubtedly one of the most useful applications of eye-tracking technology for human-machine interaction, both in the assistive context (users with severe motor disabilities can exploit such writing modalities to communicate with the world) and as a way to allow touchless text input in everyday life. Different eye-driven text entry methods have been developed to date, and almost all of them require preliminary calibration procedures to work correctly. When a short text, such as a password or a PIN, needs to be entered without using hands or voice, calibration may be perceived as an unnecessary nuisance (and may not be properly maintained in public places due to “ambient noise,” caused, for example, by nearby people). Inadequate calibration may also be a problem in case of assistive uses. In this article we present SPEye, a calibration-free eye-controlled writing technique based on smooth pursuit. Although its writing speed is significantly lower than that of ordinary calibrated methods, the absence of an initial calibration makes it suitable for short text entry. The technique has been tested through several experiments, obtaining good performances in terms of key strokes per character and total error rate metrics, and receiving positive feedback from the participants in the tests.},
keywords = {Eye writing},
pubstate = {published},
tppubtype = {article}
}
Homola, Barnabás; Sheldon, Isabella; Ago, Stela; Mariani, Milton; Hansen, John Paulin
Prototyping Exoskeleton Interaction for Game-based Rehabilitation Inproceedings
In: CHI Conference on Human Factors in Computing Systems Extended Abstracts, pp. 1–6, 2022.
@inproceedings{homola2022prototyping,
title = {Prototyping Exoskeleton Interaction for Game-based Rehabilitation},
author = {Barnabás Homola and Isabella Sheldon and Stela Ago and Milton Mariani and John Paulin Hansen},
year = {2022},
date = {2022-01-01},
booktitle = {CHI Conference on Human Factors in Computing Systems Extended Abstracts},
pages = {1--6},
keywords = {Gaze-controlled robots},
pubstate = {published},
tppubtype = {inproceedings}
}
Dondi, Piercarlo; Porta, Marco; Donvito, Angelo; Volpe, Giovanni
A gaze-based interactive system to explore artwork imagery Journal Article
In: Journal on Multimodal User Interfaces, 2021, ISSN: 1783-8738.
@article{Dondi2021,
title = {A gaze-based interactive system to explore artwork imagery},
author = {Piercarlo Dondi and Marco Porta and Angelo Donvito and Giovanni Volpe},
url = {https://doi.org/10.1007/s12193-021-00373-z},
doi = {10.1007/s12193-021-00373-z},
issn = {1783-8738},
year = {2021},
date = {2021-05-21},
journal = {Journal on Multimodal User Interfaces},
abstract = {Interactive and immersive technologies can significantly enhance the fruition of museums and exhibits. Several studies have proved that multimedia installations can attract visitors, presenting cultural and scientific information in an appealing way. In this article, we present our workflow for achieving a gaze-based interaction with artwork imagery. We designed both a tool for creating interactive ``gaze-aware'' images and an eye tracking application conceived to interact with those images with the gaze. Users can display different pictures, perform pan and zoom operations, and search for regions of interest with associated multimedia content (text, image, audio, or video). Besides being an assistive technology for motor impaired people (like most gaze-based interaction applications), our solution can also be a valid alternative to the common touch screen panels present in museums, in accordance with the new safety guidelines imposed by the COVID-19 pandemic. Experiments carried out with a panel of volunteer testers have shown that the tool is usable, effective, and easy to learn.},
keywords = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
pubstate = {published},
tppubtype = {article}
}
Ovchinnikova, Anastasia O; Vasilyev, Anatoly N; Zubarev, Ivan P; Kozyrskiy, Bogdan L; Shishkin, Sergei L
MEG-based detection of voluntary eye fixations used to control a computer Journal Article
In: Frontiers in Neuroscience, pp. 38, 2021.
@article{ovchinnikova2021meg,
title = {MEG-based detection of voluntary eye fixations used to control a computer},
author = {Anastasia O Ovchinnikova and Anatoly N Vasilyev and Ivan P Zubarev and Bogdan L Kozyrskiy and Sergei L Shishkin},
year = {2021},
date = {2021-01-01},
journal = {Frontiers in Neuroscience},
pages = {38},
publisher = {Frontiers},
keywords = {Brain-computer interfaces for enhancing gaze interaction, Gaze interaction paradigms, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {article}
}
Zhao, Darisy G; Vasilyev, Anatoly N; Kozyrskiy, Bogdan L; Melnichuk, Eugeny V; Isachenko, Andrey V; Velichkovsky, Boris M; Shishkin, Sergei L
A passive BCI for monitoring the intentionality of the gaze-based moving object selection Journal Article
In: Journal of Neural Engineering, vol. 18, no. 2, pp. 026001, 2021.
@article{zhao2021passive,
title = {A passive BCI for monitoring the intentionality of the gaze-based moving object selection},
author = {Darisy G Zhao and Anatoly N Vasilyev and Bogdan L Kozyrskiy and Eugeny V Melnichuk and Andrey V Isachenko and Boris M Velichkovsky and Sergei L Shishkin},
year = {2021},
date = {2021-01-01},
journal = {Journal of Neural Engineering},
volume = {18},
number = {2},
pages = {026001},
publisher = {IOP Publishing},
keywords = {Brain-computer interfaces for enhancing gaze interaction, Gaze interaction paradigms, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {article}
}
Bafna, Tanya; Bækgaard, Per; Hansen, John Paulin
EyeTell: Tablet-based Calibration-free Eye-typing using Smooth-pursuit movements Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–6, 2021.
@inproceedings{bafna2021eyetell,
title = {EyeTell: Tablet-based Calibration-free Eye-typing using Smooth-pursuit movements},
author = {Tanya Bafna and Per Bækgaard and John Paulin Hansen},
year = {2021},
date = {2021-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--6},
keywords = {calibration-free, Eye writing, eye-tracking, Gaze interaction with mobile devices, mobile, smooth-pursuit, tablet, text entry},
pubstate = {published},
tppubtype = {inproceedings}
}
Bafna, Tanya; Bækgaard, Per; Hansen, John Paulin
Mental fatigue prediction during eye-typing Journal Article
In: PLOS ONE, vol. 16, no. 2, pp. e0246739, 2021.
@article{bafna2021mental,
title = {Mental fatigue prediction during eye-typing},
author = {Tanya Bafna and Per Bækgaard and John Paulin Hansen},
year = {2021},
date = {2021-01-01},
journal = {PLOS ONE},
volume = {16},
number = {2},
pages = {e0246739},
publisher = {Public Library of Science San Francisco, CA USA},
keywords = {Cognition, eye movements, Eye writing, Fatigue, Gaze interaction with ordinary computers, Language, Machine learning, Neurology, Pupil, User context estimation from eye movements},
pubstate = {published},
tppubtype = {article}
}
Mulvey, Fiona Bríd; Mikitovic, Marek; Sadowski, Mateusz; Hou, Baosheng; Rasamoel, Nils David; Hansen, John Paulin; Bækgaard, Per
Gaze Interactive and Attention Aware Low Vision Aids as Future Smart Glasses Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–4, 2021.
@inproceedings{mulvey2021gaze,
title = {Gaze Interactive and Attention Aware Low Vision Aids as Future Smart Glasses},
author = {Fiona Bríd Mulvey and Marek Mikitovic and Mateusz Sadowski and Baosheng Hou and Nils David Rasamoel and John Paulin Hansen and Per Bækgaard},
year = {2021},
date = {2021-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--4},
keywords = {Eye tracking, Gaze interfaces for wearable computing, Gaze-aware systems, Gaze-based assistive technologies, mixed reality, Virtual reality, vision aids, vision loss},
pubstate = {published},
tppubtype = {inproceedings}
}
Bonikowski, Leszek; Gruszczyński, Dawid; Matulewski, Jacek
Open-source Software for Determining the Dynamic Areas of Interest for Eye Tracking Data Analysis Journal Article
In: Procedia Computer Science, vol. 192, pp. 2568–2575, 2021.
@article{bonikowski2021open,
title = {Open-source Software for Determining the Dynamic Areas of Interest for Eye Tracking Data Analysis},
author = {Leszek Bonikowski and Dawid Gruszczyński and Jacek Matulewski},
year = {2021},
date = {2021-01-01},
journal = {Procedia Computer Science},
volume = {192},
pages = {2568--2575},
publisher = {Elsevier},
keywords = {Usability and UX evaluation of gaze-based interfaces},
pubstate = {published},
tppubtype = {article}
}
Di Maio, M.; Dondi, P.; Lombardi, L.; Porta, M.
Hybrid Manual and Gaze-Based Interaction With a Robotic Arm Inproceedings
In: Proceedings of ETFA '21 (26th IEEE Conference on Emerging Technologies and Factory Automation), 2021.
@inproceedings{DiMaio2021-8654,
title = {Hybrid Manual and Gaze-Based Interaction With a Robotic Arm},
author = {M. Di Maio and P. Dondi and L. Lombardi and M. Porta},
year = {2021},
date = {2021-01-01},
booktitle = {Proceedings of ETFA '21 (26th IEEE Conference on Emerging Technologies and Factory Automation)},
abstract = {Robots, very widespread in industrial settings, are now used for many different purposes. Even if several tasks can be fully automated, the presence of a human operator is often needed. However, direct robot control is generally complex, and it is thus important to explore alternative and comfortable ways of communication. In this paper, we propose a hybrid approach to human-robot interaction that exploits both manual and gaze-based input. While simple, our solution goes in the direction of enhancing the control capabilities of robots by human operators.},
keywords = {Gaze-controlled GUIs, Gaze-controlled robots},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhao, Darisy G; Karikov, Nikita D; Melnichuk, Eugeny V; Velichkovsky, Boris M; Shishkin, Sergei L
Voice as a mouse click: Usability and effectiveness of simplified hands-free gaze-voice selection Journal Article
In: Applied Sciences, vol. 10, no. 24, pp. 8791, 2020.
@article{zhao2020voice,
title = {Voice as a mouse click: Usability and effectiveness of simplified hands-free gaze-voice selection},
author = {Darisy G Zhao and Nikita D Karikov and Eugeny V Melnichuk and Boris M Velichkovsky and Sergei L Shishkin},
year = {2020},
date = {2020-01-01},
journal = {Applied Sciences},
volume = {10},
number = {24},
pages = {8791},
publisher = {Multidisciplinary Digital Publishing Institute},
keywords = {Gaze interaction paradigms, Gaze-supported multimodal interaction, Voice-supported gaze interaction},
pubstate = {published},
tppubtype = {article}
}
Araujo, Jacopo M; Zhang, Guangtao; Hansen, John Paulin; Puthusserypady, Sadasivan
Exploring eye-gaze wheelchair control Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–8, 2020.
@inproceedings{araujo2020exploring,
title = {Exploring eye-gaze wheelchair control},
author = {Jacopo M Araujo and Guangtao Zhang and John Paulin Hansen and Sadasivan Puthusserypady},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--8},
keywords = {Gaze interaction in 3D (VR/AR/MR & real world), Gaze-based assistive technologies, Gaze-controlled vehicles},
pubstate = {published},
tppubtype = {inproceedings}
}
Bafna, Tanya; Hansen, John Paulin; Bækgaard, Per
Cognitive load during eye-typing Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–8, 2020.
@inproceedings{bafna2020cognitive,
title = {Cognitive load during eye-typing},
author = {Tanya Bafna and John Paulin Hansen and Per Bækgaard},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--8},
keywords = {Eye writing, Gaze-based assistive technologies, User context estimation from eye movements},
pubstate = {published},
tppubtype = {inproceedings}
}
Hou, Baosheng James; Bækgaard, Per; MacKenzie, Scott; Hansen, John Paulin; Puthusserypady, Sadasivan
GIMIS: Gaze input with motor imagery selection Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–10, 2020.
@inproceedings{hou2020gimis,
title = {GIMIS: Gaze input with motor imagery selection},
author = {Baosheng James Hou and Per Bækgaard and Scott MacKenzie and John Paulin Hansen and Sadasivan Puthusserypady},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--10},
keywords = {Gaze-based assistive technologies, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhang, Guangtao; Hansen, John Paulin
People with Motor Disabilities Using Gaze to Control Telerobots Inproceedings
In: Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems, pp. 1–9, 2020.
@inproceedings{zhang2020people,
title = {People with Motor Disabilities Using Gaze to Control Telerobots},
author = {Guangtao Zhang and John Paulin Hansen},
year = {2020},
date = {2020-01-01},
booktitle = {Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems},
pages = {1--9},
keywords = {Gaze-based assistive technologies, Gaze-controlled vehicles},
pubstate = {published},
tppubtype = {inproceedings}
}
Matulewski, Jacek; Patera, Mateusz
Comparison of three dwell-time-based gaze text entry methods Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–5, 2020.
@inproceedings{matulewski2020comparison,
title = {Comparison of three dwell-time-based gaze text entry methods},
author = {Jacek Matulewski and Mateusz Patera},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--5},
keywords = {Eye writing},
pubstate = {published},
tppubtype = {inproceedings}
}
Casarini, Matteo; Porta, Marco; Dondi, Piercarlo
A Gaze-Based Web Browser with Multiple Methods for Link Selection Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, Association for Computing Machinery, Stuttgart, Germany, 2020, ISBN: 9781450371353.
@inproceedings{10.1145/3379157.3388929,
title = {A Gaze-Based Web Browser with Multiple Methods for Link Selection},
author = {Matteo Casarini and Marco Porta and Piercarlo Dondi},
url = {https://doi.org/10.1145/3379157.3388929},
doi = {10.1145/3379157.3388929},
isbn = {9781450371353},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
publisher = {Association for Computing Machinery},
address = {Stuttgart, Germany},
series = {ETRA '20 Adjunct},
abstract = {This paper presents a gaze-based web browser that allows hands-free navigation through five different link selection methods (namely, Menu, Discrete Cursor, Progressive Zoom, Quick Zoom, and Free Pointing) and two page scrolling techniques. For link selection, the purpose of this multi-approach solution is two-fold. On the one hand, we want users to be able to choose either their preferred methods or those that, in each specific case, are the most suitable (e.g., depending on the kind of link to activate). On the other hand, we wanted to assess the performance and appreciation level of the different approaches through formal tests, to identify their strengths and weaknesses. The browser, which is conceived as an assistive technology tool, also includes a built-in on-screen keyboard and the possibility to save and retrieve bookmarks.},
keywords = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
pubstate = {published},
tppubtype = {inproceedings}
}
Li, Zhenxing; Akkil, Deepak; Raisamo, Roope
Gaze-based kinaesthetic interaction for virtual reality Journal Article
In: Interacting with Computers, vol. 32, no. 1, pp. 17–32, 2020.
@article{li2020gaze,
title = {Gaze-based kinaesthetic interaction for virtual reality},
author = {Zhenxing Li and Deepak Akkil and Roope Raisamo},
year = {2020},
date = {2020-01-01},
journal = {Interacting with Computers},
volume = {32},
number = {1},
pages = {17--32},
publisher = {Oxford University Press},
keywords = {Gaze interaction in 3D (VR/AR/MR & real world), Gaze-supported multimodal interaction, Haptics},
pubstate = {published},
tppubtype = {article}
}
Sidenmark, Ludwig; Mardanbegi, Diako; Gomez, Argenis Ramirez; Clarke, Christopher; Gellersen, Hans
BimodalGaze: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, Association for Computing Machinery, Stuttgart, Germany, 2020, ISBN: 9781450371339.
@inproceedings{3379155.3391312,
title = {BimodalGaze: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement},
author = {Ludwig Sidenmark and Diako Mardanbegi and Argenis Ramirez Gomez and Christopher Clarke and Hans Gellersen},
url = {https://doi.org/10.1145/3379155.3391312},
doi = {10.1145/3379155.3391312},
isbn = {9781450371339},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
publisher = {Association for Computing Machinery},
address = {Stuttgart, Germany},
series = {ETRA '20 Full Papers},
abstract = {Eye gaze is a fast and ergonomic modality for pointing but limited in precision and accuracy. In this work, we introduce BimodalGaze, a novel technique for seamless head-based refinement of a gaze cursor. The technique leverages eye-head coordination insights to separate natural from gestural head movement. This allows users to quickly shift their gaze to targets over larger fields of view with naturally combined eye-head movement, and to refine the cursor position with gestural head movement. In contrast to an existing baseline, head refinement is invoked automatically, and only if a target is not already acquired by the initial gaze shift. Study results show that users reliably achieve fine-grained target selection, but we observed a higher rate of initial selection errors affecting overall performance. An in-depth analysis of user performance provides insight into the classification of natural versus gestural head movement, for improvement of BimodalGaze and other potential applications.},
keywords = {Eye tracking, Eye-head coordination, gaze interaction, Gaze interaction in 3D (VR/AR/MR & real world), Gaze-supported multimodal interaction, Refinement, Virtual reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Rivu, Radiah; Abdrabou, Yasmeen; Pfeuffer, Ken; Esteves, Augusto; Meitner, Stefanie; Alt, Florian
Stare: gaze-assisted face-to-face communication in augmented reality Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–5, 2020.
@inproceedings{rivu2020stare,
title = {Stare: gaze-assisted face-to-face communication in augmented reality},
author = {Radiah Rivu and Yasmeen Abdrabou and Ken Pfeuffer and Augusto Esteves and Stefanie Meitner and Florian Alt},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--5},
keywords = {Gaze interfaces for wearable computing, Gaze-aware systems, Usability and UX evaluation of gaze-based interfaces},
pubstate = {published},
tppubtype = {inproceedings}
}