Dondi, Piercarlo; Porta, Marco; Donvito, Angelo; Volpe, Giovanni
A gaze-based interactive system to explore artwork imagery Journal Article
In: Journal on Multimodal User Interfaces, 2021, ISSN: 1783-8738.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@article{Dondi2021,
  author    = {Piercarlo Dondi and Marco Porta and Angelo Donvito and Giovanni Volpe},
  title     = {A gaze-based interactive system to explore artwork imagery},
  journal   = {Journal on Multimodal User Interfaces},
  year      = {2021},
  date      = {2021-05-21},
  issn      = {1783-8738},
  doi       = {10.1007/s12193-021-00373-z},
  url       = {https://doi.org/10.1007/s12193-021-00373-z},
  abstract  = {Interactive and immersive technologies can significantly enhance the fruition of museums and exhibits. Several studies have proved that multimedia installations can attract visitors, presenting cultural and scientific information in an appealing way. In this article, we present our workflow for achieving a gaze-based interaction with artwork imagery. We designed both a tool for creating interactive ``gaze-aware'' images and an eye tracking application conceived to interact with those images with the gaze. Users can display different pictures, perform pan and zoom operations, and search for regions of interest with associated multimedia content (text, image, audio, or video). Besides being an assistive technology for motor impaired people (like most gaze-based interaction applications), our solution can also be a valid alternative to the common touch screen panels present in museums, in accordance with the new safety guidelines imposed by the COVID-19 pandemic. Experiments carried out with a panel of volunteer testers have shown that the tool is usable, effective, and easy to learn.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {article}
}
Di Maio, M.; Dondi, P.; Lombardi, L.; Porta, M.
Hybrid Manual and Gaze-Based Interaction With a Robotic Arm Inproceedings
In: Proceedings of ETFA '21 (26th IEEE Conference on Emerging Technologies and Factory Automation), 2021.
Abstract | BibTeX | Tags: Gaze-controlled GUIs, Gaze-controlled robots
@inproceedings{DiMaio2021-8654,
  author    = {Di Maio, M. and Dondi, P. and Lombardi, L. and Porta, M.},
  title     = {Hybrid Manual and Gaze-Based Interaction With a Robotic Arm},
  booktitle = {Proceedings of ETFA '21 (26th IEEE Conference on Emerging Technologies and Factory Automation)},
  year      = {2021},
  date      = {2021-01-01},
  abstract  = {Robots, very widespread in industrial settings, are now used for many different purposes. Even if several tasks can be fully automated, the presence of a human operator is often needed. However, direct robot control is generally complex, and it is thus important to explore alternative and comfortable ways of communication. In this paper, we propose a hybrid approach to human-robot interaction that exploits both manual and gaze-based input. While simple, our solution goes in the direction of enhancing the control capabilities of robots by human operators.},
  keywords  = {Gaze-controlled GUIs, Gaze-controlled robots},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Casarini, Matteo; Porta, Marco; Dondi, Piercarlo
A Gaze-Based Web Browser with Multiple Methods for Link Selection Inproceedings
In: ACM Symposium on Eye Tracking Research and Applications, Association for Computing Machinery, Stuttgart, Germany, 2020, ISBN: 9781450371353.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@inproceedings{10.1145/3379157.3388929,
  author    = {Matteo Casarini and Marco Porta and Piercarlo Dondi},
  title     = {A Gaze-Based Web Browser with Multiple Methods for Link Selection},
  booktitle = {ACM Symposium on Eye Tracking Research and Applications},
  series    = {ETRA '20 Adjunct},
  publisher = {Association for Computing Machinery},
  address   = {Stuttgart, Germany},
  year      = {2020},
  date      = {2020-01-01},
  isbn      = {9781450371353},
  doi       = {10.1145/3379157.3388929},
  url       = {https://doi.org/10.1145/3379157.3388929},
  abstract  = {This paper presents a gaze-based web browser that allows hands-free navigation through five different link selection methods (namely, Menu, Discrete Cursor, Progressive Zoom, Quick Zoom, and Free Pointing) and two page scrolling techniques. For link selection, the purpose of this multi-approach solution is two-fold. On the one hand, we want users to be able to choose either their preferred methods or those that, in each specific case, are the most suitable (e.g., depending on the kind of link to activate). On the other hand, we wanted to assess the performance and appreciation level of the different approaches through formal tests, to identify their strengths and weaknesses. The browser, which is conceived as an assistive technology tool, also includes a built-in on-screen keyboard and the possibility to save and retrieve bookmarks.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Hansen, John Paulin; Mardanbegi, Diako; Biermann, Florian; Bækgaard, Per
A gaze interactive assembly instruction with pupillometric recording Journal Article
In: Behavior Research Methods, vol. 50, no. 4, pp. 1723–1733, 2018, ISSN: 1554-3528.
Links | BibTeX | Tags: Gaze-controlled GUIs, Gaze-enhanced games
@article{Hansen2018,
  author    = {John Paulin Hansen and Diako Mardanbegi and Florian Biermann and Per Bækgaard},
  title     = {A gaze interactive assembly instruction with pupillometric recording},
  journal   = {Behavior Research Methods},
  volume    = {50},
  number    = {4},
  pages     = {1723--1733},
  year      = {2018},
  date      = {2018-08-01},
  issn      = {1554-3528},
  doi       = {10.3758/s13428-018-1074-z},
  url       = {https://doi.org/10.3758/s13428-018-1074-z},
  internal-note = {NOTE(review): same work as entry hansen2018gaze elsewhere in this file; consider merging the duplicate records.},
  keywords  = {Gaze-controlled GUIs, Gaze-enhanced games},
  pubstate  = {published},
  tppubtype = {article}
}
Davanzo, Nicola; Dondi, Piercarlo; Mosconi, Mauro; Porta, Marco
Playing Music with the Eyes through an Isomorphic Interface Inproceedings
In: Proceedings of the Workshop on Communication by Gaze Interaction, Association for Computing Machinery, Warsaw, Poland, 2018, ISBN: 9781450357906.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@inproceedings{10.1145/3206343.3206350,
  author    = {Nicola Davanzo and Piercarlo Dondi and Mauro Mosconi and Marco Porta},
  title     = {Playing Music with the Eyes through an Isomorphic Interface},
  booktitle = {Proceedings of the Workshop on Communication by Gaze Interaction},
  series    = {COGAIN '18},
  publisher = {Association for Computing Machinery},
  address   = {Warsaw, Poland},
  year      = {2018},
  date      = {2018-01-01},
  isbn      = {9781450357906},
  doi       = {10.1145/3206343.3206350},
  url       = {https://doi.org/10.1145/3206343.3206350},
  abstract  = {Playing music with the eyes is a challenging task. In this paper, we propose a virtual digital musical instrument, usable by both motor-impaired and able-bodied people, controlled through an eye tracker and a ``switch''. Musically speaking, the layout of the graphical interface is isomorphic, since the harmonic relations between notes have the same geometrical shape regardless of the key signature of the music piece. Four main design principles guided our choices, namely: (1) Minimization of eye movements, especially in case of large note intervals; (2) Use of a grid layout where ``nodes'' (keys) are connected each other through segments (employed as guides for the gaze); (3) No need for smoothing filters or time thresholds; and (4) Strategic use of color to facilitate gaze shifts. Preliminary tests, also involving another eye-controlled musical instrument, have shown that the developed system allows ``correct'' execution of music pieces even when characterized by complex melodies.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Cantoni, V.; Dondi, P.; Lombardi, L.; Nugrahaningsih, N.; Porta, M.; Setti, A.
A Multi-Sensory Approach to Cultural Heritage: The Battle of Pavia Exhibition Inproceedings
In: Proceedings of Florence Heri-Tech 2018, 2018.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@inproceedings{Cantoni2018-7606,
  author    = {V. Cantoni and P. Dondi and L. Lombardi and N. Nugrahaningsih and M. Porta and A. Setti},
  title     = {A Multi-Sensory Approach to Cultural Heritage: The Battle of Pavia Exhibition},
  booktitle = {Proceedings of Florence Heri-Tech 2018},
  year      = {2018},
  date      = {2018-01-01},
  doi       = {10.1088/1757-899X/364/1/012039},
  abstract  = {In the last years, several museums and exhibits have adopted new kinds of interactive installations that present artworks in more attractive ways, especially for young visitors. At the same time, new communication technologies have been introduced to allow vision and motion impaired people to visit arts centers. In this work, we present the multi-sensory solutions we have implemented for the ``Battle of Pavia'' Exhibition, a collateral event of Milan Expo 2015. The installation combined different interaction methods to achieve two main goals: providing visitors with engaging experiences and allowing blind and partially sighted people to appreciate the exposed artworks. The used technologies include gesture communication, gaze-based interaction, 3D character reconstruction, virtual avatars, and 3D tactile images. This work can be also viewed in the context of digital humanities for cultural heritage. To the best of our knowledge, this is the first exhibit to gather such a high number of interactive technologies in a single installation. The positive response from visitors is a great spur to continue our research in this direction.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
interactive installations that present artworks in more attractive ways, especially for young
visitors. At the same time, new communication technologies have been introduced to allow
vision and motion impaired people to visit arts centers. In this work, we present the multi-
sensory solutions we have implemented for the “Battle of Pavia” Exhibition, a collateral event
of Milan Expo 2015. The installation combined different interaction methods to achieve two
main goals: providing visitors with engaging experiences and allowing blind and partially
sighted people to appreciate the exposed artworks. The used technologies include gesture
communication, gaze-based interaction, 3D character reconstruction, virtual avatars, and 3D
tactile images. This work can be also viewed in the context of digital humanities for cultural
heritage. To the best of our knowledge, this is the first exhibit to gather such a high number of
interactive technologies in a single installation. The positive response from visitors is a great
spur to continue our research in this direction.
Hansen, John Paulin; Rajanna, Vijay; MacKenzie, I Scott; Bækgaard, Per
A Fitts' law study of click and dwell interaction by gaze, head and mouse with a head-mounted display Inproceedings
In: Proceedings of the Workshop on Communication by Gaze Interaction, pp. 1–5, 2018.
BibTeX | Tags: Gaze interaction in 3D (VR/AR/MR & real world), Gaze-controlled GUIs, Usability and UX evaluation of gaze-based interfaces
@inproceedings{hansen2018fitts,
  author    = {John Paulin Hansen and Vijay Rajanna and I Scott MacKenzie and Per Bækgaard},
  title     = {A {Fitts'} law study of click and dwell interaction by gaze, head and mouse with a head-mounted display},
  booktitle = {Proceedings of the Workshop on Communication by Gaze Interaction},
  pages     = {1--5},
  year      = {2018},
  date      = {2018-01-01},
  keywords  = {Gaze interaction in 3D (VR/AR/MR & real world), Gaze-controlled GUIs, Usability and UX evaluation of gaze-based interfaces},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Hansen, John Paulin; Mardanbegi, Diako; Biermann, Florian; Bækgaard, Per
A gaze interactive assembly instruction with pupillometric recording Journal Article
In: Behavior Research Methods, vol. 50, no. 4, pp. 1723–1733, 2018.
BibTeX | Tags: Gaze-aware systems, Gaze-controlled GUIs, User context estimation from eye movements
@article{hansen2018gaze,
  author    = {John Paulin Hansen and Diako Mardanbegi and Florian Biermann and Per Bækgaard},
  title     = {A gaze interactive assembly instruction with pupillometric recording},
  journal   = {Behavior Research Methods},
  volume    = {50},
  number    = {4},
  pages     = {1723--1733},
  publisher = {Springer},
  year      = {2018},
  date      = {2018-01-01},
  internal-note = {NOTE(review): same work as entry Hansen2018 elsewhere in this file; consider merging the duplicate records.},
  keywords  = {Gaze-aware systems, Gaze-controlled GUIs, User context estimation from eye movements},
  pubstate  = {published},
  tppubtype = {article}
}
Matulewski, Jacek; Bałaj, Bibianna; Marek, Ewelina; Piasecki, Łukasz; Gruszczyński, Dawid; Kuchta, Mateusz; Duch, Włodzisław
Moveye: gaze control of video playback Inproceedings
In: Proceedings of the Workshop on Communication by Gaze Interaction, pp. 1–5, 2018.
BibTeX | Tags: Gaze-controlled GUIs
@inproceedings{matulewski2018moveye,
  author    = {Jacek Matulewski and Bibianna Bałaj and Ewelina Marek and Łukasz Piasecki and Dawid Gruszczyński and Mateusz Kuchta and Włodzisław Duch},
  title     = {{MovEye}: gaze control of video playback},
  booktitle = {Proceedings of the Workshop on Communication by Gaze Interaction},
  pages     = {1--5},
  year      = {2018},
  date      = {2018-01-01},
  keywords  = {Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Cantoni, Virginio; Merlano, Lorenzo; Nugrahaningsih, Nahumi; Porta, Marco
Eye Tracking for Cultural Heritage: A Gaze-Controlled System for Handless Interaction with Artworks Inproceedings
In: Proceedings of the 17th International Conference on Computer Systems and Technologies 2016, pp. 307–314, Association for Computing Machinery, Palermo, Italy, 2016, ISBN: 9781450341820.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@inproceedings{10.1145/2983468.2983499,
  author    = {Virginio Cantoni and Lorenzo Merlano and Nahumi Nugrahaningsih and Marco Porta},
  title     = {Eye Tracking for Cultural Heritage: A Gaze-Controlled System for Handless Interaction with Artworks},
  booktitle = {Proceedings of the 17th International Conference on Computer Systems and Technologies 2016},
  series    = {CompSysTech '16},
  pages     = {307--314},
  publisher = {Association for Computing Machinery},
  address   = {Palermo, Italy},
  year      = {2016},
  date      = {2016-01-01},
  isbn      = {9781450341820},
  doi       = {10.1145/2983468.2983499},
  url       = {https://doi.org/10.1145/2983468.2983499},
  abstract  = {This paper presents a system that allows users to interact with artwork pictures through their gaze, by means of an eye tracker. In particular, visitors of an exhibition can select artworks, perform image scrolling, change the size of displayed pictures, define sensitive areas (that, when watched, display associated descriptive information), collect user gaze data, show dynamic gaze replays and generate static images showing a ``summary'' of what the visitor watched. The system was continuously used in an exhibition at the Visconti Castle of Pavia (Italy) from 14th June to 29th November 2015. The gathered data show that, in spite of some calibration problems, the system can be considered reliable and provides the user with an overall ``pleasant'' interaction experience.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Cantoni, Virginio; Lombardi, Luca; Porta, Marco; Setti, Alessandra
Interactive, Tangible and Multi-sensory Technology for a Cultural Heritage Exhibition: The Battle of Pavia Book Chapter
In: Margenov, Svetozar; Angelova, Galia; Agre, Gennady (Ed.): Innovative Approaches and Solutions in Advanced Intelligent Systems, pp. 77–94, Springer International Publishing, Cham, 2016, ISBN: 978-3-319-32207-0.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs
@inbook{Cantoni2016,
  author    = {Virginio Cantoni and Luca Lombardi and Marco Porta and Alessandra Setti},
  editor    = {Svetozar Margenov and Galia Angelova and Gennady Agre},
  title     = {Interactive, Tangible and Multi-sensory Technology for a Cultural Heritage Exhibition: The Battle of Pavia},
  booktitle = {Innovative Approaches and Solutions in Advanced Intelligent Systems},
  pages     = {77--94},
  publisher = {Springer International Publishing},
  address   = {Cham},
  year      = {2016},
  date      = {2016-01-01},
  isbn      = {978-3-319-32207-0},
  doi       = {10.1007/978-3-319-32207-0_6},
  url       = {https://doi.org/10.1007/978-3-319-32207-0_6},
  abstract  = {New generation multimedia may have a great impact on exhibition visit experience. This contribution focuses on the innovative use of interactive digital technologies in cultural heritage practices. ``Live'' displays steered by visitors support the creation of various content formats, smartly adapt the content delivered to the visitor, stimulate self-motivated learning, and lead to a memorable and effective experience. Multimodal interaction modalities have been developed for the exhibition ``1525--2015. Pavia, the Battle, the Future. Nothing was the same again'', a satellite event of the Universal Exhibition in Milan (Expo 2015). The Computer Vision & Multimedia Lab of the University of Pavia, in cooperation with the Bulgarian Academy of Sciences, in the framework of the European project ``Advanced Computing for Innovation'', has contributed to set up the exhibition, enriching an educational and experiential room with products and targeted applications. Visitors can observe and analyze seven ancient tapestries, illustrating different phases of the battle, through 3D reconstructions, virtual simulations, eye interaction and gesture navigation, along with transpositions of the tapestries into tactile images that enable the exploration by partially sighted and blind people. In the near future, we may assess the impact of this interactive experience. Due to the novelty of the approach, new insights can be potentially derived about the effectiveness and manageability of each specific system component. Under this scope, not only the exhibition success is important, but also the augmented learning experience in cultural heritage contexts.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inbook}
}
Akkil, Deepak; Lucero, Andrés; Kangas, Jari; Jokela, Tero; Salmimaa, Marja; Raisamo, Roope
User expectations of everyday gaze interaction on smartglasses Inproceedings
In: Proceedings of the 9th Nordic Conference on Human-Computer Interaction, pp. 1–10, 2016.
BibTeX | Tags: Gaze interaction in 3D (VR/AR/MR & real world), Gaze-controlled GUIs
@inproceedings{akkil2016user,
  author    = {Deepak Akkil and Andrés Lucero and Jari Kangas and Tero Jokela and Marja Salmimaa and Roope Raisamo},
  title     = {User expectations of everyday gaze interaction on smartglasses},
  booktitle = {Proceedings of the 9th Nordic Conference on Human-Computer Interaction},
  pages     = {1--10},
  year      = {2016},
  date      = {2016-01-01},
  keywords  = {Gaze interaction in 3D (VR/AR/MR & real world), Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Špakov, Oleg; Isokoski, Poika; Kangas, Jari; Akkil, Deepak; Majaranta, Päivi
PursuitAdjuster: an exploration into the design space of smooth pursuit--based widgets Inproceedings
In: Proceedings of the Ninth Biennial ACM Symposium on Eye Tracking Research & Applications, pp. 287–290, 2016.
BibTeX | Tags: Gaze-controlled GUIs
@inproceedings{vspakov2016pursuitadjuster,
  author    = {Oleg Špakov and Poika Isokoski and Jari Kangas and Deepak Akkil and Päivi Majaranta},
  title     = {{PursuitAdjuster}: an exploration into the design space of smooth pursuit-based widgets},
  booktitle = {Proceedings of the Ninth Biennial ACM Symposium on Eye Tracking Research \& Applications},
  pages     = {287--290},
  year      = {2016},
  date      = {2016-01-01},
  keywords  = {Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Jalaliniya, Shahram; Mardanbegi, Diako
EyeGrip: Detecting Targets in a Series of Uni-directional Moving Objects Using Optokinetic Nystagmus Eye Movements Inproceedings
In: Proceedings of the 2016 CHI Conference on Human Factors in Computing Systems, pp. 5801–5811, ACM, Santa Clara, California, USA, 2016, ISBN: 978-1-4503-3362-7.
Links | BibTeX | Tags: Gaze interaction with mobile devices, gaze tracking, Gaze-controlled GUIs, implicit interaction, optokinetic nystagmus (okn) eye movements, scrolling
@inproceedings{Jalaliniya:2016:EDT:2858036.2858584,
  author    = {Shahram Jalaliniya and Diako Mardanbegi},
  title     = {EyeGrip: Detecting Targets in a Series of Uni-directional Moving Objects Using Optokinetic Nystagmus Eye Movements},
  booktitle = {Proceedings of the 2016 CHI Conference on Human Factors in Computing Systems},
  series    = {CHI'16},
  pages     = {5801--5811},
  publisher = {ACM},
  address   = {Santa Clara, California, USA},
  year      = {2016},
  date      = {2016-01-01},
  isbn      = {978-1-4503-3362-7},
  doi       = {10.1145/2858036.2858584},
  url       = {http://doi.acm.org/10.1145/2858036.2858584},
  keywords  = {Gaze interaction with mobile devices, gaze tracking, Gaze-controlled GUIs, implicit interaction, optokinetic nystagmus (okn) eye movements, scrolling},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Cantoni, Virginio; Porta, Marco
Eye Tracking as a Computer Input and Interaction Method Inproceedings
In: Proceedings of the 15th International Conference on Computer Systems and Technologies, pp. 1–12, Association for Computing Machinery, Ruse, Bulgaria, 2014, ISBN: 9781450327534.
Abstract | Links | BibTeX | Tags: Gaze interaction paradigms, Gaze-controlled GUIs
@inproceedings{10.1145/2659532.2659592,
  author    = {Virginio Cantoni and Marco Porta},
  title     = {Eye Tracking as a Computer Input and Interaction Method},
  booktitle = {Proceedings of the 15th International Conference on Computer Systems and Technologies},
  series    = {CompSysTech '14},
  pages     = {1--12},
  publisher = {Association for Computing Machinery},
  address   = {Ruse, Bulgaria},
  year      = {2014},
  date      = {2014-01-01},
  isbn      = {9781450327534},
  doi       = {10.1145/2659532.2659592},
  url       = {https://doi.org/10.1145/2659532.2659592},
  abstract  = {Eye tracking applications can be considered under two points of view: in the former the eye tracker is a passive sensor that monitors the eyes to determine what the user is watching. In the latter the eye tracker has an active role that allows the user to control a computer. As a computer input device, an eye tracker typically substitutes the mouse point-select operation with a look-select process to: press buttons, select icons, follow links, etc. While look-select operations are naturally suited to eye input, controlling an interface element is not, because the eyes move covertly by saccades -- quick movements of the point of gaze from one location to another. Since the main task of the eyes is simply to see, if they are also used for interacting with the computer it may be difficult to decide, for example, whether a button is watched to understand its function or to trigger the associated action. In general, eye tracking systems present significant challenges when used for computer input and much research has been carried out in this field.},
  keywords  = {Gaze interaction paradigms, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
De Gaudenzi, Enrico; Porta, Marco
In: Chen, Liming; Kapoor, Supriya; Bhatia, Rahul (Ed.): Intelligent Systems for Science and Information: Extended and Selected Results from the Science and Information Conference 2013, pp. 197–212, Springer International Publishing, Cham, 2014, ISBN: 978-3-319-04702-7.
Abstract | Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-controlled GUIs
@inbook{DeGaudenzi2014,
  author    = {De Gaudenzi, Enrico and Porta, Marco},
  editor    = {Chen, Liming and Kapoor, Supriya and Bhatia, Rahul},
  title     = {Gaze Input for Ordinary Interfaces: Combining Automatic and Manual Error Correction Techniques to Improve Pointing Precision},
  booktitle = {Intelligent Systems for Science and Information: Extended and Selected Results from the Science and Information Conference 2013},
  pages     = {197--212},
  publisher = {Springer International Publishing},
  address   = {Cham},
  year      = {2014},
  date      = {2014-01-01},
  isbn      = {978-3-319-04702-7},
  doi       = {10.1007/978-3-319-04702-7_12},
  url       = {https://doi.org/10.1007/978-3-319-04702-7_12},
  abstract  = {Although eye tracking technology has greatly advanced in recent years, gaze-based interaction is still not as comfortable and effective as traditional mouse or touchpad input. In this paper we present the solutions we have developed to enhance eye pointing in ordinary interfaces, often characterized by small graphical elements. The described approach combines both automatic and manual error correction techniques (namely eye data filtering, virtual magnetization, magnifying glass and cursor manual shift) to allow the use of gaze as an alternative or supplementary communication channel. A modified keyboard with two additional keys is also proposed, with the purpose to speed up gaze interaction. Experiments have shown that the approach can provide better performance than touchpad pointing and compares fairly with the mouse. The obtained results, while preliminary, can be the starting point for deeper and more focused investigations, driving further research on the topic.},
  keywords  = {Gaze interaction with ordinary computers, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inbook}
}
Špakov, Oleg; Isokoski, Poika; Majaranta, Päivi
Look and lean: accurate head-assisted eye pointing Inproceedings
In: Proceedings of the Symposium on Eye Tracking Research and Applications, pp. 35–42, 2014.
BibTeX | Tags: Gaze-controlled GUIs, Gaze-supported multimodal interaction
@inproceedings{vspakov2014look,
  author    = {Oleg Špakov and Poika Isokoski and Päivi Majaranta},
  title     = {Look and lean: accurate head-assisted eye pointing},
  booktitle = {Proceedings of the Symposium on Eye Tracking Research and Applications},
  pages     = {35--42},
  year      = {2014},
  date      = {2014-01-01},
  keywords  = {Gaze-controlled GUIs, Gaze-supported multimodal interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Gaudenzi, Enrico De; Porta, Marco
Towards effective eye pointing for gaze-enhanced human-computer interaction Inproceedings
In: 2013 Science and Information Conference, pp. 22-27, 2013.
Abstract | BibTeX | Tags: Gaze-controlled GUIs
@inproceedings{6661713,
  author    = {De Gaudenzi, Enrico and Porta, Marco},
  title     = {Towards effective eye pointing for gaze-enhanced human-computer interaction},
  booktitle = {2013 Science and Information Conference},
  pages     = {22--27},
  year      = {2013},
  date      = {2013-10-01},
  abstract  = {In this paper we present some solutions we have devised to improve eye pointing in ordinary PC use. While eye tracking technology has advanced considerably in the last years, several limitations prevent gaze-based interaction from becoming as comfortable and effective as traditional mouse or touchpad input. The proposed approach combines both automatic and manual error correction techniques (namely eye data filtering, virtual magnetization, magnifying glass and cursor manual shift) to allow enhanced exploitation of gaze as an alternative or additional communication channel. Experiments have shown that the approach can provide better performance than touchpad pointing and compares fairly with the mouse.},
  keywords  = {Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Dybdal, Morten Lund; Agustin, Javier San; Hansen, John Paulin
Gaze input for mobile devices by dwell and gestures Inproceedings
In: Proceedings of the Symposium on Eye Tracking Research and Applications, pp. 225–228, 2012.
BibTeX | Tags: Gaze interfaces for wearable computing, Gaze-controlled GUIs, Usability and UX evaluation of gaze-based interfaces
@inproceedings{dybdal2012gaze,
  author    = {Morten Lund Dybdal and Javier San Agustin and John Paulin Hansen},
  title     = {Gaze input for mobile devices by dwell and gestures},
  booktitle = {Proceedings of the Symposium on Eye Tracking Research and Applications},
  pages     = {225--228},
  year      = {2012},
  date      = {2012-01-01},
  keywords  = {Gaze interfaces for wearable computing, Gaze-controlled GUIs, Usability and UX evaluation of gaze-based interfaces},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Skovsgaard, Henrik; Mateo, Julio C.; Hansen, John Paulin
Evaluating gaze-based interface tools to facilitate point-and-select tasks with small targets Journal Article
In: Behaviour & Information Technology, vol. 30, no. 6, pp. 821-831, 2011.
Links | BibTeX | Tags: Gaze interaction with ordinary computers, Gaze-controlled GUIs
@article{doi:10.1080/0144929X.2011.563801,
  author    = {Henrik Skovsgaard and Julio C. Mateo and John Paulin Hansen},
  title     = {Evaluating gaze-based interface tools to facilitate point-and-select tasks with small targets},
  journal   = {Behaviour \& Information Technology},
  volume    = {30},
  number    = {6},
  pages     = {821--831},
  publisher = {Taylor \& Francis},
  year      = {2011},
  date      = {2011-01-01},
  doi       = {10.1080/0144929X.2011.563801},
  url       = {https://doi.org/10.1080/0144929X.2011.563801},
  keywords  = {Gaze interaction with ordinary computers, Gaze-controlled GUIs},
  pubstate  = {published},
  tppubtype = {article}
}