Porta, Marco; Dondi, Piercarlo; Pianetta, Alice; Cantoni, Virginio
SPEye: A Calibration-Free Gaze-Driven Text Entry Technique Based on Smooth Pursuit Journal Article
In: IEEE Transactions on Human-Machine Systems, vol. 52, iss. 2, pp. 312-323, 2022.
@article{9619860,
title = {SPEye: A Calibration-Free Gaze-Driven Text Entry Technique Based on Smooth Pursuit},
author = {Marco Porta and Piercarlo Dondi and Alice Pianetta and Virginio Cantoni},
doi = {10.1109/THMS.2021.3123202},
year = {2022},
date = {2022-04-01},
urldate = {2021-01-01},
journal = {IEEE Transactions on Human-Machine Systems},
volume = {52},
issue = {2},
pages = {312-323},
abstract = {Gaze-based text entry is undoubtedly one of the most useful applications of eye-tracking technology for human-machine interaction, both in the assistive context (users with severe motor disabilities can exploit such writing modalities to communicate with the world) and as a way to allow touchless text input in everyday life. Different eye-driven text entry methods have been developed to date, and almost all of them require preliminary calibration procedures to work correctly. When a short text, such as a password or a PIN, needs to be entered without using hands or voice, calibration may be perceived as an unnecessary nuisance (and may not be properly maintained in public places due to “ambient noise,” caused, for example, by nearby people). Inadequate calibration may also be a problem in case of assistive uses. In this article we present SPEye, a calibration-free eye-controlled writing technique based on smooth pursuit. Although its writing speed is significantly lower than that of ordinary calibrated methods, the absence of an initial calibration makes it suitable for short text entry. The technique has been tested through several experiments, obtaining good performances in terms of key strokes per character and total error rate metrics, and receiving positive feedback from the participants in the tests.},
keywords = {Eye writing},
pubstate = {published},
tppubtype = {article}
}
Homola, Barnabás; Sheldon, Isabella; Ago, Stela; Mariani, Milton; Hansen, John Paulin
Prototyping Exoskeleton Interaction for Game-based Rehabilitation Proceedings Article
In: CHI Conference on Human Factors in Computing Systems Extended Abstracts, pp. 1–6, 2022.
@inproceedings{homola2022prototyping,
title = {Prototyping Exoskeleton Interaction for Game-based Rehabilitation},
author = {Barnabás Homola and Isabella Sheldon and Stela Ago and Milton Mariani and John Paulin Hansen},
year = {2022},
date = {2022-01-01},
booktitle = {CHI Conference on Human Factors in Computing Systems Extended Abstracts},
pages = {1--6},
keywords = {Gaze-controlled robots},
pubstate = {published},
tppubtype = {inproceedings}
}
Porta, Marco; Caminiti, Antonino; Dondi, Piercarlo
GazeScale: Towards General Gaze-Based Interaction in Public Places Proceedings Article
In: Proceedings of the 2022 International Conference on Multimodal Interaction, pp. 591–596, Association for Computing Machinery, Bengaluru, India, 2022, ISBN: 9781450393904.
@inproceedings{10.1145/3536221.3556588,
title = {GazeScale: Towards General Gaze-Based Interaction in Public Places},
author = {Marco Porta and Antonino Caminiti and Piercarlo Dondi},
url = {https://doi.org/10.1145/3536221.3556588},
doi = {10.1145/3536221.3556588},
isbn = {9781450393904},
year = {2022},
date = {2022-01-01},
booktitle = {Proceedings of the 2022 International Conference on Multimodal Interaction},
pages = {591–596},
publisher = {Association for Computing Machinery},
address = {Bengaluru, India},
series = {ICMI '22},
abstract = {Gaze-based interaction has until now been almost an exclusive prerogative of the assistive field, as it is considered not sufficiently performing compared to traditional communication methods based on keyboards, pointing devices, and touch screens. However, situations such as the one we are experiencing now due to the COVID-19 pandemic highlight the importance of touchless communication, to minimize the spread of the disease. In this paper, as an example of the potential pervasive use of eye tracking technology in public contexts, we propose and study five interfaces for a gaze-controlled scale, to be used in supermarkets to weigh fruits and vegetables. Given the great heterogeneity of potential users, the interaction must be as simple and intuitive as possible and occur without the need for calibration. The experiments carried out confirm that this goal is achievable and show strengths and weaknesses of the five interfaces.},
keywords = {Eye tracking, Gaze Input, gaze interaction, Gaze-controlled GUIs, Gaze-controlled special devices, multimodal interfaces},
pubstate = {published},
tppubtype = {inproceedings}
}
Dondi, Piercarlo; Porta, Marco; Donvito, Angelo; Volpe, Giovanni
A gaze-based interactive system to explore artwork imagery Journal Article
In: Journal on Multimodal User Interfaces, 2021, ISSN: 1783-8738.
@article{Dondi2021,
title = {A gaze-based interactive system to explore artwork imagery},
author = {Piercarlo Dondi and Marco Porta and Angelo Donvito and Giovanni Volpe},
url = {https://doi.org/10.1007/s12193-021-00373-z},
doi = {10.1007/s12193-021-00373-z},
issn = {1783-8738},
year = {2021},
date = {2021-05-21},
journal = {Journal on Multimodal User Interfaces},
abstract = {Interactive and immersive technologies can significantly enhance the fruition of museums and exhibits. Several studies have proved that multimedia installations can attract visitors, presenting cultural and scientific information in an appealing way. In this article, we present our workflow for achieving a gaze-based interaction with artwork imagery. We designed both a tool for creating interactive ``gaze-aware'' images and an eye tracking application conceived to interact with those images with the gaze. Users can display different pictures, perform pan and zoom operations, and search for regions of interest with associated multimedia content (text, image, audio, or video). Besides being an assistive technology for motor impaired people (like most gaze-based interaction applications), our solution can also be a valid alternative to the common touch screen panels present in museums, in accordance with the new safety guidelines imposed by the COVID-19 pandemic. Experiments carried out with a panel of volunteer testers have shown that the tool is usable, effective, and easy to learn.},
keywords = {Gaze interaction with ordinary computers, Gaze-based assistive technologies, Gaze-controlled GUIs},
pubstate = {published},
tppubtype = {article}
}
Ovchinnikova, Anastasia O; Vasilyev, Anatoly N; Zubarev, Ivan P; Kozyrskiy, Bogdan L; Shishkin, Sergei L
MEG-based detection of voluntary eye fixations used to control a computer Journal Article
In: Frontiers in Neuroscience, pp. 38, 2021.
@article{ovchinnikova2021meg,
title = {MEG-based detection of voluntary eye fixations used to control a computer},
author = {Anastasia O Ovchinnikova and Anatoly N Vasilyev and Ivan P Zubarev and Bogdan L Kozyrskiy and Sergei L Shishkin},
year = {2021},
date = {2021-01-01},
journal = {Frontiers in Neuroscience},
pages = {38},
publisher = {Frontiers},
keywords = {Brain-computer interfaces for enhancing gaze interaction, Gaze interaction paradigms, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {article}
}
Zhao, Darisy G; Vasilyev, Anatoly N; Kozyrskiy, Bogdan L; Melnichuk, Eugeny V; Isachenko, Andrey V; Velichkovsky, Boris M; Shishkin, Sergei L
A passive BCI for monitoring the intentionality of the gaze-based moving object selection Journal Article
In: Journal of Neural Engineering, vol. 18, no. 2, pp. 026001, 2021.
@article{zhao2021passive,
title = {A passive BCI for monitoring the intentionality of the gaze-based moving object selection},
author = {Darisy G Zhao and Anatoly N Vasilyev and Bogdan L Kozyrskiy and Eugeny V Melnichuk and Andrey V Isachenko and Boris M Velichkovsky and Sergei L Shishkin},
year = {2021},
date = {2021-01-01},
journal = {Journal of Neural Engineering},
volume = {18},
number = {2},
pages = {026001},
publisher = {IOP Publishing},
keywords = {Brain-computer interfaces for enhancing gaze interaction, Gaze interaction paradigms, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {article}
}
Bafna, Tanya; Bækgaard, Per; Hansen, John Paulin
EyeTell: Tablet-based Calibration-free Eye-typing using Smooth-pursuit movements Proceedings Article
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–6, 2021.
@inproceedings{bafna2021eyetell,
title = {EyeTell: Tablet-based Calibration-free Eye-typing using Smooth-pursuit movements},
author = {Tanya Bafna and Per Bækgaard and John Paulin Hansen},
year = {2021},
date = {2021-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--6},
keywords = {calibration-free, Eye writing, eye-tracking, Gaze interaction with mobile devices, mobile, smooth-pursuit, tablet, text entry},
pubstate = {published},
tppubtype = {inproceedings}
}
Bafna, Tanya; Bækgaard, Per; Hansen, John Paulin
Mental fatigue prediction during eye-typing Journal Article
In: PLOS ONE, vol. 16, no. 2, pp. e0246739, 2021.
@article{bafna2021mental,
title = {Mental fatigue prediction during eye-typing},
author = {Tanya Bafna and Per Bækgaard and John Paulin Hansen},
year = {2021},
date = {2021-01-01},
journal = {PLOS ONE},
volume = {16},
number = {2},
pages = {e0246739},
publisher = {Public Library of Science San Francisco, CA USA},
keywords = {Cognition, eye movements, Eye writing, Fatigue, Gaze interaction with ordinary computers, Language, Machine learning, Neurology, Pupil, User context estimation from eye movements},
pubstate = {published},
tppubtype = {article}
}
Mulvey, Fiona Bríd; Mikitovic, Marek; Sadowski, Mateusz; Hou, Baosheng; Rasamoel, Nils David; Hansen, John Paulin; Bækgaard, Per
Gaze Interactive and Attention Aware Low Vision Aids as Future Smart Glasses Proceedings Article
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–4, 2021.
@inproceedings{mulvey2021gaze,
title = {Gaze Interactive and Attention Aware Low Vision Aids as Future Smart Glasses},
author = {Fiona Bríd Mulvey and Marek Mikitovic and Mateusz Sadowski and Baosheng Hou and Nils David Rasamoel and John Paulin Hansen and Per Bækgaard},
year = {2021},
date = {2021-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--4},
keywords = {Eye tracking, Gaze interfaces for wearable computing, Gaze-aware systems, Gaze-based assistive technologies, mixed reality, Virtual reality, vision aids, vision loss},
pubstate = {published},
tppubtype = {inproceedings}
}
Bonikowski, Leszek; Gruszczyński, Dawid; Matulewski, Jacek
Open-source Software for Determining the Dynamic Areas of Interest for Eye Tracking Data Analysis Journal Article
In: Procedia Computer Science, vol. 192, pp. 2568–2575, 2021.
@article{bonikowski2021open,
title = {Open-source Software for Determining the Dynamic Areas of Interest for Eye Tracking Data Analysis},
author = {Leszek Bonikowski and Dawid Gruszczyński and Jacek Matulewski},
year = {2021},
date = {2021-01-01},
journal = {Procedia Computer Science},
volume = {192},
pages = {2568--2575},
publisher = {Elsevier},
keywords = {Usability and UX evaluation of gaze-based interfaces},
pubstate = {published},
tppubtype = {article}
}
Feng, Wenxin; Zou, Jiangnan; Kurauchi, Andrew; Morimoto, Carlos H; Betke, Margrit
HGaze Typing: Head-Gesture Assisted Gaze Typing Book Chapter
In: ACM ETRA 2021 -- Best Paper Award, Association for Computing Machinery, New York, NY, USA, 2021, ISBN: 9781450383448.
@inbook{10.1145/3448017.3457379,
title = {HGaze Typing: Head-Gesture Assisted Gaze Typing},
author = {Wenxin Feng and Jiangnan Zou and Andrew Kurauchi and Carlos H Morimoto and Margrit Betke},
url = {https://doi.org/10.1145/3448017.3457379},
isbn = {9781450383448},
year = {2021},
date = {2021-01-01},
booktitle = {ACM ETRA 2021 -- Best Paper Award},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
abstract = {This paper introduces a bi-modal typing interface, HGaze Typing, which combines the simplicity of head gestures with the speed of gaze inputs to provide efficient and comfortable dwell-free text entry. HGaze Typing uses gaze path information to compute candidate words and allows explicit activation of common text entry commands, such as selection, deletion, and revision, by using head gestures (nodding, shaking, and tilting). By adding a head-based input channel, HGaze Typing reduces the size of the screen regions for cancel/deletion buttons and the word candidate list, which are required by most eye-typing interfaces. A user study finds HGaze Typing outperforms a dwell-time-based keyboard in efficacy and user satisfaction. The results demonstrate that the proposed method of integrating gaze and head-movement inputs can serve as an effective interface for text entry and is robust to unintended selections.},
keywords = {Eye writing, Gaze-based assistive technologies, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {inbook}
}
Velloso, Eduardo; Morimoto, Carlos H
A Probabilistic Interpretation of Motion Correlation Selection Techniques Proceedings Article
In: Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, Yokohama, Japan, 2021, ISBN: 9781450380966.
@inproceedings{10.1145/3411764.3445184,
title = {A Probabilistic Interpretation of Motion Correlation Selection Techniques},
author = {Eduardo Velloso and Carlos H Morimoto},
url = {https://doi.org/10.1145/3411764.3445184},
doi = {10.1145/3411764.3445184},
isbn = {9781450380966},
year = {2021},
date = {2021-01-01},
booktitle = {Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {Yokohama, Japan},
series = {CHI '21},
abstract = {Motion correlation interfaces are those that present targets moving in different patterns, which the user can select by matching their motion. In this paper, we re-formulate the task of target selection as a probabilistic inference problem. We demonstrate that previous interaction techniques can be modelled using a Bayesian approach and how modelling the selection task as transmission of information can help us make explicit the assumptions behind similarity measures. We propose ways of incorporating uncertainty into the decision-making process and demonstrate how the concept of entropy can illuminate the measurement of the quality of a design. We apply these techniques in a case study and suggest guidelines for future work.},
keywords = {computational interaction, gaze interaction, gestures, motion correlation, probabilistic input, pursuits, User context estimation from eye movements},
pubstate = {published},
tppubtype = {inproceedings}
}
Elmadjian, Carlos; Morimoto, Carlos H
GazeBar: Exploiting the Midas Touch in Gaze Interaction Book Chapter
In: Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2021, ISBN: 9781450380959.
@inbook{10.1145/3411763.3451703,
title = {GazeBar: Exploiting the Midas Touch in Gaze Interaction},
author = {Carlos Elmadjian and Carlos H Morimoto},
url = {https://doi.org/10.1145/3411763.3451703},
isbn = {9781450380959},
year = {2021},
date = {2021-01-01},
booktitle = {Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
abstract = {Imagine an application that requires constant configuration changes, such as modifying the brush type in a drawing application. Typically, options are hierarchically organized in menu bars that the user must navigate, sometimes through several levels, to select the desired mode. An alternative to reduce hand motion is the use of multimodal techniques such as gaze-touch, that combines gaze pointing with mechanical selection. In this paper, we introduce GazeBar, a novel multimodal gaze interaction technique that uses gaze paths as a combined pointing and selection mechanism. The idea behind GazeBar is to maximize the interaction flow by reducing “safety” mechanisms (such as clicking) under certain circumstances. We present GazeBar’s design and demonstrate it using a digital drawing application prototype. Advantages and disadvantages of GazeBar are discussed based on a user performance model.},
keywords = {Gaze interaction with ordinary computers, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {inbook}
}
Di Maio, Matteo; Dondi, Piercarlo; Lombardi, Luca; Porta, Marco
Hybrid Manual and Gaze-Based Interaction With a Robotic Arm Proceedings Article
In: 2021 26th IEEE International Conference on Emerging Technologies and Factory Automation (ETFA), pp. 1-4, 2021.
@inproceedings{9613371,
title = {Hybrid Manual and Gaze-Based Interaction With a Robotic Arm},
author = {Matteo Di Maio and Piercarlo Dondi and Luca Lombardi and Marco Porta},
doi = {10.1109/ETFA45728.2021.9613371},
year = {2021},
date = {2021-01-01},
booktitle = {2021 26th IEEE International Conference on Emerging Technologies and Factory Automation (ETFA)},
pages = {1-4},
abstract = {Robots, very widespread in industrial settings, are now used for many different purposes. Even if several tasks can be fully automated, the presence of a human operator is often needed. However, direct robot control is generally complex, and it is thus important to explore alternative and comfortable ways of communication. In this paper, we propose a hybrid approach to human-robot interaction that exploits both manual and gaze-based input. While simple, our solution goes in the direction of enhancing the control capabilities of robots by human operators.},
keywords = {Gaze-controlled GUIs, Gaze-controlled robots},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhao, Darisy G; Karikov, Nikita D; Melnichuk, Eugeny V; Velichkovsky, Boris M; Shishkin, Sergei L
Voice as a mouse click: Usability and effectiveness of simplified hands-free gaze-voice selection Journal Article
In: Applied Sciences, vol. 10, no. 24, pp. 8791, 2020.
@article{zhao2020voice,
title = {Voice as a mouse click: Usability and effectiveness of simplified hands-free gaze-voice selection},
author = {Darisy G Zhao and Nikita D Karikov and Eugeny V Melnichuk and Boris M Velichkovsky and Sergei L Shishkin},
year = {2020},
date = {2020-01-01},
journal = {Applied Sciences},
volume = {10},
number = {24},
pages = {8791},
publisher = {Multidisciplinary Digital Publishing Institute},
keywords = {Gaze interaction paradigms, Gaze-supported multimodal interaction, Voice-supported gaze interaction},
pubstate = {published},
tppubtype = {article}
}
Araujo, Jacopo M; Zhang, Guangtao; Hansen, John Paulin; Puthusserypady, Sadasivan
Exploring eye-gaze wheelchair control Proceedings Article
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–8, 2020.
@inproceedings{araujo2020exploring,
title = {Exploring eye-gaze wheelchair control},
author = {Jacopo M Araujo and Guangtao Zhang and John Paulin Hansen and Sadasivan Puthusserypady},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--8},
keywords = {Gaze interaction in 3D (VR/AR/MR & real world), Gaze-based assistive technologies, Gaze-controlled vehicles},
pubstate = {published},
tppubtype = {inproceedings}
}
Bafna, Tanya; Hansen, John Paulin; Bækgaard, Per
Cognitive load during eye-typing Proceedings Article
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–8, 2020.
@inproceedings{bafna2020cognitive,
title = {Cognitive load during eye-typing},
author = {Tanya Bafna and John Paulin Hansen and Per Bækgaard},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--8},
keywords = {Eye writing, Gaze-based assistive technologies, User context estimation from eye movements},
pubstate = {published},
tppubtype = {inproceedings}
}
Hou, Baosheng James; Bækgaard, Per; MacKenzie, Scott; Hansen, John Paulin; Puthusserypady, Sadasivan
GIMIS: Gaze input with motor imagery selection Proceedings Article
In: ACM Symposium on Eye Tracking Research and Applications, pp. 1–10, 2020.
@inproceedings{hou2020gimis,
title = {GIMIS: Gaze input with motor imagery selection},
author = {Baosheng James Hou and Per Bækgaard and Scott MacKenzie and John Paulin Hansen and Sadasivan Puthusserypady},
year = {2020},
date = {2020-01-01},
booktitle = {ACM Symposium on Eye Tracking Research and Applications},
pages = {1--10},
keywords = {Gaze-based assistive technologies, Gaze-supported multimodal interaction},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhang, Guangtao; Hansen, John Paulin
People with Motor Disabilities Using Gaze to Control Telerobots Proceedings Article
In: Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems, pp. 1–9, 2020.
@inproceedings{zhang2020people,
title = {People with Motor Disabilities Using Gaze to Control Telerobots},
author = {Guangtao Zhang and John Paulin Hansen},
year = {2020},
date = {2020-01-01},
booktitle = {Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems},
pages = {1--9},
keywords = {Gaze-based assistive technologies, Gaze-controlled vehicles},
pubstate = {published},
tppubtype = {inproceedings}
}