% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@article{Wagner:865042,
  author   = {Wagner, Adina Svenja and Halchenko, Yaroslav and Hanke,
              Michael},
  title    = {multimatch-gaze: The {MultiMatch} algorithm for gaze
              path comparison in {Python}},
  journal  = {The Journal of Open Source Software},
  volume   = {4},
  number   = {40},
  issn     = {2475-9066},
  reportid = {FZJ-2019-04604},
  pages    = {1525},
  year     = {2019},
  abstract = {Multimatch-gaze is a Python package for computing the
              similarity of eye-movement sequences, so-called scan
              paths. Scan paths are the trace of eye-movements in
              space and time, usually captured with eye tracking
              devices. Scan path similarity is a measure that is used
              in a variety of disciplines ranging from cognitive
              psychology, medicine, and marketing to human-machine
              interfaces. In addition to quantifying position and
              order of a series of eye-movements, comparing their
              temporo-spatial sequence adds an insightful dimension
              to the traditional analysis of eye tracking data. It
              reveals commonalities and differences of viewing
              behavior within and between observers, and is used to
              study how people explore visual information. For
              example, scan path comparisons are used to study
              analogy-making (French, Glady, \& Thibaut, 2017),
              visual exploration and imagery (Johansson, Holsanova,
              \& Holmqvist, 2006), habituation in repetitive visual
              search (Burmester \& Mast, 2010), or spatial attention
              allocation in dynamic scenes (Mital, Smith, Hill, \&
              Henderson, 2011). The method is applied within
              individuals as a measure of change (Burmester \& Mast,
              2010), or across samples to study group differences
              (French et al., 2017). Therefore, in recent years,
              interest in the study of eye movement sequences has
              sparked the development of novel methodologies and
              algorithms to perform scan path comparisons. However,
              many of the contemporary scan path comparison
              algorithms are implemented in closed-source, non-free
              software such as Matlab. Multimatch-gaze is a
              Python-based reimplementation of the MultiMatch toolbox
              for scanpath comparison, originally developed by
              Jarodzka, Holmqvist, \& Nyström (2010) and implemented
              by Dewhurst et al. (2012) in Matlab. This algorithm
              represents scan paths as geometrical vectors in a
              two-dimensional space: Any scan path is built up of a
              coordinate vector sequence in which the start and end
              position of vectors represent fixations, and the
              vectors represent saccades. Two such vector sequences
              are, after optional simplification based on angular
              relations and amplitudes of saccades, compared on the
              five dimensions “vector shape”, “vector length
              (amplitude)”, “vector position”, “vector direction”,
              and “fixation duration” for a multidimensional
              similarity evaluation. This reimplementation in Python
              aims at providing an accessible, documented, and tested
              open source alternative to the existing MultiMatch
              toolbox. The algorithm is an established tool for scan
              path comparison (N. C. Anderson, Anderson, Kingstone,
              \& Bischof, 2015), and improved availability aids
              adoption in a broader research community.
              multimatch-gaze is available from its Github repository
              and as the Python package multimatch-gaze via pip
              install multimatch-gaze. The module contains the same
              functionality as the original Matlab toolbox, that is,
              scan path comparison with optional simplification
              according to user-defined thresholds, and it provides
              this functionality via a command line interface or a
              Python API. Data for scan path comparison can be
              supplied as nx3 fixation vectors with columns
              corresponding to x-coordinates, y-coordinates, and
              duration of the fixation in seconds (as for the
              original Matlab toolbox). Alternatively,
              multimatch-gaze can natively read in event detection
              output produced by REMoDNaV (Dar, Wagner, \& Hanke,
              2019), a velocity-based eye movement classification
              algorithm written in Python. For REMoDNaV-based input,
              users can additionally specify whether smooth pursuit
              events in the data should be kept in the scan path or
              discarded.},
  cin      = {INM-7},
  ddc      = {004},
  cid      = {I:(DE-Juel1)INM-7-20090406},
  pnm      = {574 - Theory, modelling and simulation (POF3-574)},
  pid      = {G:(DE-HGF)POF3-574},
  typ      = {PUB:(DE-HGF)16},
  doi      = {10.21105/joss.01525},
  url      = {https://juser.fz-juelich.de/record/865042},
}