% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Dar:877488,
      author       = {Dar, Asim H. and Wagner, Adina S. and Hanke, Michael},
      title        = {{REMoDNaV}: robust eye-movement classification for
                      dynamic stimulation},
      journal      = {Behavior Research Methods},
      volume       = {52},
      issn         = {1554-3528},
      address      = {Austin, Tex.},
      publisher    = {Psychonomic Society Publ.},
      reportid     = {FZJ-2020-02240},
      pages        = {1--16},
      year         = {2020},
      abstract     = {Tracking of eye movements is an established measurement for
                      many types of experimental paradigms. More complex and more
                      prolonged visual stimuli have made algorithmic approaches to
                      eye-movement event classification the most pragmatic option.
                      A recent analysis revealed that many current algorithms
                      perform poorly on data from the viewing of dynamic
                      stimuli such as video sequences. Here we present an event
                      classification algorithm—built on an existing
                      velocity-based approach—that is suitable for both static
                      and dynamic stimulation, and is capable of classifying
                      saccades, post-saccadic oscillations, fixations, and smooth
                      pursuit events. We validated classification performance and
                      robustness on three public datasets: 1) manually annotated,
                      trial-based gaze trajectories for viewing static images,
                      moving dots, and short video sequences, 2) lab-quality gaze
                      recordings for a feature-length movie, and 3) gaze
                      recordings acquired under suboptimal lighting conditions
                      inside the bore of a magnetic resonance imaging (MRI)
                      scanner for the same full-length movie. We found that the
                      proposed algorithm performs on par with, or better than,
                      state-of-the-art alternatives for static stimulation.
                      Moreover, it yields eye-movement events with biologically
                      plausible characteristics on prolonged dynamic recordings.
                      Lastly, algorithm performance is robust on data acquired
                      under suboptimal conditions that exhibit a temporally
                      varying noise level. These results indicate that the
                      proposed algorithm is a robust tool with improved
                      classification accuracy across a range of use cases. The
                      algorithm is cross-platform compatible, implemented using
                      the Python programming language, and readily available as
                      free and open-source software from public sources.},
      cin          = {INM-7},
      ddc          = {150},
      cid          = {I:(DE-Juel1)INM-7-20090406},
      pnm          = {574 - Theory, modelling and simulation (POF3-574)},
      pid          = {G:(DE-HGF)POF3-574},
      typ          = {PUB:(DE-HGF)16},
      pubmed       = {pmid:32710238},
      UT           = {WOS:000552180600001},
      doi          = {10.3758/s13428-020-01428-x},
      url          = {https://juser.fz-juelich.de/record/877488},
}
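
% Example usage (a minimal sketch, not part of the record): citing this entry
% from a LaTeX document compiled with biblatex and the "biber" backend, as
% recommended in the header note above. The bibliography file name
% "references.bib" is an assumption for illustration; substitute the actual
% name of this file.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib} % this file, under an assumed name
%   \begin{document}
%   REMoDNaV \cite{Dar:877488} classifies saccades, post-saccadic
%   oscillations, fixations, and smooth pursuit events.
%   \printbibliography
%   \end{document}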