% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{DeHaan:845446,
author = {De Haan, Marcel and Brochier, Thomas and Grün, Sonja and
Riehle, Alexa and Barthélemy, Frédéric},
title = {{R}eal-time visuomotor behavior and electrophysiology
recording setup for use with humans and monkeys},
journal = {Journal of Neurophysiology},
volume = {120},
number = {2},
issn = {0022-3077},
address = {Bethesda, Md.},
publisher = {American Physiological Society},
reportid = {FZJ-2018-02710},
pages = {539--552},
year = {2018},
abstract = {Large-scale network dynamics in multiple visuomotor areas
are of great interest in the study of eye-hand coordination
in both humans and monkeys. To explore this, it is essential
to develop a setup that allows for precise tracking of eye
and hand movements. The setup should also be able to
generate mechanical or visual perturbations of hand
trajectories so that eye-hand coordination can be studied
under a variety of conditions. There are simple solutions that
satisfy these requirements for hand movements performed in
the horizontal plane while visual stimuli and hand feedback
are presented in the vertical plane. However, this spatial
dissociation requires cognitive rules for eye-hand
coordination that differ from those used when eye and hand
movements are performed in the same space, as is the case in
most natural conditions.
Here we present an innovative solution for the precise
tracking of eye and hand movements in a single reference
frame. Importantly, our solution allows behavioral
explorations under normal and perturbed conditions in both
humans and monkeys. It is based on the integration of two
noninvasive commercially available systems to achieve online
control and synchronous recording of eye (EyeLink) and hand
(KINARM) positions during interactive visuomotor tasks. We
also present an eye calibration method compatible with
different eye trackers that compensates for nonlinearities
caused by the system's geometry. Our setup monitors the two
effectors in real time with high spatial and temporal
resolution and simultaneously outputs behavioral and
neuronal data to an external data acquisition system using a
common data format. NEW \& NOTEWORTHY We developed a new
setup for studying eye-hand coordination in humans and
monkeys that monitors the two effectors in real time in a
common reference frame. Our eye calibration method allows us
to track gaze positions relative to visual stimuli presented
in the horizontal workspace of the hand movements. This
method compensates for nonlinearities caused by the system's
geometry and transforms kinematic signals from the eye
tracker into the same coordinate system as the hand and
targets.},
cin = {INM-6 / IAS-6 / INM-10},
ddc = {610},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {571 - Connectivity and Activity (POF3-571) / AVis -
Einfluss von top-down Signalen auf den autonomen Sehvorgang
- Multi-skalen Analyse von massiv-parallelen
Multi-Area-Daten des visuellen Pfades (BMBF-01GQ1114) / HBP
SGA2 - Human Brain Project Specific Grant Agreement 2
(785907) / HBP SGA1 - Human Brain Project Specific Grant
Agreement 1 (720270) / DFG project 238707842 - Kausative
Mechanismen mesoskopischer Aktivitätsmuster in der
auditorischen Kategorien-Diskrimination (238707842) / SMHB -
Supercomputing and Modelling for the Human Brain
(HGF-SMHB-2013-2017)},
pid = {G:(DE-HGF)POF3-571 / G:(DE-Juel1)BMBF-01GQ1114 /
G:(EU-Grant)785907 / G:(EU-Grant)720270 /
G:(GEPRIS)238707842 /
G:(DE-Juel1)HGF-SMHB-2013-2017},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:29718806},
UT = {WOS:000441195200014},
doi = {10.1152/jn.00262.2017},
url = {https://juser.fz-juelich.de/record/845446},
}
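% Minimal usage sketch, assuming this file is saved as "references.bib" (a
% hypothetical name) and processed with biblatex and the biber backend, which
% handle the UTF-8 author names above:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Eye-hand coordination setup \cite{DeHaan:845446}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, then biber, then pdflatex again so that the citation
% and the bibliography resolve.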