% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Gilson:885723,
author = {Gilson, Matthieu and Dahmen, David and Moreno-Bote, Rubén
and Insabato, Andrea and Helias, Moritz},
title = {{T}he covariance perceptron: {A} new paradigm for
classification and processing of time series in recurrent
neuronal networks},
journal = {PLoS Computational Biology},
volume = {16},
number = {10},
issn = {1553-7358},
address = {San Francisco, Calif.},
publisher = {Public Library of Science},
reportid = {FZJ-2020-04035},
pages = {e1008127},
year = {2020},
abstract = {Learning in neuronal networks has developed in many
directions, in particular to reproduce cognitive tasks like
image recognition and speech processing. Implementations
have been inspired by stereotypical neuronal responses like
tuning curves in the visual system, where, for example,
ON/OFF cells fire or not depending on the contrast in their
receptive fields. Classical models of neuronal networks
therefore map a set of input signals to a set of activity
levels in the output of the network. Each category of inputs
is thereby predominantly characterized by its mean. In the
case of time series, fluctuations around this mean
constitute noise in this view. Under this paradigm, the high
variability exhibited by cortical activity may thus
imply limitations or constraints, which have been discussed
for many years; one example is the need to average neuronal
activity over long periods or across large groups of cells
to obtain a robust mean and to diminish the effect of noise
correlations. To reconcile robust computations with variable
neuronal activity, we here propose a conceptual change of
perspective: we treat the variability of activity as the
basis for stimulus-related information to be learned by
neurons, rather than merely as noise that corrupts the
mean signal. In this new paradigm, both afferent and
recurrent weights in a network are tuned to shape the
input-output mapping for covariances, the second-order
statistics of the fluctuating activity. When including time
lags, covariance patterns define a natural metric for time
series that captures their propagating nature. We develop the
theory for classification of time series based on their
spatio-temporal covariances, which reflect dynamical
properties. We demonstrate that recurrent connectivity
can transform information contained in the temporal
structure of the signal into spatial covariances. Finally,
we use the MNIST database to show how the covariance
perceptron can capture specific second-order statistical
patterns generated by moving digits.},
cin = {INM-6 / IAS-6 / INM-10},
ddc = {610},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {571 - Connectivity and Activity (POF3-571) / 574 - Theory,
modelling and simulation (POF3-574) / MSNN - Theory of
multi-scale neuronal networks (HGF-SMHB-2014-2018) /
neuroIC002 - Recurrence and stochasticity for neuro-inspired
computation (EXS-SF-neuroIC002) / HBP SGA2 - Human Brain
Project Specific Grant Agreement 2 (785907) / HBP SGA3 -
Human Brain Project Specific Grant Agreement 3 (945539)},
pid = {G:(DE-HGF)POF3-571 / G:(DE-HGF)POF3-574 /
G:(DE-Juel1)HGF-SMHB-2014-2018 / G:(DE-82)EXS-SF-neuroIC002
/ G:(EU-Grant)785907 / G:(EU-Grant)945539},
typ = {PUB:(DE-HGF)16},
pubmed = {33044953},
UT = {WOS:000581784900003},
doi = {10.1371/journal.pcbi.1008127},
url = {https://juser.fz-juelich.de/record/885723},
}
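% ---------------------------------------------------------------------------
% Illustration (not from the paper): the abstract above describes classifying
% time series by their spatio-temporal (time-lagged) covariances. Below is a
% minimal Python sketch of how such covariance patterns could be estimated
% with numpy; the function name, the (channels x time) shape convention, and
% the Frobenius distance used to compare two patterns are our assumptions,
% not the authors' code. It is kept as a BibTeX comment so this file still
% parses.
%
% import numpy as np
%
% def lagged_covariance(X, tau):
%     """Empirical lagged covariance Q(tau)[i, j] ~ cov(x_i(t), x_j(t+tau)).
%     X: array of shape (N, T), N channels observed over T time steps.
%     (Hypothetical helper for illustration only.)"""
%     N, T = X.shape
%     Xc = X - X.mean(axis=1, keepdims=True)   # center each channel
%     if tau == 0:
%         return Xc @ Xc.T / (T - 1)           # ordinary spatial covariance
%     # pair x_i(t) with x_j(t + tau); tau > 0 probes the temporal structure
%     return Xc[:, :-tau] @ Xc[:, tau:].T / (T - tau - 1)
%
% # Two time series can then be compared by the distance between their
% # covariance patterns, e.g. over lags 0 and 1 (one possible metric of the
% # kind the abstract alludes to):
% rng = np.random.default_rng(0)
% X, Y = rng.standard_normal((10, 1000)), rng.standard_normal((10, 1000))
% d = sum(np.linalg.norm(lagged_covariance(X, t) - lagged_covariance(Y, t))
%         for t in (0, 1))
% ---------------------------------------------------------------------------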