% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
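%
% As a concrete sketch of such a workflow (assuming the citing document is
% called "main.tex", which is only a placeholder name), the usual biber
% compile cycle is:
%
%   pdflatex main
%   biber main
%   pdflatex main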
@ARTICLE{Bouhadjar:908611,
author = {Bouhadjar, Younes and Wouters, Dirk J. and Diesmann, Markus
and Tetzlaff, Tom},
title = {{S}equence learning, prediction, and replay in networks of
spiking neurons},
journal = {PLoS Computational Biology},
volume = {18},
issn = {1553-734X},
publisher = {Public Library of Science},
reportid = {FZJ-2022-02720},
pages = {e1010233},
year = {2022},
abstract = {Sequence learning, prediction and replay have been proposed
to constitute the universal computations performed by the
neocortex. The Hierarchical Temporal Memory (HTM) algorithm
realizes these forms of computation. It learns sequences in
an unsupervised and continuous manner using local learning
rules, permits a context-specific prediction of future
sequence elements, and generates mismatch signals in case
the predictions are not met. While the HTM algorithm
accounts for a number of biological features such as
topographic receptive fields, nonlinear dendritic
processing, and sparse connectivity, it is based on abstract
discrete-time neuron and synapse dynamics, as well as on
plasticity mechanisms that can only partly be related to
known biological mechanisms. Here, we devise a
continuous-time implementation of the temporal-memory (TM)
component of the HTM algorithm, which is based on a
recurrent network of spiking neurons with biophysically
interpretable variables and parameters. The model learns
high-order sequences by means of a structural Hebbian
synaptic plasticity mechanism supplemented with a rate-based
homeostatic control. In combination with nonlinear dendritic
input integration and local inhibitory feedback, this type
of plasticity leads to the dynamic self-organization of
narrow sequence-specific subnetworks. These subnetworks
provide the substrate for a faithful propagation of sparse,
                  synchronous activity, and, thereby, for a robust,
                  context-specific prediction of future sequence elements as
                  well as
for the autonomous replay of previously learned sequences.
By strengthening the link to biology, our implementation
facilitates the evaluation of the TM hypothesis based on
experimentally accessible quantities. The continuous-time
implementation of the TM algorithm permits, in particular,
an investigation of the role of sequence timing for sequence
learning, prediction and replay. We demonstrate this aspect
by studying the effect of the sequence speed on the sequence
learning performance and on the speed of autonomous sequence
replay.},
cin = {INM-6 / IAS-6 / INM-10 / PGI-7 / PGI-10},
ddc = {610},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113 / I:(DE-Juel1)PGI-7-20110106 /
I:(DE-Juel1)PGI-10-20170113},
pnm = {574 - Theory, modelling and simulation (POF3-574) / 5232 -
Computational Principles (POF4-523) / Advanced Computing
                  Architectures (aca\_20190115) / PhD no Grant - Doktorand
ohne besondere Förderung (PHD-NO-GRANT-20170405) / HBP SGA3
- Human Brain Project Specific Grant Agreement 3 (945539) /
Open-Access-Publikationskosten Forschungszentrum Jülich
(OAPKFZJ) (491111487)},
pid = {G:(DE-HGF)POF3-574 / G:(DE-HGF)POF4-5232 /
                  G:(DE-Juel1)aca\_20190115 /
G:(DE-Juel1)PHD-NO-GRANT-20170405 / G:(EU-Grant)945539 /
G:(GEPRIS)491111487},
typ = {PUB:(DE-HGF)16},
pubmed = {35727857},
UT = {WOS:000829288500004},
doi = {10.1371/journal.pcbi.1010233},
url = {https://juser.fz-juelich.de/record/908611},
}
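
% A minimal sketch of a LaTeX document citing the entry above with
% biblatex/biber; the file name "references.bib" is only a placeholder for
% whatever this database is saved as:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}  % this .bib file
%   \begin{document}
%   Sequence learning in spiking networks is studied in
%   \cite{Bouhadjar:908611}.
%   \printbibliography
%   \end{document}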