% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
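%
% A minimal usage sketch, assuming this file is saved under the hypothetical
% name “references.bib”: load it through biblatex with the biber backend
% (which handles the UTF-8 characters above) and cite the entry by its key.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Sequence learning in spiking networks \cite{Bouhadjar:902308}.
%   \printbibliography
%   \end{document}
%
% Typical build order: pdflatex, then biber, then pdflatex again.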
@ARTICLE{Bouhadjar:902308,
author = {Bouhadjar, Younes and Wouters, Dirk J. and Diesmann, Markus
and Tetzlaff, Tom},
title = {{S}equence learning, prediction, and replay in networks of
spiking neurons},
publisher = {arXiv},
reportid = {FZJ-2021-04170},
year = {2021},
abstract = {Sequence learning, prediction and replay have been proposed
to constitute the universal computations performed by the
neocortex. The Hierarchical Temporal Memory (HTM) algorithm
realizes these forms of computation. It learns sequences in
an unsupervised and continuous manner using local learning
              rules, permits a context-specific prediction of future
sequence elements, and generates mismatch signals in case
the predictions are not met. While the HTM algorithm
accounts for a number of biological features such as
topographic receptive fields, nonlinear dendritic
processing, and sparse connectivity, it is based on abstract
discrete-time neuron and synapse dynamics, as well as on
plasticity mechanisms that can only partly be related to
              known biological mechanisms. Here, we devise a
continuous-time implementation of the temporal-memory (TM)
component of the HTM algorithm, which is based on a
recurrent network of spiking neurons with biophysically
interpretable variables and parameters. The model learns
high-order sequences by means of a structural Hebbian
synaptic plasticity mechanism supplemented with a rate-based
homeostatic control. In combination with nonlinear dendritic
input integration and local inhibitory feedback, this type
of plasticity leads to the dynamic self-organization of
narrow sequence-specific feedforward subnetworks. These
subnetworks provide the substrate for a faithful propagation
of sparse, synchronous activity, and, thereby, for a robust,
              context-specific prediction of future sequence elements as
well as for the autonomous replay of previously learned
              sequences. By strengthening the link to biology, our
implementation facilitates the evaluation of the TM
hypothesis based on experimentally accessible quantities.
The continuous-time implementation of the TM algorithm
permits, in particular, an investigation of the role of
sequence timing for sequence learning, prediction and
replay. We demonstrate this aspect by studying the effect of
the sequence speed on the sequence learning performance and
on the speed of autonomous sequence replay.},
cin = {INM-6 / IAS-6 / INM-10 / PGI-7 / PGI-10},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113 / I:(DE-Juel1)PGI-7-20110106 /
I:(DE-Juel1)PGI-10-20170113},
pnm = {574 - Theory, modelling and simulation (POF3-574) / 5232 -
Computational Principles (POF4-523) / Advanced Computing
         Architectures (aca\_20190115) / HBP SGA3 - Human Brain
Project Specific Grant Agreement 3 (945539) / HBP SGA2 -
Human Brain Project Specific Grant Agreement 2 (785907)},
pid = {G:(DE-HGF)POF3-574 / G:(DE-HGF)POF4-5232 /
         G:(DE-Juel1)aca\_20190115 / G:(EU-Grant)945539 /
G:(EU-Grant)785907},
typ = {PUB:(DE-HGF)25},
url = {https://juser.fz-juelich.de/record/902308},
}