% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Lober:1047535,
author = {Lober, Melissa and Bouhadjar, Younes and Neftci, Emre and
Diesmann, Markus and Tetzlaff, Tom},
title = {{U}nsupervised online learning of complex sequences in
spiking neuronal networks},
school = {RWTH Aachen},
reportid = {FZJ-2025-04365},
year = {2025},
abstract = {Learning and processing sequential data constitutes a
universal form of computation performed by the brain.
Understanding the underlying principles not only sheds
light on brain function but also guides the development of
energy-efficient neuromorphic computing architectures. In a
previous study, we devised a spiking recurrent neural
network, the spiking temporal memory (spiking TM) model,
implementing this type of computation. It learns sequences
in a continual, unsupervised manner by means of a local
Hebbian synaptic plasticity mechanism. Context-specific
predictions of upcoming sequence elements are represented by
dendritic action potentials. Upon successful learning, the
network activity is characterized by a highly sparse and
hence energy-efficient code. To date, the sequence learning
capabilities of the spiking TM model have only been
demonstrated for relatively small sequence sets. Here, we
systematically investigate the sequence learning capacity of
the model by gradually increasing the sequence length and
optimizing the plasticity (hyper-)parameters. We show that
the spiking TM model at the scale of a few thousand neurons
can successfully learn random sequences composed of several
tens of elements, with the maximum sequence length exceeding
the vocabulary size. After optimizing the plasticity
parameters for a given sequence length, the model exhibits
high prediction performance for a range of sequence lengths,
without additional fine-tuning. The learning duration (time
to solution) scales supralinearly with the sequence length.
Learning longer sequences is hence computationally
demanding, and requires accelerated computing
architectures.},
month = {Jul},
date = {2025-07-29},
organization = {International Conference on
Neuromorphic Systems, Seattle (USA), 29
Jul 2025 - 31 Jul 2025},
subtyp = {After Call},
cin = {IAS-6 / PGI-15 / INM-10},
cid = {I:(DE-Juel1)IAS-6-20130828 / I:(DE-Juel1)PGI-15-20210701 /
I:(DE-Juel1)INM-10-20170113},
pnm = {5231 - Neuroscientific Foundations (POF4-523) / 5232 -
Computational Principles (POF4-523) / JL SMHB - Joint Lab
Supercomputing and Modeling for the Human Brain (JL
SMHB-2021-2027) / BMFTR 03ZU2106CB - NeuroSys:
Algorithm-Hardware Co-Design (Projekt C) - B
(BMBF-03ZU2106CB) / BMBF 16ME0398K - Verbundprojekt:
Neuro-inspirierte Technologien der künstlichen Intelligenz
für die Elektronik der Zukunft - NEUROTEC II -
(BMBF-16ME0398K)},
pid = {G:(DE-HGF)POF4-5231 / G:(DE-HGF)POF4-5232 / G:(DE-Juel1)JL
SMHB-2021-2027 / G:(DE-Juel1)BMBF-03ZU2106CB /
G:(DE-82)BMBF-16ME0398K},
typ = {PUB:(DE-HGF)24},
doi = {10.34734/FZJ-2025-04365},
url = {https://juser.fz-juelich.de/record/1047535},
}