% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Stapmanns:893117,
author = {Stapmanns, Jonas and Hahne, Jan and Helias, Moritz and
Bolten, Matthias and Diesmann, Markus and Dahmen, David},
title = {{Event-Based Update of Synapses in Voltage-Based Learning Rules}},
journal = {Frontiers in Neuroinformatics},
volume = {15},
issn = {1662-5196},
address = {Lausanne},
publisher = {Frontiers Research Foundation},
reportid = {FZJ-2021-02574},
pages = {609147},
year = {2021},
abstract = {Due to the point-like nature of neuronal spiking, efficient
neural network simulators often employ event-based
simulation schemes for synapses. Yet many types of synaptic
plasticity rely on the membrane potential of the
postsynaptic cell as a third factor in addition to pre- and
postsynaptic spike times. In some learning rules, membrane
potentials influence synaptic weight changes not only at the
time points of spike events but also in a continuous manner. In
these cases, synapses require information on the full time
course of the membrane potential to update their strength,
which a priori suggests a continuous update in a time-driven
manner. Such a scheme hinders scaling of
simulations to realistic cortical network sizes and relevant
time scales for learning. Here, we derive two efficient
algorithms for archiving postsynaptic membrane potentials,
both compatible with modern simulation engines based on
event-based synapse updates. We theoretically contrast the
two algorithms with a time-driven synapse update scheme to
analyze advantages in terms of memory and computations. We
further present a reference implementation in the spiking
neural network simulator NEST for two prototypical
voltage-based plasticity rules: the Clopath rule and the
Urbanczik-Senn rule. For both rules, the two event-based
algorithms significantly outperform the time-driven scheme.
Depending on the amount of data to be stored for plasticity,
which differs considerably between the rules, a strong
performance increase can be achieved by compressing or sampling
the information on membrane potentials. Our results on the
computational efficiency of archiving this information provide
guidelines for designing learning rules so that they remain
practically usable in large-scale networks.},
cin = {INM-6 / IAS-6 / INM-10},
ddc = {610},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {523 - Neuromorphic Computing and Network Dynamics
(POF4-523) / 89574 - Theory, modelling and simulation
(POF2-89574) / Advanced Computing Architectures
(aca\_20190115) / HBP SGA3 - Human Brain Project Specific
Grant Agreement 3 (945539) / HBP SGA2 - Human Brain Project
Specific Grant Agreement 2 (785907) / MSNN - Theory of
multi-scale neuronal networks (HGF-SMHB-2014-2018)},
pid = {G:(DE-HGF)POF4-523 / G:(DE-HGF)POF2-89574 /
G:(DE-Juel1)aca\_20190115 / G:(EU-Grant)945539 /
G:(EU-Grant)785907 / G:(DE-Juel1)HGF-SMHB-2014-2018},
typ = {PUB:(DE-HGF)16},
pubmed = {34177505},
UT = {WOS:000664997900001},
doi = {10.3389/fninf.2021.609147},
url = {https://juser.fz-juelich.de/record/893117},
}
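% The abstract above mentions a reference implementation of voltage-based
% plasticity in the NEST simulator. As an illustration only (not the archiving
% algorithms derived in the article), the minimal PyNEST sketch below shows how
% NEST's built-in Clopath rule is exercised; it assumes NEST 3.x with the
% documented 'aeif_psc_delta_clopath' neuron and 'clopath_synapse' models, and
% all parameter values are illustrative rather than taken from the article.
import nest

nest.ResetKernel()

# Clopath plasticity needs a postsynaptic neuron model that archives its
# membrane potential; 'aeif_psc_delta_clopath' is one such model in NEST.
pre = nest.Create("aeif_psc_delta_clopath")
post = nest.Create("aeif_psc_delta_clopath")

# Drive the presynaptic cell with Poisson input so it emits spikes
# (rate and weight chosen only to produce some activity).
drive = nest.Create("poisson_generator", params={"rate": 8000.0})
nest.Connect(drive, pre, syn_spec={"weight": 20.0})

# Plastic connection using the voltage-based Clopath rule; the synapse is
# updated event-wise at presynaptic spikes, reading the archived voltage trace.
nest.Connect(pre, post, syn_spec={"synapse_model": "clopath_synapse",
                                  "weight": 10.0, "delay": 1.0})

nest.Simulate(1000.0)

# Inspect the evolved synaptic weight after the simulation.
print(nest.GetConnections(pre, post).get("weight"))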