% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Diesmann:1044917,
author = {Diesmann, Markus},
title = {{N}ew biophysical mechanisms in {NEST}},
reportid = {FZJ-2025-03439},
year = {2025},
abstract = {While the focus of this workshop is on the GPU kernel of
NEST in preparation for exascale computers like JUPITER, the
extension of the CPU kernel of NEST by further biophysical
mechanisms is continuing. In this way, researchers
immediately profit from advanced features, and the
functionality of the CPU code serves as a reference for the
GPU implementation. The talk discusses two mechanisms
recently added to the CPU code. The first is a framework for
neuron-astrocyte interactions. There is presently no
consensus on the role of astrocytes in the dynamics and
plasticity of neuronal networks. In addition, the equations
are complex because the relevance of individual details is
unknown. Nevertheless, it is now evident that astrocytes
play a role in plasticity, cognition, and behavior.
Therefore, the attention neuroscience pays to astrocytes is
growing, and research in the area is expanding. Computational
work has so far been restricted to small networks due to the
long observation times required and the lack of suitable
simulation code. As an example, we discuss a network where
astrocytes deliver calcium-governed slow inward currents (SICs) to
postsynaptic neurons. The network exhibits oscillations
controlled by neuron-astrocyte interaction. Astrocytes
require a generalization of the connectivity concepts of
NEST from pairwise rules to tripartite motifs. The framework
provides an implementation compatible with the usual hybrid
parallelization of the CPU code. The second is a framework
for backpropagation-like learning for spiking neuronal
networks based on only local information. The theory for
this, called eligibility propagation (e-prop), was published
by Bellec et al. (2020) together with a time-driven
algorithm for TensorFlow. The talk explains the
reformulation of the algorithm for the event-driven update
of synapses in the NEST code and demonstrates the
reproduction of original test cases. This naturally leads to
further modifications, such as the asynchronous update of
synaptic weights, that maintain learning performance while
enhancing biological plausibility. In this way, constraints
of the original theory are relaxed, and the learning scheme
is available for large-scale spiking network models.},
month = {Oct},
date = {2025-10-23},
organization = {NEST GPU Workshop, Cagliari (Italy),
23 Oct 2025 - 25 Oct 2025},
subtyp = {Invited},
cin = {IAS-6 / INM-10},
cid = {I:(DE-Juel1)IAS-6-20130828 / I:(DE-Juel1)INM-10-20170113},
pnm = {5234 - Emerging NC Architectures (POF4-523) / EBRAINS 2.0 -
EBRAINS 2.0: A Research Infrastructure to Advance
Neuroscience and Brain Health (101147319)},
pid = {G:(DE-HGF)POF4-5234 / G:(EU-Grant)101147319},
typ = {PUB:(DE-HGF)31},
url = {https://juser.fz-juelich.de/record/1044917},
}