% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
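% For reference, a minimal LaTeX setup that handles this UTF-8 entry is
% sketched below, assuming biblatex with the biber backend as recommended
% above. The file name "references.bib" is illustrative; with legacy BibTeX
% the classic \bibliographystyle/\bibliography workflow applies instead,
% run through bibtex8 rather than plain bibtex.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Structural plasticity in sparse SNNs~\cite{Knight:1047373}.
%   \printbibliography
%   \end{document}
%
% Typical build sequence: pdflatex, then biber, then pdflatex again.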
@ARTICLE{Knight:1047373,
author = {Knight, James C. and Senk, Johanna and Nowotny, Thomas},
title = {A flexible framework for structural plasticity in
         {GPU}-accelerated sparse spiking neural networks},
journal = {arXiv},
publisher = {arXiv},
eprint = {2510.19764},
archiveprefix = {arXiv},
primaryclass = {cs.NE},
reportid = {FZJ-2025-04262},
year = {2025},
abstract = {The majority of research in both training Artificial Neural
Networks (ANNs) and modeling learning in biological brains
focuses on synaptic plasticity, where learning equates to
changing the strength of existing connections. However, in
             biological brains, structural plasticity, where new
             connections are created and others removed, is also vital,
not only for effective learning but also for recovery from
damage and optimal resource usage. Inspired by structural
plasticity, pruning is often used in machine learning to
remove weak connections from trained models to reduce the
computational requirements of inference. However, the
machine learning frameworks typically used for
backpropagation-based training of both ANNs and Spiking
Neural Networks (SNNs) are optimized for dense connectivity,
meaning that pruning does not help reduce the training costs
of ever-larger models. The GeNN simulator already supports
efficient GPU-accelerated simulation of sparse SNNs for
computational neuroscience and machine learning. Here, we
present a new flexible framework for implementing
             GPU-accelerated structural plasticity rules and demonstrate
             it, first using the e-prop supervised learning rule and
DEEP R to train efficient, sparse SNN classifiers and then,
in an unsupervised learning context, to learn topographic
maps. Compared to baseline dense models, our sparse
             classifiers reduce training time by up to 10x, while DEEP R
             rewiring enables them to perform as well as the original
models. We demonstrate topographic map formation in
             faster-than-real-time simulations, provide insights into the
             evolution of connectivity, and measure simulation speed versus
network size. The proposed framework will enable further
research into achieving and maintaining sparsity in network
             structure and neural communication, as well as into the
             computational benefits of sparsity in a range of
neuromorphic applications.},
keywords = {Neural and Evolutionary Computing (cs.NE) / Neurons
             and Cognition (q-bio.NC) / FOS: Computer and
             information sciences / FOS: Biological sciences},
cin = {IAS-6},
cid = {I:(DE-Juel1)IAS-6-20130828},
pnm = {5232 - Computational Principles (POF4-523) / 5234 -
Emerging NC Architectures (POF4-523) / HBP SGA3 - Human
Brain Project Specific Grant Agreement 3 (945539) / EBRAINS
2.0 - EBRAINS 2.0: A Research Infrastructure to Advance
Neuroscience and Brain Health (101147319)},
pid = {G:(DE-HGF)POF4-5232 / G:(DE-HGF)POF4-5234 /
G:(EU-Grant)945539 / G:(EU-Grant)101147319},
typ = {PUB:(DE-HGF)25},
doi = {10.48550/arXiv.2510.19764},
url = {https://juser.fz-juelich.de/record/1047373},
}