% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Dasbach:901914,
author = {Dasbach, Stefan and Tetzlaff, Tom and Diesmann, Markus and
Senk, Johanna},
title = {{P}rominent characteristics of recurrent neuronal networks
are robust against low synaptic weight resolution},
reportid = {FZJ-2021-03901, 2105.05002},
year = {2021},
abstract = {The representation of the natural-density, heterogeneous
connectivity of neuronal network models at relevant spatial
scales remains a challenge for Computational Neuroscience
and Neuromorphic Computing. In particular, the memory
demands imposed by the vast number of synapses in
                  brain-scale network simulations constitute a major
obstacle. Limiting the number resolution of synaptic weights
appears to be a natural strategy to reduce memory and
compute load. In this study, we investigate the effects of a
limited synaptic-weight resolution on the dynamics of
recurrent spiking neuronal networks resembling local
cortical circuits, and develop strategies for minimizing
deviations from the dynamics of networks with
high-resolution synaptic weights. We mimic the effect of a
limited synaptic weight resolution by replacing normally
                  distributed synaptic weights with weights drawn from a
discrete distribution, and compare the resulting statistics
characterizing firing rates, spike-train irregularity, and
correlation coefficients with the reference solution. We
show that a naive discretization of synaptic weights
generally leads to a distortion of the spike-train
statistics. Only if the weights are discretized such that
the mean and the variance of the total synaptic input
                  currents are preserved do the firing statistics remain
unaffected for the types of networks considered in this
study. For networks with sufficiently heterogeneous
in-degrees, the firing statistics can be preserved even if
all synaptic weights are replaced by the mean of the weight
distribution. We conclude that even for simple networks with
non-plastic neurons and synapses, a discretization of
synaptic weights can lead to substantial deviations in the
firing statistics, unless the discretization is performed
with care and guided by a rigorous validation process. For
the network model used in this study, the synaptic weights
can be replaced by low-resolution weights without affecting
its macroscopic dynamical characteristics, thereby saving
substantial amounts of memory.},
cin = {INM-6 / IAS-6 / INM-10},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {5231 - Neuroscientific Foundations (POF4-523) / HBP SGA2 -
Human Brain Project Specific Grant Agreement 2 (785907) /
HBP SGA3 - Human Brain Project Specific Grant Agreement 3
(945539) / ACA - Advanced Computing Architectures (SO-092) /
Brain-Scale Simulations $(jinb33_20191101)$ / PhD no Grant -
Doktorand ohne besondere Förderung (PHD-NO-GRANT-20170405)},
pid = {G:(DE-HGF)POF4-5231 / G:(EU-Grant)785907 /
G:(EU-Grant)945539 / G:(DE-HGF)SO-092 /
$G:(DE-Juel1)jinb33_20191101$ /
G:(DE-Juel1)PHD-NO-GRANT-20170405},
typ = {PUB:(DE-HGF)25},
url = {https://juser.fz-juelich.de/record/901914},
}
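
% The abstract above describes a moment-preserving discretization of synaptic
% weights. The Python sketch below is not part of the record and not the
% authors' implementation; it only illustrates the idea under simple
% assumptions. All parameter values are hypothetical, and matching the
% per-weight mean and variance stands in for matching the moments of the
% total synaptic input current at fixed in-degree.

import numpy as np

rng = np.random.default_rng(seed=1234)

# Hypothetical weight distribution and resolution (e.g. 4-bit weights).
mu, sigma, n_syn = 0.15, 0.015, 100_000  # mean, std of weights; number of synapses
n_levels = 2**4

# High-resolution reference weights drawn from a normal distribution.
w = rng.normal(mu, sigma, size=n_syn)

# Naive discretization: round each weight onto a uniform grid spanning the
# observed weight range.
lo, hi = w.min(), w.max()
step = (hi - lo) / (n_levels - 1)
w_naive = lo + np.round((w - lo) / step) * step

# Moment-matched variant: an affine rescaling of the discretized weights so
# that their sample mean and variance equal those of the reference weights.
# The transform maps the grid onto another grid, so only n_levels distinct
# values remain, i.e. the limited weight resolution is preserved.
w_matched = (w_naive - w_naive.mean()) / w_naive.std() * w.std() + w.mean()

for name, ww in (("reference", w), ("naive", w_naive), ("matched", w_matched)):
    print(f"{name:9s} mean={ww.mean():.5f} std={ww.std():.5f} "
          f"levels={np.unique(np.round(ww, 12)).size}")

% Running the sketch prints the sample mean, standard deviation, and number of
% distinct weight values for each variant, showing that the naive rounding
% shifts the moments while the rescaled grid reproduces them.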