% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Dasbach:905087,
author = {Dasbach, Stefan and Tetzlaff, Tom and Diesmann, Markus and
Senk, Johanna},
title = {{D}ynamical {C}haracteristics of {R}ecurrent {N}euronal
{N}etworks {A}re {R}obust {A}gainst {L}ow {S}ynaptic
{W}eight {R}esolution},
journal = {Frontiers in Neuroscience},
volume = {15},
issn = {1662-453X},
address = {Lausanne},
publisher = {Frontiers Research Foundation},
reportid = {FZJ-2022-00386},
pages = {757790},
year = {2021},
abstract = {The representation of the natural-density, heterogeneous
            connectivity of neuronal network models at relevant spatial
            scales remains a challenge for Computational Neuroscience
            and Neuromorphic Computing. In particular, the memory
            demands imposed by the vast number of synapses in
            brain-scale network simulations constitute a major
            obstacle. Limiting the number resolution of synaptic
            weights appears to be a natural strategy to reduce memory
            and compute load. In this study, we investigate the effects
            of a limited synaptic-weight resolution on the dynamics of
            recurrent spiking neuronal networks resembling local
            cortical circuits and develop strategies for minimizing
            deviations from the dynamics of networks with
            high-resolution synaptic weights. We mimic the effect of a
            limited synaptic weight resolution by replacing normally
            distributed synaptic weights with weights drawn from a
            discrete distribution, and compare the resulting statistics
            characterizing firing rates, spike-train irregularity, and
            correlation coefficients with the reference solution. We
            show that a naive discretization of synaptic weights
            generally leads to a distortion of the spike-train
            statistics. If the weights are discretized such that the
            mean and the variance of the total synaptic input currents
            are preserved, the firing statistics remain unaffected for
            the types of networks considered in this study. For
            networks with sufficiently heterogeneous in-degrees, the
            firing statistics can be preserved even if all synaptic
            weights are replaced by the mean of the weight
            distribution. We conclude that even for simple networks
            with non-plastic neurons and synapses, a discretization of
            synaptic weights can lead to substantial deviations in the
            firing statistics unless the discretization is performed
            with care and guided by a rigorous validation process. For
            the network model used in this study, the synaptic weights
            can be replaced by low-resolution weights without affecting
            its macroscopic dynamical characteristics, thereby saving
            substantial amounts of memory.},
cin = {INM-6 / IAS-6 / INM-10},
ddc = {610},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {5231 - Neuroscientific Foundations (POF4-523) / HBP SGA2 -
Human Brain Project Specific Grant Agreement 2 (785907) /
HBP SGA3 - Human Brain Project Specific Grant Agreement 3
(945539) / ACA - Advanced Computing Architectures (SO-092) /
Brain-Scale Simulations $(jinb33_20191101)$},
pid = {G:(DE-HGF)POF4-5231 / G:(EU-Grant)785907 /
G:(EU-Grant)945539 / G:(DE-HGF)SO-092 /
$G:(DE-Juel1)jinb33_20191101$},
typ = {PUB:(DE-HGF)16},
pubmed = {35002599},
UT = {WOS:000743979800001},
doi = {10.3389/fnins.2021.757790},
url = {https://juser.fz-juelich.de/record/905087},
}
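% Illustration (appended as a comment, not part of the record above): the
% abstract describes discretizing synaptic weights such that the mean and
% variance of the total synaptic input current are preserved. Below is a
% minimal sketch of one such moment-matching scheme; the symmetric two-point
% distribution and all parameter values are assumptions for illustration,
% not necessarily the scheme used in the paper.
%
%   import numpy as np
%
%   rng = np.random.default_rng(seed=1)
%   mu, sigma, n_syn = 0.1, 0.02, 10_000      # hypothetical weight statistics
%
%   w_ref = rng.normal(mu, sigma, n_syn)      # reference: normal weights
%
%   # Two-point discretization: mu - sigma and mu + sigma, each drawn with
%   # probability 1/2, matches the first two moments of N(mu, sigma^2), so
%   # the mean and variance of the summed input current are preserved.
%   w_disc = rng.choice([mu - sigma, mu + sigma], size=n_syn)
%
%   print(w_ref.mean(), w_disc.mean())        # both close to mu
%   print(w_ref.var(), w_disc.var())          # both close to sigma**2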