% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Diesmann:1029459,
author = {Diesmann, Markus},
title = {{L}arge-scale network models as digital twins advance
theory and neuromorphic computing},
reportid = {FZJ-2024-05134},
year = {2023},
abstract = {Computational neuroscience is entering a new era. This
originates from the
convergence of two developments: First, knowledge has been
accumulated enabling the construction of anatomically
detailed models of one or multiple brain areas. The models
have cellular and synaptic resolution, represent the
respective part of the brain with its natural number of
neurons and synapses, and are multi-scale. In addition to
spiking activity, mesoscopic signals such as the local field
potential (LFP) and fMRI signals can also be generated (e.g.
[1]). Second, with the completion of the European Human
Brain Project (HBP), simulation has firmly established
itself in neuroscience as a third pillar alongside
experiment and theory. A conceptual separation has been
achieved between concrete network models and generic
simulation engines [2,3]. Many different models can be
simulated with the same engine, such that these simulation
codes can continuously be optimized and operated as an
infrastructure [4]. Network models with millions of neurons
can routinely be investigated. Neuroscientists can now work
with digital twins of certain brain structures to test their
ideas on brain functions and probe the validity of
approximations required for analytical approaches. However,
the efficient use of this new capability also requires a
change in mindset. Computational neuroscience seems to have
been stuck at a certain level of model complexity for the
last decade, not
only because anatomical data were missing or because of a
lack of simulation technology. The fascination of the field
with minimal models yields explanations for individual
mechanisms, but the reduction to the bare minimum of required
equations leaves researchers with few contact points to build on
these works and construct larger systems with a wider
explanatory scope. In addition, constructing large-scale
models extends beyond the duration of an individual PhD
project, and an exclusive focus on hypothesis-driven research
may prevent such sustained constructive work. Possibly,
researchers may also just be missing the digital workflows
to reuse large-scale models and extend them reproducibly.
The change of perspective required is to view digital twins
as research platforms and scientific software as
infrastructure, with all the consequences this entails for
requirements on quality, long-term availability, and support.
As a concrete example, the presentation discusses how the
universality of
mammalian cortex has acted as a motivation to construct
large-scale models and demonstrates how digital workflows
have helped to reproduce results and increase the confidence
in such models. A digital twin promotes neuroscientific
investigations, but can also serve as a benchmark for
technology. The talk shows how a model of the cortical
microcircuit has become a de facto standard for neuromorphic
computing [5].
[1] Senk J, Hagen E, van Albada SJ, Diesmann M (2018)
Reconciliation of weak pairwise spike-train correlations and
highly coherent local field potentials across space.
arXiv:1805.10235 [q-bio.NC]
[2] Einevoll GT, Destexhe A, Diesmann M, Grün S, Jirsa V,
de Kamps M, Migliore M, Ness TV, Plesser HE, Schürmann F
(2019) The Scientific Case for Brain Simulations. Neuron
102:735-744
[3] Senk J, Kriener B, Djurfeldt M, Voges N, Jiang HJ,
Schüttler L, Gramelsberger G, Diesmann M, Plesser HE,
van Albada SJ (2022) Connectivity concepts in neuronal
network modeling. PLOS Comput Biol 18(9):e1010086
[4] Aimone JB, Awile O, Diesmann M, Knight JC, Nowotny T,
Schürmann F (2023) Editorial: Neuroscience, Computing,
Performance, and Benchmarks: Why It Matters to Neuroscience
How Fast We Can Compute. Front Neuroinform 17.
DOI: 10.3389/fninf.2023.1157418
[5] Kurth AC, Senk J, Terhorst D, Finnerty J, Diesmann M
(2022) Sub-realtime simulation of a neuronal network of
natural density. Neuromorphic Computing and Engineering
2:021001},
keywords = {simulation as third pillar, software as infrastructure,
universality of cortex, cellular-resolution cortical
microcircuit, multi-area model, neuromorphic computing},
month = {Sep},
date = {2023-09-12},
organization = {Workshop "Stochastic Models of the
Brain", Torino (Italy), 12 Sep 2023 -
13 Sep 2023},
subtyp = {Invited},
cin = {IAS-6 / INM-6 / INM-10},
cid = {I:(DE-Juel1)IAS-6-20130828 / I:(DE-Juel1)INM-6-20090406 /
I:(DE-Juel1)INM-10-20170113},
pnm = {5232 - Computational Principles (POF4-523) / HBP SGA3 -
Human Brain Project Specific Grant Agreement 3 (945539) /
ACA - Advanced Computing Architectures (SO-092) / BMBF
03ZU1106CB - NeuroSys: Algorithm-Hardware Co-Design (Projekt
C) - B (BMBF-03ZU1106CB) / EBRAINS 2.0 - EBRAINS 2.0: A
Research Infrastructure to Advance Neuroscience and Brain
Health (101147319) / HBP - The Human Brain Project (604102)},
pid = {G:(DE-HGF)POF4-5232 / G:(EU-Grant)945539 / G:(DE-HGF)SO-092
/ G:(DE-Juel1)BMBF-03ZU1106CB / G:(EU-Grant)101147319 /
G:(EU-Grant)604102},
typ = {PUB:(DE-HGF)31},
url = {https://juser.fz-juelich.de/record/1029459},
}