% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{Helias:890928,
      author       = {Helias, Moritz and van Meegen, Alexander and Dahmen, David
                      and Keup, Christian and Nestler, Sandra},
      title        = {{F}luctuations, correlations, chaos: dynamics and
                      computation in recurrent networks},
      reportid     = {FZJ-2021-01253},
      year         = {2021},
      abstract     = {The remarkable information-processing properties of
                      biological and artificial neuronal networks arise from
                      the interaction of large numbers of neurons. A central
                      task is thus to characterize their collective states.
                      Moreover, the directed coupling between pairs of neurons
                      and their continuous dissipation of energy drive the
                      dynamics of neuronal networks out of thermodynamic
                      equilibrium. Tools from non-equilibrium statistical
                      mechanics and field theory are therefore useful for
                      obtaining a quantitative understanding. Here we present
                      recent progress using such approaches [1]. We show how
                      activity in large, random networks can be described by a
                      unified approach combining path integrals and large
                      deviation theory, which allows the inference of
                      parameters from data and the prediction of future
                      activity [2]. This approach also allows one to quantify
                      fluctuations around the mean-field theory. These
                      fluctuations are important for understanding why
                      correlations observed between pairs of neurons indicate
                      that the dynamics of cortical networks are poised near a
                      critical point [3]. Close to this transition, we find
                      chaotic dynamics and prolonged sequential memory for
                      past signals [4]. In the chaotic regime, networks offer
                      representations of information whose dimensionality
                      expands with time. We show how this mechanism aids
                      classification performance [5]. Performance in such
                      reservoir-computing settings, moreover, depends
                      sensitively on how information is fed into the network.
                      Formally unrolling the recurrence with the help of
                      Green's functions yields a controlled, practical method
                      for optimizing reservoir computing [6]. Together, these
                      works illustrate the fruitful interplay between
                      theoretical physics, neuronal networks, and neural
                      information processing. References: 1. Helias, Dahmen
                      (2020) Statistical field theory for neural networks.
                      Springer Lecture Notes in Physics. 2. van Meegen, Kühn,
                      Helias (2020) Large deviation approach to random
                      recurrent neuronal networks: rate function, parameter
                      inference, and activity prediction. arXiv:2009.08889.
                      3. Dahmen, Grün, Diesmann, Helias (2019) Second type of
                      criticality in the brain uncovers rich multiple-neuron
                      dynamics. PNAS 116 (26) 13051-13060. 4. Schuecker,
                      Goedeke, Helias (2018) Optimal sequence memory in driven
                      random networks. Phys Rev X 8, 041029. 5. Keup, Kühn,
                      Dahmen, Helias (2020) Transient chaotic dimensionality
                      expansion by recurrent networks. arXiv:2002.11006.
                      6. Nestler, Keup, Dahmen, Gilson, Rauhut, Helias (2020)
                      Unfolding recurrence by Green's functions for optimized
                      reservoir computing. In Advances in Neural Information
                      Processing Systems 33 (NeurIPS 2020)},
      organization = {MILA Seminar, online (Canada)},
      subtyp        = {Invited},
      cin          = {INM-6 / INM-10 / IAS-6},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)INM-10-20170113 /
                      I:(DE-Juel1)IAS-6-20130828},
      pnm          = {5231 - Neuroscientific Foundations (POF4-523) / 5232 -
                      Computational Principles (POF4-523) / 5234 - Emerging NC
                      Architectures (POF4-523) / MSNN - Theory of multi-scale
                      neuronal networks (HGF-SMHB-2014-2018) / HBP SGA2 - Human
                      Brain Project Specific Grant Agreement 2 (785907) / HBP SGA3
                      - Human Brain Project Specific Grant Agreement 3 (945539) /
                      neuroIC002 - Recurrence and stochasticity for neuro-inspired
                      computation (EXS-SF-neuroIC002) / RenormalizedFlows -
                      Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / Advanced Computing Architectures
                      $(aca_20190115)$ / SDS005 - Towards an integrated data
                      science of complex natural systems (PF-JARA-SDS005)},
      pid          = {G:(DE-HGF)POF4-5231 / G:(DE-HGF)POF4-5232 /
                      G:(DE-HGF)POF4-5234 / G:(DE-Juel1)HGF-SMHB-2014-2018 /
                      G:(EU-Grant)785907 / G:(EU-Grant)945539 /
                      G:(DE-82)EXS-SF-neuroIC002 / G:(DE-Juel-1)BMBF-01IS19077A /
                      $G:(DE-Juel1)aca_20190115$ / G:(DE-Juel-1)PF-JARA-SDS005},
      typ          = {PUB:(DE-HGF)31},
      url          = {https://juser.fz-juelich.de/record/890928},
}
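
% A minimal sketch (not part of the repository record above): the mean-field
% theory invoked in the abstract, refs [2]-[4], builds on the classic
% random-network model of Sompolinsky, Crisanti, Sommers (1988). Assuming
% rate units x_i coupled by i.i.d. Gaussian weights,
%   \dot{x}_i(t) = -x_i(t) + \sum_{j=1}^{N} J_{ij}\,\phi(x_j(t)),
%   J_{ij} \sim \mathcal{N}(0,\, g^2/N),
% the limit N \to \infty replaces the recurrent input by a self-consistent
% Gaussian process, so the population-averaged autocovariance
% C(t,t') = \langle x(t)\,x(t') \rangle obeys
%   (\partial_t + 1)\,(\partial_{t'} + 1)\, C(t,t')
%     = g^2\, \langle \phi(x(t))\,\phi(x(t')) \rangle,
% with a transition to chaotic dynamics at g = 1 for \phi = \tanh. The
% path-integral derivation of such equations is the subject of ref. [1].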