% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{Nestler:889332,
      author       = {Nestler, Sandra and Keup, Christian and Dahmen, David and
                      Gilson, Matthieu and Rauhut, Holger and Helias, Moritz},
      title        = {{U}nfolding recurrence by {G}reen’s functions for
                      optimized reservoir computing},
      reportid     = {FZJ-2021-00223},
      pages        = {1},
      year         = {2020},
      abstract     = {Cortical networks are strongly recurrent, and neurons have
                      intrinsic temporal dynamics. This sets them apart from deep
                      feed-forward networks. Despite the tremendous progress in the
                      application of feed-forward networks and their theoretical
                      understanding, it remains unclear how the interplay of
                      recurrence and non-linearities in recurrent cortical networks
                      contributes to their function. The purpose of this work is to
                      present a solvable recurrent network model that links to
                      feed-forward networks. By perturbative methods we transform
                      the time-continuous, recurrent dynamics into an effective
                      feed-forward structure of linear and non-linear temporal
                      kernels. The resulting analytical expressions allow us to
                      build optimal time-series classifiers from random reservoir
                      networks. Firstly, this allows us to optimize not only the
                      readout vectors, but also the input projection, demonstrating
                      a strong potential performance gain. Secondly, the analysis
                      exposes how the second-order stimulus statistics is a crucial
                      element that interacts with the non-linearity of the dynamics
                      and boosts performance.},
      month        = {Dec},
      date         = {2020-12-06},
      organization = {34th Conference on Neural Information
                      Processing Systems, online (online), 6
                      Dec 2020 - 12 Dec 2020},
      cin          = {INM-6 / IAS-6 / INM-10},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
                      I:(DE-Juel1)INM-10-20170113},
      pnm          = {574 - Theory, modelling and simulation (POF3-574) / HBP
                      SGA3 - Human Brain Project Specific Grant Agreement 3
                      (945539) / neuroIC002 - Recurrence and stochasticity for
                      neuro-inspired computation (EXS-SF-neuroIC002) / Advanced
                      Computing Architectures (aca\_20190115) / RenormalizedFlows
                      - Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / SDS005 - Towards an integrated data
                      science of complex natural systems (PF-JARA-SDS005)},
      pid          = {G:(DE-HGF)POF3-574 / G:(EU-Grant)945539 /
                      G:(DE-82)EXS-SF-neuroIC002 / G:(DE-Juel1)aca\_20190115 /
                      G:(DE-Juel-1)BMBF-01IS19077A / G:(DE-Juel-1)PF-JARA-SDS005},
      typ          = {PUB:(DE-HGF)8},
      url          = {https://juser.fz-juelich.de/record/889332},
}