% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Nestler:885634,
      author       = {Nestler, Sandra and Keup, Christian and Dahmen, David and
                      Gilson, Matthieu and Rauhut, Holger and Helias, Moritz},
      title        = {{U}nfolding recurrence by {G}reen's functions for optimized
                      reservoir computing},
      reportid     = {FZJ-2020-03975},
      year         = {2020},
      abstract     = {Cortical networks are strongly recurrent, and neurons have
                      intrinsic temporal dynamics. This sets them apart from deep
                      feed-forward networks. Despite the tremendous progress in
                      the application of feed-forward networks and their
                      theoretical understanding, it remains unclear how the
                      interplay of recurrence and non-linearities in recurrent
                      cortical networks contributes to their function. The purpose
                      of this work is to present a solvable recurrent network
                      model that links to feed-forward networks. By perturbative
                      methods we transform the time-continuous, recurrent dynamics
                      into an effective feed-forward structure of linear and
                      non-linear temporal kernels. The resulting analytical
                      expressions allow us to build optimal time-series
                      classifiers from random reservoir networks. Firstly, this
                      allows us to optimize not only the readout vectors, but also
                      the input projection, demonstrating a strong potential
                      performance gain. Secondly, the analysis exposes how the
                      second-order stimulus statistics are a crucial element
                      interacts with the non-linearity of the dynamics and boosts
                      performance.},
      cin          = {INM-6 / IAS-6 / INM-10},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
                      I:(DE-Juel1)INM-10-20170113},
      pnm          = {574 - Theory, modelling and simulation (POF3-574) / HBP
                      SGA3 - Human Brain Project Specific Grant Agreement 3
                      (945539) / neuroIC002 - Recurrence and stochasticity for
                      neuro-inspired computation (EXS-SF-neuroIC002) / Advanced
                      Computing Architectures (aca\_20190115) / RenormalizedFlows
                      - Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / SDS005 - Towards an integrated data
                      science of complex natural systems (PF-JARA-SDS005) / PhD no
                      Grant - Doctoral researcher without special funding
                      (PHD-NO-GRANT-20170405)},
      pid          = {G:(DE-HGF)POF3-574 / G:(EU-Grant)945539 /
                      G:(DE-82)EXS-SF-neuroIC002 / G:(DE-Juel1)aca\_20190115 /
                      G:(DE-Juel-1)BMBF-01IS19077A / G:(DE-Juel-1)PF-JARA-SDS005 /
                      G:(DE-Juel1)PHD-NO-GRANT-20170405},
      typ          = {PUB:(DE-HGF)25},
      eprint       = {2010.06247},
      howpublished = {arXiv:2010.06247},
      archivePrefix = {arXiv},
      SLACcitation = {\%\%CITATION = arXiv:2010.06247;\%\%},
      url          = {https://juser.fz-juelich.de/record/885634},
}