% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{Nestler:1006605,
      author       = {Nestler, Sandra and Keup, Christian and Dahmen, David and
                      Gilson, Matthieu and Rauhut, Holger and Helias, Moritz and
                      Bartolomaeus, Wiebke and Boutaib, Youness and Bouss, Peter
                      and Merger, Claudia Lioba and Fischer, Kirsten and Rene,
                      Alexandre and Schirrmeister, Robin and Ball, Tonio},
      title        = {{N}eural networks learning structure in temporal signals},
      reportid     = {FZJ-2023-01742},
      year         = {2023},
      abstract     = {Cortical networks are strongly recurrent, and neurons have
                      intrinsic temporal dynamics. This sets them apart from deep
                      feed-forward networks. Despite tremendous progress in
                      both the application and the theoretical understanding
                      of feed-forward networks, it remains unclear how the
                      interplay of recurrence and non-linearities in recurrent
                      cortical networks contributes to their function. The purpose
                      of this work is to present a solvable recurrent network
                      model that links to feed-forward networks. By
                      perturbative methods, we transform the time-continuous,
                      recurrent dynamics
                      into an effective feed-forward structure of linear and
                      non-linear temporal kernels. The resulting analytical
                      expressions allow us to build optimal time-series
                      classifiers from random reservoir networks. Firstly, this
                      allows us to optimize not only the readout vectors, but also
                      the input projection, demonstrating a potentially strong
                      performance gain. Secondly, the analysis exposes how the
                      second-order stimulus statistics are a crucial element
                      interacts with the non-linearity of the dynamics and boosts
                      performance.},
      organization = {Seminar Talk, Barak Lab, Haifa
                      (Israel)},
      subtyp        = {Invited},
      cin          = {INM-6 / IAS-6 / INM-10},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
                      I:(DE-Juel1)INM-10-20170113},
      pnm          = {5231 - Neuroscientific Foundations (POF4-523) / 5232 -
                      Computational Principles (POF4-523) / 5234 - Emerging NC
                      Architectures (POF4-523) / HBP SGA3 - Human Brain Project
                      Specific Grant Agreement 3 (945539) / ACA - Advanced
                      Computing Architectures (SO-092) / RenormalizedFlows -
                      Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / SDS005 - Towards an integrated data
                      science of complex natural systems (PF-JARA-SDS005) /
                      neuroIC002 - Recurrence and stochasticity for neuro-inspired
                      computation (EXS-SF-neuroIC002)},
      pid          = {G:(DE-HGF)POF4-5231 / G:(DE-HGF)POF4-5232 /
                      G:(DE-HGF)POF4-5234 / G:(EU-Grant)945539 / G:(DE-HGF)SO-092
                      / G:(DE-Juel-1)BMBF-01IS19077A / G:(DE-Juel-1)PF-JARA-SDS005
                      / G:(DE-82)EXS-SF-neuroIC002},
      typ          = {PUB:(DE-HGF)31},
      url          = {https://juser.fz-juelich.de/record/1006605},
}