% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
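%
% As a minimal sketch of how this file can be consumed, assuming it is saved as
% "references.bib" (the filename is an assumption), a biblatex setup with the
% biber backend could look like this:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Temporal pattern extraction \cite{Nestler:1010690}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, then biber, then pdflatex again to resolve the citation.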

@ARTICLE{Nestler:1010690,
      author       = {Nestler, Sandra and Helias, Moritz and Gilson, Matthieu},
      title        = {{S}tatistical temporal pattern extraction by neuronal
                      architecture},
      journal      = {Physical Review Research},
      volume       = {5},
      number       = {3},
      issn         = {2643-1564},
      address      = {College Park, MD},
      publisher    = {APS},
      reportid     = {FZJ-2023-03196},
      pages        = {033177},
      year         = {2023},
      abstract     = {Neuronal systems need to process temporal signals. Here,
                      we show how higher-order temporal (co)fluctuations can be
                      employed to represent and process information. Concretely,
                      we demonstrate that a simple biologically inspired
                      feedforward neuronal model can extract information from up
                      to the third-order cumulant to perform time series
                      classification. This model relies on a weighted linear
                      summation of synaptic inputs followed by a nonlinear gain
                      function. Training both the synaptic weights and the
                      nonlinear gain function exposes how the nonlinearity
                      allows for the transfer of higher-order correlations to
                      the mean, which in turn enables the synergistic use of
                      information encoded in multiple cumulants to maximize the
                      classification accuracy. The approach is demonstrated both
                      on synthetic and real-world datasets of multivariate time
                      series. Moreover, we show that the biologically inspired
                      architecture makes better use of the number of trainable
                      parameters than a classical machine-learning scheme. Our
                      findings emphasize the benefit of biological neuronal
                      architectures, paired with dedicated learning algorithms,
                      for the processing of information embedded in higher-order
                      statistical cumulants of temporal (co)fluctuations.},
      cin          = {INM-6 / IAS-6 / INM-10},
      ddc          = {530},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
                      I:(DE-Juel1)INM-10-20170113},
      pnm          = {5231 - Neuroscientific Foundations (POF4-523) / 5232 -
                      Computational Principles (POF4-523) / 5234 - Emerging NC
                      Architectures (POF4-523) / HBP SGA3 - Human Brain Project
                      Specific Grant Agreement 3 (945539) / ACA - Advanced
                      Computing Architectures (SO-092) / RenormalizedFlows -
                      Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / SDS005 - Towards an integrated data
                      science of complex natural systems (PF-JARA-SDS005) / DFG
                      project 491111487 - Open-Access-Publikationskosten / 2022 -
                      2024 / Forschungszentrum Jülich (OAPKFZJ) (491111487)},
      pid          = {G:(DE-HGF)POF4-5231 / G:(DE-HGF)POF4-5232 /
                      G:(DE-HGF)POF4-5234 / G:(EU-Grant)945539 / G:(DE-HGF)SO-092
                      / G:(DE-Juel-1)BMBF-01IS19077A / G:(DE-Juel-1)PF-JARA-SDS005
                      / G:(GEPRIS)491111487},
      typ          = {PUB:(DE-HGF)16},
      UT           = {WOS:001074650500002},
      doi          = {10.1103/PhysRevResearch.5.033177},
      url          = {https://juser.fz-juelich.de/record/1010690},
}