% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
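%
% Usage sketch (an editorial addition, not part of the exported record):
% assuming this file is saved as "references.bib" (a placeholder name), the
% entry below can be cited with biblatex and the biber backend like so:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Deep networks can be decomposed as mappings of correlation
%   functions \cite{Fischer:912163}.
%   \printbibliography
%   \end{document}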

@ARTICLE{Fischer:912163,
      author       = {Fischer, Kirsten and René, Alexandre and Keup, Christian
                      and Layer, Moritz and Dahmen, David and Helias, Moritz},
      title        = {{D}ecomposing neural networks as mappings of correlation
                      functions},
      journal      = {Physical Review Research},
      volume       = {4},
      number       = {4},
      issn         = {2643-1564},
      address      = {College Park, MD},
      publisher    = {American Physical Society (APS)},
      reportid     = {FZJ-2022-05381},
      pages        = {043143},
      year         = {2022},
      abstract     = {Understanding the functional principles of information
                      processing in deep neural networks continues to be a
                      challenge, in particular for networks with trained and thus
                      nonrandom weights. To address this issue, we study the
                      mapping between probability distributions implemented by a
                      deep feed-forward network. We characterize this mapping as
                      an iterated transformation of distributions, where the
                      nonlinearity in each layer transfers information between
                      different orders of correlation functions. This allows us to
                      identify essential statistics in the data, as well as
                      different information representations that can be used by
                      neural networks. Applied to an XOR task and to MNIST, we
                      show that correlations up to second order predominantly
                      capture the information processing in the internal layers,
                      while the input layer also extracts higher-order
                      correlations from the data. This analysis provides a
                      quantitative and explainable perspective on classification.},
      cin          = {INM-6 / IAS-6 / INM-10},
      ddc          = {530},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
                      I:(DE-Juel1)INM-10-20170113},
      pnm          = {5232 - Computational Principles (POF4-523) / 5234 -
                      Emerging NC Architectures (POF4-523) / RenormalizedFlows -
                      Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / MSNN - Theory of multi-scale neuronal
                      networks (HGF-SMHB-2014-2018) / ACA - Advanced Computing
                      Architectures (SO-092) / neuroIC002 - Recurrence and
                      stochasticity for neuro-inspired computation
                      (EXS-SF-neuroIC002)},
      pid          = {G:(DE-HGF)POF4-5232 / G:(DE-HGF)POF4-5234 /
                      G:(DE-Juel-1)BMBF-01IS19077A /
                      G:(DE-Juel1)HGF-SMHB-2014-2018 / G:(DE-HGF)SO-092 /
                      G:(DE-82)EXS-SF-neuroIC002},
      typ          = {PUB:(DE-HGF)16},
      UT           = {WOS:000933947400011},
      doi          = {10.1103/PhysRevResearch.4.043143},
      url          = {https://juser.fz-juelich.de/record/912163},
}