% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@inproceedings{Jordan:255885,
      author       = {Jordan, Jakob and Petrovici, Mihai and Pfeil, Thomas and
                      Breitwieser, Oliver and Bytschok, Ilja and Bill, Johannes
                      and Gruebl, Andreas and Schemmel, Johannes and Meier,
                      Karlheinz and Diesmann, Markus and Tetzlaff, Tom},
      title        = {Deterministic neural networks as sources of uncorrelated
                      noise for probabilistic computations},
      booktitle    = {CNS 2015},
      reportid     = {FZJ-2015-05991},
      year         = {2015},
      abstract     = {Neural-network models of brain function often rely on
                      the presence of noise [1-5]. To date, the interplay of
                      microscopic noise sources and network function is only
                      poorly understood. In computer simulations and in
                      neuromorphic hardware [6-8], the number of noise sources
                      (random-number generators) is limited. In consequence,
                      neurons in large functional network models have to share
                      noise sources and are therefore correlated. In general,
                      it is unclear how shared-noise correlations affect the
                      performance of functional network models. Further, there
                      is so far no solution to the problem of how a limited
                      number of noise sources can supply a large number of
                      functional units with uncorrelated noise. Here, we
                      investigate the performance of neural Boltzmann machines
                      [2-4]. We show that correlations in the background
                      activity are detrimental to the sampling performance and
                      that the deviations from the target distribution scale
                      inversely with the number of noise sources. Further, we
                      show that this problem can be overcome by replacing the
                      finite ensemble of independent noise sources by a
                      recurrent neural network with the same number of units.
                      As shown recently, inhibitory feedback, abundant in
                      biological neural networks, serves as a powerful
                      decorrelation mechanism [9,10]: Shared-noise
                      correlations are actively suppressed by the network
                      dynamics. By exploiting this effect, the network
                      performance is significantly improved. Hence, recurrent
                      neural networks can serve as natural finite-size noise
                      sources for functional neural networks, both in
                      biological and in synthetic neuromorphic substrates.
                      Finally we investigate the impact of sampling network
                      parameters on its ability to faithfully represent a
                      given well-defined distribution. We show that sampling
                      networks with sufficiently strong negative feedback can
                      intrinsically suppress correlations in the background
                      activity, and thereby improve their performance
                      substantially. Acknowledgments: Partially supported by
                      the Helmholtz Association portfolio theme SMHB, the
                      Jülich Aachen Research Alliance (JARA), EU Grant 269921
                      (BrainScaleS), The Austrian Science Fund FWF \#I753-N23
                      (PNEUMA), The Manfred Stärk Foundation, and EU Grant
                      604102 (Human Brain Project, HBP). [1] Rolls ET, Deco G:
                      The noisy brain. Oxford University Press, 2010 [2]
                      Hinton GE, Sejnowski TJ, Ackley DH: Boltzmann machines:
                      constraint satisfaction networks that learn. Technical
                      report, Carnegie-Mellon University, 1984 [3] Buesing L,
                      Bill J, Nessler B, Maass W: Neural Dynamics as Sampling:
                      A Model for Stochastic Computation in Recurrent Networks
                      of Spiking Neurons. PloS CB, 2011, 7, e1002211. [4]
                      Petrovici MA, Bill J, Bytschok I, Schemmel J, Meier K:
                      Stochastic inference with deterministic spiking neurons.
                      arXiv, 2013, 1311.3211v1 [q-bio.NC] [5] Probst D,
                      Petrovici MA, Bytschok I, Bill J, Pecevski D, Schemmel
                      J, Meier K: Probabilistic inference in discrete spaces
                      can be implemented into networks of LIF neurons. Front.
                      Comput. Neurosci., 2015, 9:13. [6] Schemmel J, Bruederle
                      D, Gruebl A, Hock M, Meier K, Millner S: A Wafer-Scale
                      Neuromorphic Hardware System for Large-Scale Neural
                      Modeling. Proceedings of the 2010 International
                      Symposium on Circuits and Systems (ISCAS), IEEE Press,
                      2010, 1947-1950 [7] Bruederle D, Petrovici M, Vogginger
                      B, Ehrlich M, Pfeil T, Millner S, Gruebl A, Wendt K,
                      Mueller E, Schwartz MO et al.: A comprehensive workflow
                      for general-purpose neural modeling with highly
                      configurable neuromorphic hardware systems. Biological
                      Cybernetics, 2011, 104, 263-296 [8] Petrovici MA,
                      Vogginger B, Mueller P, Breitwieser O, Lundqvist M,
                      Muller L, Ehrlich M, Destexhe A, Lansner A, Schueffny R
                      et al.: Characterization and Compensation of
                      Network-Level Anomalies in Mixed-Signal Neuromorphic
                      Modeling Platforms. PLoS ONE, 2014, 9(10):e108590. [9]
                      Renart A, De La Rocha J, Bartho P, Hollender L, Parga N,
                      Reyes A, Harris KD: The asynchronous State in Cortical
                      Circuits. Science, 2010, 327: 587-590 [10] Tetzlaff T,
                      Helias M, Einevoll G, Diesmann M: Decorrelation of
                      neural-network activity by inhibitory feedback. PloS CB,
                      2012, 8, e1002596},
      month        = jul,
      date         = {2015-07-18},
      organization = {CNS, Prague (Czech Republic), 18 Jul
                      2015 - 23 Jul 2015},
      cin          = {INM-6 / IAS-6},
      cid          = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828},
      pnm          = {574 - Theory, modelling and simulation (POF3-574) / 899 -
                      ohne Topic (POF2-899) / SMHB - Supercomputing and Modelling
                      for the Human Brain (HGF-SMHB-2013-2017) / HBP - The Human
                      Brain Project (604102) / BRAINSCALES - Brain-inspired
                      multiscale computation in neuromorphic hybrid systems
                      (269921)},
      pid          = {G:(DE-HGF)POF3-574 / G:(DE-HGF)POF2-899 /
                      G:(DE-Juel1)HGF-SMHB-2013-2017 / G:(EU-Grant)604102 /
                      G:(EU-Grant)269921},
      typ          = {PUB:(DE-HGF)1},
      url          = {https://juser.fz-juelich.de/record/255885},
}