% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{Yegenoglu:866218,
      author       = {Yegenoglu, Alper and Diaz, Sandra and Klijn, Wouter and
                      Peyser, Alexander and Subramoney, Anand and Maass, Wolfgang
                      and Visconti, Giuseppe and Herty, Michael},
      title        = {{L}earning to {L}earn on {H}igh {P}erformance {C}omputing},
      reportid     = {FZJ-2019-05385},
      year         = {2019},
      abstract     = {The simulation of biological neural networks (BNNs) is
                      essential to neuroscience. The complexity of the brain's
                      structure and activity, combined with the practical
                      limits of in vivo measurements, has led to the
                      development of computational models which allow us to
                      decompose, analyze and understand its elements and their
                      interactions. Impressive progress has recently been made
                      in non-spiking but brain-like learning capabilities in
                      artificial neural networks (ANNs) [1, 3]. A substantial
                      part of this progress arises from compute-intensive
                      learning-to-learn (L2L) [2, 4, 5] or meta-learning
                      methods. L2L is an algorithmic approach for acquiring
                      constraints that improve learning performance. L2L can be
                      decomposed into an optimizee program (such as a Kalman
                      filter), which learns specific tasks, and an optimizer
                      algorithm, which searches for generalized hyperparameters
                      for the optimizee. The optimizer learns to improve the
                      optimizee's performance over distinct tasks as measured
                      by a fitness function (Fig. 1). We have developed an
                      implementation of L2L on High Performance Computing (HPC)
                      systems [6] for hyperparameter optimization of spiking
                      BNNs as well as hyperparameter search for general
                      neuroscientific analytics. This tool takes advantage of
                      large-scale parallelization by deploying an ensemble of
                      optimizees to understand and analyze mathematical models
                      of BNNs. Improved performance for structural plasticity
                      has been found in NEST simulations comparing several
                      optimization techniques, including gradient descent, the
                      cross-entropy method, and evolution strategies.},
      month         = {Oct},
      date          = {2019-10-19},
      organization  = {Society for Neuroscience Meeting 2019,
                       Chicago (USA), 19 Oct 2019 - 23 Oct
                       2019},
      subtyp        = {After Call},
      cin          = {JSC},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {511 - Computational Science and Mathematical Methods
                      (POF3-511) / HBP SGA2 - Human Brain Project Specific Grant
                      Agreement 2 (785907) / SMHB - Supercomputing and Modelling
                      for the Human Brain (HGF-SMHB-2013-2017) / CSD-SSD - Center
                      for Simulation and Data Science (CSD) - School for
                      Simulation and Data Science (SSD) (CSD-SSD-20190612) / SLNS
                      - SimLab Neuroscience (Helmholtz-SLNS) / PhD no Grant -
                      Doktorand ohne besondere Förderung (PHD-NO-GRANT-20170405)},
      pid          = {G:(DE-HGF)POF3-511 / G:(EU-Grant)785907 /
                      G:(DE-Juel1)HGF-SMHB-2013-2017 /
                      G:(DE-Juel1)CSD-SSD-20190612 / G:(DE-Juel1)Helmholtz-SLNS /
                      G:(DE-Juel1)PHD-NO-GRANT-20170405},
      typ          = {PUB:(DE-HGF)24},
      url          = {https://juser.fz-juelich.de/record/866218},
}
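
% Note: the abstract above describes L2L as a two-level decomposition, with an
% optimizee that learns individual tasks and an optimizer that searches over
% its hyperparameters. As a minimal sketch (the notation theta, T, p(T), and f
% is illustrative, not taken from the record), the outer optimization can be
% written as
%
%   \theta^* = \arg\max_{\theta} \; \mathbb{E}_{T \sim p(T)}
%              \big[ f(\mathrm{optimizee}_{\theta}, T) \big]
%
% where T ranges over the task distribution p(T), \theta denotes the
% hyperparameters proposed by the optimizer, and f is the fitness function
% evaluated over the ensemble of optimizees deployed in parallel on the HPC
% system.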