% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{vanderVlag:903876,
      author       = {van der Vlag, Michiel and Diaz, Sandra and Woodman,
                       Marmaduke and Fousek, Jan and Peyser, Alexander and Jirsa,
                      Viktor},
      title        = {{R}ate{ML}, a spin-off of the {N}euro{ML} and {LEMS}
                      {D}omain {S}pecific {L}anguages, tailored to generate
                       rate-based models suited for simulators such as the
                      {V}irtual {B}rain ({TVB}) featuring high performance
                      computing and parameter sweep capabilities.},
      reportid     = {FZJ-2021-05509},
      year         = {2020},
      abstract     = {With this poster we present RateML, a spin-off of the
                       NeuroML and LEMS Domain Specific Languages, tailored to
                       generate rate-based models suited for simulators such as
                       The Virtual Brain (TVB) and featuring high-performance
                       computing and parameter sweep capabilities. RateML has been
                       developed to decouple modelling from the implementation or
                       hardware deployment of the TVB brain model to be simulated.
                       Using RateML, code can be produced for different target
                       languages, exploiting the computational capabilities of
                       specific computing paradigms and hardware. RateML is based
                       on the existing domain-specific language LEMS. The Low
                       Entropy Model Specification (LEMS) is an XML-based language
                       for specifying generic models of hybrid dynamical systems.
                       It extends a sibling language, NeuroML, by providing
                       representations for the variation of cell dynamics in time;
                       in other words, equations for cell dynamics. RateML enables
                       users to generate rate-based brain models from an XML file
                       in which the generic features of TVB models can be
                       addressed without needing extended knowledge of the optimal
                       programming or simulation of such models. Figure 1 shows an
                       example of a Kuramoto model in XML. A TVB simulation often
                       entails the exploration of many parameters to fit the
                       simulated dynamics to empirical data, e.g. EEG/MRI data.
                       These large parameter explorations are best served by a
                       high-performance computing solution. In addition to regular
                       (Python) TVB model generation, CUDA code can be generated
                       in which selected variables are assigned a specific range
                       for parameter exploration. Such extensive parameter
                       exploration can then be executed with a high degree of
                       parallelization on a GPU. For example, for a brain model
                       with 68 nodes, a single kernel invocation on a V100 GPU can
                       simulate roughly 30,000 parallel instances of the Kuramoto
                       model, exploring combinations of 173 coupling and 173 speed
                       values, in seconds. For the Epileptor, a model that is very
                       memory-demanding because it has 6 state variables, roughly
                       5,000 parameter combinations can be explored in a single
                       kernel. The models used in these experiments are generated
                       by RateML. Thus, RateML can produce a) Python code
                       compatible with the TVB framework, b) CUDA code that can be
                       run directly on GPUs to perform high-performance parameter
                       fitting, and c) in the future, code for Bayesian
                       inversion.},
      month        = {Sep},
      date         = {2020-09-29},
      organization = {Bernstein Conference, Online
                       (Germany), 29 Sep 2020 - 1 Oct 2020},
      subtyp       = {Other},
      keywords     = {Computational Neuroscience (Other) / Neurons, networks,
                      dynamical systems (Other)},
      cin          = {JSC},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5111 - Domain-Specific Simulation \& Data Life Cycle Labs
                      (SDLs) and Research Groups (POF4-511) / SLNS - SimLab
                      Neuroscience (Helmholtz-SLNS)},
      pid          = {G:(DE-HGF)POF4-5111 / G:(DE-Juel1)Helmholtz-SLNS},
      typ          = {PUB:(DE-HGF)24},
      doi          = {10.12751/NNCN.BC2020.0272},
      url          = {https://juser.fz-juelich.de/record/903876},
}
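
% A quick sanity check on the sweep sizes quoted in the abstract above. The
% sketch below is NOT RateML output; the parameter ranges are illustrative
% assumptions, and only the 173 x 173 grid size comes from the abstract. It
% is kept inside BibTeX comment lines so that this .bib file stays valid.
%
%   import numpy as np
%
%   # Illustrative ranges (assumed); one (coupling, speed) pair maps to one
%   # independent simulation instance, i.e. one GPU work item.
%   couplings = np.linspace(0.0, 1.0, 173)
%   speeds    = np.linspace(1.0, 10.0, 173)
%
%   # Cartesian product of the two ranges: one row per instance.
%   grid = np.stack(np.meshgrid(couplings, speeds), axis=-1).reshape(-1, 2)
%   print(grid.shape[0])  # 29929 combinations, the "roughly 30,000" cited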
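
% For context, a minimal sketch of how a rate-based model such as Kuramoto is
% driven through the standard TVB Python simulator API. This is plain TVB
% usage under assumed parameter values, not RateML-generated code.
%
%   import numpy as np
%   from tvb.simulator.lab import (models, connectivity, coupling,
%                                  integrators, monitors, simulator)
%
%   sim = simulator.Simulator(
%       model=models.Kuramoto(),                            # rate-based oscillator model
%       connectivity=connectivity.Connectivity.from_file(), # default demo connectome
%       coupling=coupling.Kuramoto(a=np.array([0.5])),      # sine coupling, assumed strength
%       integrator=integrators.HeunDeterministic(dt=0.1),
%       monitors=(monitors.TemporalAverage(period=1.0),),
%   )
%   sim.configure()
%   (time, data), = sim.run(simulation_length=1000.0)       # 1000 ms run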