% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
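%
% As a minimal usage sketch (assuming this file is saved as "references.bib";
% the file name is an assumption, not part of the record), the entry below can
% be processed with biblatex and the biber backend:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   TerrSysMP scaling on JUQUEEN \cite{Gasper:171937}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, run biber on the same job name, then pdflatex again.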

@ARTICLE{Gasper:171937,
      author       = {Gasper, F. and Goergen, K. and Kollet, S. and Shrestha, P.
                      and Sulis, M. and Rihani, J. and Geimer, M.},
      title        = {{I}mplementation and scaling of the fully coupled
                      {T}errestrial {S}ystems {M}odeling {P}latform
                      ({T}err{S}ys{MP}) in a massively parallel supercomputing
                      environment – a case study on {JUQUEEN} ({IBM} {B}lue
                      {G}ene/{Q})},
      journal      = {Geoscientific Model Development Discussions},
      volume       = {7},
      number       = {3},
      issn         = {1991-962X},
      address      = {Katlenburg-Lindau},
      publisher    = {Copernicus},
      reportid     = {FZJ-2014-05491},
      pages        = {3545 - 3573},
      year         = {2014},
      abstract     = {Continental-scale hyper-resolution simulations constitute a
                      grand challenge in characterizing non-linear feedbacks of
                      states and fluxes of the coupled water, energy, and
                      biogeochemical cycles of terrestrial systems. Tackling this
                      challenge requires advanced coupling and supercomputing
                      technologies for earth system models that are discussed in
                      this study, utilizing the example of the implementation of
                      the newly developed Terrestrial Systems Modeling Platform
                      (TerrSysMP) on JUQUEEN (IBM Blue Gene/Q) of the Jülich
                      Supercomputing Centre, Germany. The applied coupling
                      strategies rely on the Multiple Program Multiple Data (MPMD)
                      paradigm and require memory and load balancing
                      considerations in the exchange of the coupling fields
                      between different component models and allocation of
                      computational resources, respectively. These
                      considerations can be addressed with advanced profiling
                      and tracing tools, enabling the efficient use of massively
                      parallel computing environments, which is then mainly
                      determined by the parallel performance of the individual
                      component models. However, model I/O and initialization at
                      the peta-scale require major attention, because they
                      constitute a true big-data challenge that remains unsolved
                      in view of future exa-scale capabilities.},
      cin          = {IBG-3 / JSC},
      ddc          = {910},
      cid          = {I:(DE-Juel1)IBG-3-20101118 / I:(DE-Juel1)JSC-20090406},
      pnm          = {246 - Modelling and Monitoring Terrestrial Systems: Methods
                      and Technologies (POF2-246) / 255 - Terrestrial Systems:
                      From Observation to Prediction (POF3-255) / 411 -
                      Computational Science and Mathematical Methods (POF2-411) /
                      ATMLPP - ATML Parallel Performance (ATMLPP)},
      pid          = {G:(DE-HGF)POF2-246 / G:(DE-HGF)POF3-255 /
                      G:(DE-HGF)POF2-411 / G:(DE-Juel-1)ATMLPP},
      typ          = {PUB:(DE-HGF)16},
      doi          = {10.5194/gmdd-7-3545-2014},
      url          = {https://juser.fz-juelich.de/record/171937},
}