% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@INPROCEEDINGS{Brmmel:1024153,
      author       = {Brömmel, Dirk and Fritz, Jakob and Speck, Robert},
      title        = {{Integrated Continuous Benchmarking}},
      reportid     = {FZJ-2024-01995},
      year         = {2024},
      abstract     = {When developing research software, it is often important
                      to track its performance over time. This is even vital
                      when targeting high-performance computing (HPC): changes
                      to the software itself, the toolchains used, or the
                      system setup should not compromise how fast users obtain
                      their results. Ideally, performance and scalability
                      should only ever increase. Hence, benchmarking should be
                      an integral part of testing, in particular for HPC
                      codes. At the same time, publicly available, up-to-date
                      benchmarks can advertise the code and inform users how
                      to set up the software in the best way, or whether they
                      are achieving the expected performance. To limit the
                      burden on developers, the aforementioned steps should be
                      automated within continuous integration (CI) practices,
                      thereby introducing continuous benchmarking (CB). For
                      HPC, an added complexity is the need for more than the
                      usual CI backends: access to longer-running steps and to
                      more resources than a single node provides. Reusing test
                      cases that are easily run by hand is a further
                      simplification for developers who may not be familiar
                      with the research field. We present the CB solution we
                      use at the Jülich Supercomputing Centre (JSC), where we
                      combine the benchmarking already implemented via the
                      Jülich Benchmarking Environment (JUBE) with properly
                      authenticated CI steps running on the supercomputing
                      systems at JSC. The combined results, including their
                      evolution over time, are then further processed and
                      displayed on pages published via CI.},
      month        = {Mar},
      date         = {2024-03-05},
      organization = {4th Conference for Research Software
                      Engineering in Germany, Würzburg
                      (Germany), 5 Mar 2024 - 7 Mar 2024},
      subtyp        = {After Call},
      cin          = {JSC},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5112 - Cross-Domain Algorithms, Tools, Methods Labs (ATMLs)
                      and Research Groups (POF4-511) / RGRSE - RG Research
                      Software Engineering for HPC (RG RSE) (RG-RSE)},
      pid          = {G:(DE-HGF)POF4-5112 / G:(DE-Juel-1)RG-RSE},
      typ          = {PUB:(DE-HGF)6},
      doi          = {10.34734/FZJ-2024-01995},
      url          = {https://juser.fz-juelich.de/record/1024153},
}
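
% For reference, a minimal LaTeX document that consumes the entry above with
% biblatex and biber, as the header comment recommends. This is only a usage
% sketch; the file name "references.bib" is an assumption and must match the
% actual name of this file.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}  % assumed file name of this .bib file
%   \begin{document}
%   Continuous benchmarking for HPC codes is discussed
%   in \cite{Brmmel:1024153}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, then biber, then pdflatex again, so that biber can
% resolve the UTF-8 entry correctly.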