% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Herten:1047382,
      author       = {Herten, Andreas and Pearce, Olga and Guimaraes, Filipe},
      title        = {An {HPC} Benchmark Survey and Taxonomy for
                      Characterization},
      journal      = {The International Journal of High Performance Computing
                      Applications},
      issn         = {1094-3420},
      address      = {Thousand Oaks, Calif.},
      publisher    = {Sage Science Press},
      reportid     = {FZJ-2025-04270},
      pages        = {10943420251351424},
      year         = {2025},
      eprint       = {2509.08347},
      eprinttype   = {arXiv},
      note         = {Online first; volume and issue not yet assigned. Full
                      survey table available as supplemental material at the
                      journal; equivalent to the arXiv preprint.},
      abstract     = {The field of High-Performance Computing (HPC) is defined by
                      providing computing devices with highest performance for a
                      variety of demanding scientific users. The tight co-design
                      relationship between HPC providers and users propels the
                      field forward, paired with technological improvements,
                      achieving continuously higher performance and resource
                      utilization. A key device for system architects,
                      architecture researchers, and scientific users are
                      benchmarks, allowing for well-defined assessment of
                      hardware, software, and algorithms. Many benchmarks exist in
                      the community, from individual niche benchmarks testing
                      specific features, to large-scale benchmark suites for whole
                      procurements. We survey the available HPC benchmarks,
                      summarizing them in table form with key details and concise
                      categorization, also through an interactive website. For
                      categorization, we present a benchmark taxonomy for
                      well-defined characterization of benchmarks. The interactive
                      table of the survey is available at
                      https://fzj-jsc.github.io/benchmark-survey/},
      cin          = {JSC},
      ddc          = {004},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5122 - Future Computing \& Big Data Systems (POF4-512) /
                      5112 - Cross-Domain Algorithms, Tools, Methods Labs (ATMLs)
                      and Research Groups (POF4-511) / ATML-X-DEV - ATML
                      Accelerating Devices (ATML-X-DEV) / ATMLAO - ATML
                      Application Optimization and User Service Tools (ATMLAO)},
      pid          = {G:(DE-HGF)POF4-5122 / G:(DE-HGF)POF4-5112 /
                      G:(DE-Juel-1)ATML-X-DEV / G:(DE-Juel-1)ATMLAO},
      typ          = {PUB:(DE-HGF)16},
      doi          = {10.1177/10943420251351424},
      url          = {https://juser.fz-juelich.de/record/1047382},
}