% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Lhrs:878080,
author = {Lührs, Sebastian},
title = {{A}utomated benchmarking with {JUBE}},
reportid = {FZJ-2020-02622},
year = {2020},
abstract = {Benchmarking is a common task to evaluate the hardware and
            software environment of an HPC system during its procurement
            phase. As HPC systems also evolve over time through library
            updates, new software packages, or the installation of new
            hardware components, all of these system changes can influence
            the performance of user applications as well. This leads to
            the need for an automated benchmarking environment to allow a
            continuous performance evaluation. The JUBE benchmarking
            environment provides a flexible, lightweight, script-based
            framework to set up benchmark tasks on top of generic
            benchmark applications or full user applications. The
            environment allows controlling major aspects of the benchmark
            execution, such as parameter variation handling, workflow
            execution, data handling, asynchronous execution to support
            HPC job submission, and benchmark result extraction. The talk
            will present the capabilities of the current generation of the
            JUBE environment. It will cover the general basics of how to
            port and configure a benchmark application to make it
            available within JUBE and will discuss possible use cases such
            as fully automated, scheduled benchmark setups.},
month = {Jun},
date = {2020-06-18},
organization = {HPC Knowledge Meeting '20, Barcelona
(Spain), 18 Jun 2020 - 19 Jun 2020},
subtyp = {Invited},
cin = {JSC},
cid = {I:(DE-Juel1)JSC-20090406},
pnm = {511 - Computational Science and Mathematical Methods
(POF3-511)},
pid = {G:(DE-HGF)POF3-511},
typ = {PUB:(DE-HGF)6},
url = {https://juser.fz-juelich.de/record/878080},
}