% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@article{Bode:1021623,
  author    = {Bode, Mathis and Göbbert, Jens Henrik},
  title     = {Acceleration of complex high-performance computing
               ensemble simulations with super-resolution-based subfilter
               models},
  journal   = {Computers \& Fluids},
  volume    = {271},
  issn      = {0045-7930},
  address   = {Amsterdam [u.a.]},
  publisher = {Elsevier Science},
  reportid  = {FZJ-2024-00886},
  pages     = {106150},
  year      = {2024},
  abstract  = {Direct numerical simulation (DNS) of fluid flow problems
               has been one of the most important applications of
               high-performance computing (HPC) in the last decades. For
               example, turbulent flows require the simultaneous resolution
               of multiple spatial and temporal scales as all scales are
               coupled, resulting in very large simulations with enormous
               degrees of freedom. Another example is reactive flows, which
               typically result in a large system of coupled differential
               equations and multiple transport equations that must be
               solved simultaneously. In addition, many flows exhibit
               chaotic behavior, meaning that only statistical ensembles of
               results can be compared, further increasing the
               computational time. In this work, a combined HPC/deep
               learning (DL) workflow is presented that drastically reduces
               the overall computational time required while still
               providing acceptable accuracy. Traditionally, all the
               simulations required to compute ensemble statistics are
               performed using expensive DNS. The idea behind the combined
               HPC/DL workflow is to reduce the number of expensive DNSs by
               developing a DL-assisted large-eddy simulation (LES)
               approach that uses a sophisticated DL network, called
               PIESRGAN, as a subfilter model for all unclosed terms and is
               accurate enough to substitute DNSs. The remaining DNSs are
               thus used in two ways: first, as data contributing to the
               ensemble statistics, and second, as data used to train the
               DL network. It was found that in many cases two remaining
               DNSs are sufficient for training the LES approach. The cost
               of the DL-supported LES is usually more than one order of
               magnitude cheaper than the DNS, which drastically speeds up
               the workflow, even considering the overhead for training the
               DL network.},
  cin       = {JSC},
  ddc       = {004},
  cid       = {I:(DE-Juel1)JSC-20090406},
  pnm       = {5112 - Cross-Domain Algorithms, Tools, Methods Labs (ATMLs)
               and Research Groups (POF4-511) / CoEC - Center of Excellence
               in Combustion (952181)},
  pid       = {G:(DE-HGF)POF4-5112 / G:(EU-Grant)952181},
  typ       = {PUB:(DE-HGF)16},
  UT        = {WOS:001164689600001},
  doi       = {10.1016/j.compfluid.2023.106150},
  url       = {https://juser.fz-juelich.de/record/1021623},
}