001021120 001__ 1021120
001021120 005__ 20250401102818.0
001021120 0247_ $$2doi$$a10.5194/gmd-17-261-2024
001021120 0247_ $$2ISSN$$a1991-959X
001021120 0247_ $$2ISSN$$a1991-9603
001021120 0247_ $$2datacite_doi$$a10.34734/FZJ-2024-00574
001021120 0247_ $$2WOS$$aWOS:001166577100001
001021120 037__ $$aFZJ-2024-00574
001021120 082__ $$a550
001021120 1001_ $$0P:(DE-Juel1)187016$$aBishnoi, Abhiraj$$b0$$eCorresponding author
001021120 245__ $$aEarth system modeling on modular supercomputing architecture: coupled atmosphere–ocean simulations with ICON 2.6.6-rc
001021120 260__ $$aKatlenburg-Lindau$$bCopernicus$$c2024
001021120 3367_ $$2DRIVER$$aarticle
001021120 3367_ $$2DataCite$$aOutput Types/Journal article
001021120 3367_ $$0PUB:(DE-HGF)16$$2PUB:(DE-HGF)$$aJournal Article$$bjournal$$mjournal$$s1707807350_20606
001021120 3367_ $$2BibTeX$$aARTICLE
001021120 3367_ $$2ORCID$$aJOURNAL_ARTICLE
001021120 3367_ $$00$$2EndNote$$aJournal Article
001021120 520__ $$aThe confrontation of complex Earth system model (ESM) codes with novel supercomputing architectures poses challenges to efficient modeling and job submission strategies. The modular setup of these models naturally fits a modular supercomputing architecture (MSA), which tightly integrates heterogeneous hardware resources into a larger and more flexible high-performance computing (HPC) system. While parts of the ESM codes can easily take advantage of the increased parallelism and communication capabilities of modern GPUs, others lag behind due to long development cycles or are better suited to run on classical CPUs due to their communication and memory usage patterns. To better cope with these imbalances between the development of the model components, we performed benchmark campaigns on the Jülich Wizard for European Leadership Science (JUWELS) modular HPC system. We enabled the weather and climate model Icosahedral Nonhydrostatic (ICON) to run in a coupled atmosphere–ocean setup, where the ocean and the model I/O run on the CPU Cluster, while the atmosphere is simulated simultaneously on the GPUs of the JUWELS Booster (ICON-MSA). Both the atmosphere and the ocean run globally at a resolution of 5 km. In our test case, an optimal configuration in terms of model performance (core hours per simulation day) was found for the combination of 84 GPU nodes on the JUWELS Booster module to simulate the atmosphere and 80 CPU nodes on the JUWELS Cluster module, of which 63 nodes were used for the ocean simulation and the remaining 17 nodes were reserved for I/O. With this configuration, the waiting times of the coupler were minimized. Compared to a simulation performed on CPUs only, the MSA approach reduces energy consumption by 45 % with comparable runtimes. ICON-MSA is able to scale up to a significant portion of the JUWELS system, making the best use of the available computing resources. A maximum throughput of 170 simulation days per day (SDPD) was achieved when running ICON on 335 JUWELS Booster nodes and 268 Cluster nodes.
001021120 536__ $$0G:(DE-HGF)POF4-5111$$a5111 - Domain-Specific Simulation & Data Life Cycle Labs (SDLs) and Research Groups (POF4-511)$$cPOF4-511$$fPOF IV$$x0
001021120 536__ $$0G:(DE-HGF)POF4-5122$$a5122 - Future Computing & Big Data Systems (POF4-512)$$cPOF4-512$$fPOF IV$$x1
001021120 536__ $$0G:(DE-Juel-1)aidas_20200731$$aAIDAS - Joint Virtual Laboratory for AI, Data Analytics and Scalable Simulation (aidas_20200731)$$caidas_20200731$$x2
001021120 588__ $$aDataset connected to CrossRef, Journals: juser.fz-juelich.de
001021120 7001_ $$0P:(DE-Juel1)3709$$aStein, Olaf$$b1$$eCorresponding author
001021120 7001_ $$0P:(DE-Juel1)156465$$aMeyer, Catrin I.$$b2
001021120 7001_ $$0P:(DE-HGF)0$$aRedler, René$$b3
001021120 7001_ $$0P:(DE-Juel1)132090$$aEicker, Norbert$$b4$$ufzj
001021120 7001_ $$0P:(DE-HGF)0$$aHaak, Helmuth$$b5
001021120 7001_ $$0P:(DE-Juel1)129125$$aHoffmann, Lars$$b6
001021120 7001_ $$0P:(DE-HGF)0$$aKlocke, Daniel$$b7
001021120 7001_ $$0P:(DE-HGF)0$$aKornblueh, Luis$$b8
001021120 7001_ $$0P:(DE-Juel1)142361$$aSuarez, Estela$$b9$$ufzj
001021120 773__ $$0PERI:(DE-600)2456725-5$$a10.5194/gmd-17-261-2024$$gVol. 17, no. 1, p. 261 - 273$$n1$$p261 - 273$$tGeoscientific model development$$v17$$x1991-959X$$y2024
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/Invoice_Helmholtz-PUC-2024-8.pdf
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/FZJ-2024-00574.pdf$$yOpenAccess
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/Invoice_Helmholtz-PUC-2024-8.gif?subformat=icon$$xicon
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/Invoice_Helmholtz-PUC-2024-8.jpg?subformat=icon-1440$$xicon-1440
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/Invoice_Helmholtz-PUC-2024-8.jpg?subformat=icon-180$$xicon-180
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/Invoice_Helmholtz-PUC-2024-8.jpg?subformat=icon-640$$xicon-640
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/FZJ-2024-00574.gif?subformat=icon$$xicon$$yOpenAccess
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/FZJ-2024-00574.jpg?subformat=icon-1440$$xicon-1440$$yOpenAccess
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/FZJ-2024-00574.jpg?subformat=icon-180$$xicon-180$$yOpenAccess
001021120 8564_ $$uhttps://juser.fz-juelich.de/record/1021120/files/FZJ-2024-00574.jpg?subformat=icon-640$$xicon-640$$yOpenAccess
001021120 8767_ $$8Helmholtz-PUC-2024-8$$92024-01-15$$a1200200190$$d2024-01-15$$eAPC$$jPayment completed
001021120 909CO $$ooai:juser.fz-juelich.de:1021120$$pdnbdelivery$$popenCost$$pVDB$$pdriver$$pOpenAPC$$popen_access$$popenaire
001021120 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)3709$$aForschungszentrum Jülich$$b1$$kFZJ
001021120 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)156465$$aForschungszentrum Jülich$$b2$$kFZJ
001021120 9101_ $$0I:(DE-HGF)0$$6P:(DE-HGF)0$$aExternal Institute$$b3$$kExtern
001021120 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)132090$$aForschungszentrum Jülich$$b4$$kFZJ
001021120 9101_ $$0I:(DE-HGF)0$$6P:(DE-HGF)0$$aExternal Institute$$b5$$kExtern
001021120 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)129125$$aForschungszentrum Jülich$$b6$$kFZJ
001021120 9101_ $$0I:(DE-HGF)0$$6P:(DE-HGF)0$$aExternal Institute$$b7$$kExtern
001021120 9101_ $$0I:(DE-HGF)0$$6P:(DE-HGF)0$$aExternal Institute$$b8$$kExtern
001021120 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)142361$$aForschungszentrum Jülich$$b9$$kFZJ
001021120 9131_ $$0G:(DE-HGF)POF4-511$$1G:(DE-HGF)POF4-510$$2G:(DE-HGF)POF4-500$$3G:(DE-HGF)POF4$$4G:(DE-HGF)POF$$9G:(DE-HGF)POF4-5111$$aDE-HGF$$bKey Technologies$$lEngineering Digital Futures – Supercomputing, Data Management and Information Security for Knowledge and Action$$vEnabling Computational- & Data-Intensive Science and Engineering$$x0
001021120 9131_ $$0G:(DE-HGF)POF4-512$$1G:(DE-HGF)POF4-510$$2G:(DE-HGF)POF4-500$$3G:(DE-HGF)POF4$$4G:(DE-HGF)POF$$9G:(DE-HGF)POF4-5122$$aDE-HGF$$bKey Technologies$$lEngineering Digital Futures – Supercomputing, Data Management and Information Security for Knowledge and Action$$vSupercomputing & Big Data Infrastructures$$x1
001021120 9141_ $$y2024
001021120 915pc $$0PC:(DE-HGF)0000$$2APC$$aAPC keys set
001021120 915pc $$0PC:(DE-HGF)0001$$2APC$$aLocal Funding
001021120 915pc $$0PC:(DE-HGF)0002$$2APC$$aDFG OA Publikationskosten
001021120 915pc $$0PC:(DE-HGF)0003$$2APC$$aDOAJ Journal
001021120 915__ $$0StatID:(DE-HGF)0160$$2StatID$$aDBCoverage$$bEssential Science Indicators$$d2023-10-25
001021120 915__ $$0LIC:(DE-HGF)CCBY4$$2HGFVOC$$aCreative Commons Attribution CC BY 4.0
001021120 915__ $$0StatID:(DE-HGF)0501$$2StatID$$aDBCoverage$$bDOAJ Seal$$d2022-12-20T09:29:04Z
001021120 915__ $$0StatID:(DE-HGF)0500$$2StatID$$aDBCoverage$$bDOAJ$$d2022-12-20T09:29:04Z
001021120 915__ $$0StatID:(DE-HGF)0113$$2StatID$$aWoS$$bScience Citation Index Expanded$$d2023-10-25
001021120 915__ $$0StatID:(DE-HGF)0700$$2StatID$$aFees$$d2023-10-25
001021120 915__ $$0StatID:(DE-HGF)0510$$2StatID$$aOpenAccess
001021120 915__ $$0StatID:(DE-HGF)0561$$2StatID$$aArticle Processing Charges$$d2023-10-25
001021120 915__ $$0StatID:(DE-HGF)0030$$2StatID$$aPeer Review$$bDOAJ : Open peer review$$d2022-12-20T09:29:04Z
001021120 915__ $$0StatID:(DE-HGF)0200$$2StatID$$aDBCoverage$$bSCOPUS$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)0300$$2StatID$$aDBCoverage$$bMedline$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)0600$$2StatID$$aDBCoverage$$bEbsco Academic Search$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)0030$$2StatID$$aPeer Review$$bASC$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)0199$$2StatID$$aDBCoverage$$bClarivate Analytics Master Journal List$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)1150$$2StatID$$aDBCoverage$$bCurrent Contents - Physical, Chemical and Earth Sciences$$d2024-12-21
001021120 915__ $$0StatID:(DE-HGF)0150$$2StatID$$aDBCoverage$$bWeb of Science Core Collection$$d2024-12-21
001021120 920__ $$lyes
001021120 9201_ $$0I:(DE-Juel1)JSC-20090406$$kJSC$$lJülich Supercomputing Centre$$x0
001021120 980__ $$ajournal
001021120 980__ $$aVDB
001021120 980__ $$aUNRESTRICTED
001021120 980__ $$aI:(DE-Juel1)JSC-20090406
001021120 980__ $$aAPC
001021120 9801_ $$aAPC
001021120 9801_ $$aFullTexts