diff --git a/.github/workflows/draft-pdf.yml b/.github/workflows/draft-pdf.yml
new file mode 100644
index 00000000000..df0b3e28e51
--- /dev/null
+++ b/.github/workflows/draft-pdf.yml
@@ -0,0 +1,23 @@
+on: [push]
+
+jobs:
+ paper:
+ runs-on: ubuntu-latest
+ name: Paper Draft
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build draft PDF
+ uses: openjournals/openjournals-draft-action@master
+ with:
+ journal: joss
+ # This should be the path to the paper within your repo.
+ paper-path: src/docs/JOSS/paper.md
+ - name: Upload
+ uses: actions/upload-artifact@v4
+ with:
+ name: paper
+ # This is the output path where Pandoc will write the compiled
+ # PDF. Note that this should be the same directory as the input
+ # paper.md
+ path: src/docs/JOSS/paper.pdf
\ No newline at end of file
diff --git a/CITATION.cff b/CITATION.cff
index 3f4d284506a..71d0b13d7d5 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -6,6 +6,7 @@ authors:
orcid: "https://orcid.org/0000-0002-2536-7867"
- family-names: "Corbett"
given-names: "Benjamin Curtice"
+ orcid: "https://orcid.org/0009-0008-7108-9651"
- family-names: "Klevtsov"
given-names: "Sergey"
orcid: "https://orcid.org/0000-0001-9044-1827"
@@ -23,6 +24,7 @@ authors:
orcid: "https://orcid.org/0000-0002-6103-4605"
- family-names: "Tobin"
given-names: "William"
+ orcid: "https://orcid.org/0009-0001-3960-6064"
- family-names: "White"
given-names: "Joshua"
orcid: "https://orcid.org/0000-0003-3491-142X"
@@ -40,6 +42,7 @@ authors:
orcid: "https://orcid.org/0000-0002-5380-2563"
- family-names: "Han"
given-names: "Brian"
+ orcid: "https://orcid.org/0009-0002-8549-7644"
- family-names: "Gross"
given-names: "Herve"
orcid: "https://orcid.org/0000-0002-1747-2018"
@@ -48,17 +51,21 @@ authors:
orcid: "https://orcid.org/0000-0002-8833-9425"
- family-names: "Mazuyer"
given-names: "Antoine"
+ orcid: "https://orcid.org/0000-0002-0329-3385"
- family-names: "Besset"
given-names: "Julien"
- family-names: "Citrain"
given-names: "Aurelien"
+ orcid: "https://orcid.org/0009-0006-3742-1425"
- family-names: "Vargas"
given-names: "Arturo"
+ orcid: "https://orcid.org/0000-0001-8001-5517"
- family-names: "Cremon"
given-names: "Matthias"
orcid: "https://orcid.org/0000-0001-7458-6401"
- family-names: "Hao"
given-names: "Yue"
+ orcid: "https://orcid.org/0000-0002-4543-8618"
- family-names: "Khait"
given-names: "Mark"
- family-names: "Lacoste"
@@ -67,6 +74,7 @@ authors:
given-names: "Shabnam"
- family-names: "Frambati"
given-names: "Stefano"
+ orchid: "https://orcid.org/0000-0003-0683-1203"
- family-names: "N'diaye"
given-names: "Mamadou"
- family-names: "Nguyen"
@@ -75,24 +83,31 @@ authors:
given-names: "Hannah Mairs"
- family-names: "Crook"
given-names: "Cameron Mikel"
+ orcid: "https://orcid.org/0000-0002-5366-6418"
- family-names: "Jin"
given-names: "Tao"
+ orcid: "https://orcid.org/0000-0001-6658-8941"
- family-names: "Froehly"
given-names: "Algiane"
- family-names: "Homel"
given-names: "Michael"
+ orcid: "https://orcid.org/0000-0002-0399-0092"
- family-names: "Magri"
given-names: "Victor Paludetto"
+ orcid: "https://orcid.org/0000-0002-3389-523X"
- family-names: "Ju"
given-names: "Isaac"
+ orcid: "https://orcid.org/0000-0003-4110-7472"
- family-names: "Rey"
- given-names: "Mel"
+ given-names: "Melvin"
- family-names: "Povolny"
given-names: "Stefan"
- family-names: "Wu"
given-names: "Hui"
+ orcid: "https://orcid.org/0000-0002-9575-3886"
- family-names: "Bui"
given-names: "Quan"
+ orcid: "https://orcid.org/0000-0003-2648-0586"
- family-names: "Tang"
given-names: "Hewei"
- family-names: "Camargo"
@@ -103,6 +118,7 @@ authors:
given-names: "Wu"
- family-names: "Ren"
given-names: "Guotong"
+ orcid: "https://orcid.org/0000-0002-5821-9158"
- family-names: "Yang"
given-names: "Li"
- family-names: "Taeho"
@@ -119,26 +135,31 @@ authors:
given-names: "Bertrand"
- family-names: "Fei"
given-names: "Fan"
+ orcid: "https://orcid.org/0000-0001-7273-4458"
- family-names: "Meng"
given-names: "Jie"
- family-names: "Untereiner"
given-names: "Lionel"
+ orcid: "https://orcid.org/0000-0002-8025-2616"
- family-names: "Raji"
given-names: "Oluwatobi Quadri"
- family-names: "Karimi-Fard"
given-names: "Mohammad"
+ orcid: "https://orcid.org/0000-0001-5707-165X"
- family-names: "Fuss"
given-names: "Gaetan"
- family-names: "Huang"
given-names: "Jixian"
- family-names: "Frigo"
given-names: "Matteo"
+ orcid: "https://orcid.org/0000-0001-8150-1090"
- family-names: "Martinez"
given-names: "Paloma"
- family-names: "Kachuma"
given-names: "Dickson"
- family-names: "Tomin"
given-names: "Pavel"
+ orchid: "https://orcid.org/0000-0003-4862-4288"
- family-names: "Byer"
given-names: "Thomas James"
- family-names: "Ligocki"
@@ -155,12 +176,14 @@ authors:
given-names: "Arnaud"
- family-names: "Costa"
given-names: "Andre Macieira Braga"
+ orcid: "https://orcid.org/0009-0001-1623-4253"
- family-names: "Pellerin"
given-names: "Jeanne"
- family-names: "Aronson"
given-names: "Ryan"
- family-names: "Osei-Kuffuor"
given-names: "Daniel"
+ orcid: "https://orcid.org/0000-0002-6111-6205"
title: "GEOSX"
version: 1.1.0
doi: "10.5281/zenodo.7151031"
diff --git a/src/docs/JOSS/MeshHierarchy.png b/src/docs/JOSS/MeshHierarchy.png
new file mode 100644
index 00000000000..41fe079b8be
Binary files /dev/null and b/src/docs/JOSS/MeshHierarchy.png differ
diff --git a/src/docs/JOSS/MeshHierarchy.svg b/src/docs/JOSS/MeshHierarchy.svg
new file mode 100644
index 00000000000..241ff616c90
--- /dev/null
+++ b/src/docs/JOSS/MeshHierarchy.svg
@@ -0,0 +1,72 @@
+
\ No newline at end of file
diff --git a/src/docs/JOSS/RW_final.pdf b/src/docs/JOSS/RW_final.pdf
new file mode 100644
index 00000000000..5b4160099bb
Binary files /dev/null and b/src/docs/JOSS/RW_final.pdf differ
diff --git a/src/docs/JOSS/RW_final.svg b/src/docs/JOSS/RW_final.svg
new file mode 100644
index 00000000000..bfea492fa46
--- /dev/null
+++ b/src/docs/JOSS/RW_final.svg
@@ -0,0 +1,340 @@
+
+
+
+
diff --git a/src/docs/JOSS/RW_mesh.png b/src/docs/JOSS/RW_mesh.png
new file mode 100644
index 00000000000..aea4b60890a
Binary files /dev/null and b/src/docs/JOSS/RW_mesh.png differ
diff --git a/src/docs/JOSS/RW_results.pdf b/src/docs/JOSS/RW_results.pdf
new file mode 100644
index 00000000000..9abc270c4d5
Binary files /dev/null and b/src/docs/JOSS/RW_results.pdf differ
diff --git a/src/docs/JOSS/nearwell_scaling_frontier.pdf b/src/docs/JOSS/nearwell_scaling_frontier.pdf
new file mode 100644
index 00000000000..d3c69391159
Binary files /dev/null and b/src/docs/JOSS/nearwell_scaling_frontier.pdf differ
diff --git a/src/docs/JOSS/paper.bib b/src/docs/JOSS/paper.bib
new file mode 100644
index 00000000000..947f1a07979
--- /dev/null
+++ b/src/docs/JOSS/paper.bib
@@ -0,0 +1,196 @@
+@article{Settgast:2017,
+ author = {Settgast, Randolph R. and Fu, Pengcheng and Walsh, Stuart D.C. and White, Joshua A. and Annavarapu, Chandrasekhar and Ryerson, Frederick J.},
+ title = {A fully coupled method for massively parallel simulation of hydraulically driven fractures in 3-dimensions},
+ journal = {International Journal for Numerical and Analytical Methods in Geomechanics},
+ volume = {41},
+ number = {5},
+ pages = {627-653},
+ year = {2017},
+ doi = {10.1002/nag.2557}
+}
+
+@Manual{libgeos,
+ title = {{GEOS} computational geometry library},
+ author = {{GEOS contributors}},
+ organization = {Open Source Geospatial Foundation},
+ year = {2021},
+ url = {https://libgeos.org/},
+ doi = {10.5281/zenodo.11396894}
+}
+
+@InProceedings{Beckingsale:2019,
+ author={Beckingsale, David A. and Burmark, Jason and Hornung, Rich and Jones, Holger and Killian, William and Kunen, Adam J. and Pearce, Olga and Robinson, Peter and Ryujin, Brian S. and Scogland, Thomas R. W.},
+ booktitle={2019 IEEE/ACM International Workshop on Performance, Portability and Productivity in HPC (P3HPC)},
+ title={RAJA: Portable Performance for Large-Scale Scientific Applications},
+ pages={71-81},
+ year={2019},
+ doi={10.1109/P3HPC49587.2019.00012}}
+
+@misc{CHAI:2023,
+ author = {CHAI},
+ title = {CHAI},
+ year = {2023},
+ publisher = {GitHub},
+ journal = {GitHub repository},
+ url = {https://github.com/LLNL/chai}
+}
+
+@article{Beckingsale:2020,
+ author={Beckingsale, D. A. and McFadden, M. J. and Dahm, J. P. S. and Pankajakshan, R. and Hornung, R. D.},
+ title={Umpire: Application-focused management and coordination of complex hierarchical memory},
+ journal={IBM Journal of Research and Development},
+ volume={64},
+ number={3/4},
+ pages={15:1-15:10},
+ year={2020},
+ doi={10.1147/JRD.2019.2954403}
+}
+
+@InProceedings{hypre,
+ author = {Falgout, R. D. and Yang, U. M.},
+ title = {\textit{hypre}: a Library of High Performance Preconditioners},
+ booktitle = {Lecture Notes in Computer Science},
+ pages = {632--641},
+ year = {2002},
+ doi={10.1007/3-540-47789-6_66}
+}
+
+@Misc{petsc-web-page,
+ author = {Satish Balay and Shrirang Abhyankar and Mark~F. Adams and Steven Benson and Jed
+ Brown and Peter Brune and Kris Buschelman and Emil~M. Constantinescu and Lisandro
+ Dalcin and Alp Dener and Victor Eijkhout and Jacob Faibussowitsch and William~D.
+ Gropp and V\'{a}clav Hapla and Tobin Isaac and Pierre Jolivet and Dmitry Karpeev
+ and Dinesh Kaushik and Matthew~G. Knepley and Fande Kong and Scott Kruger and
+ Dave~A. May and Lois Curfman McInnes and Richard Tran Mills and Lawrence Mitchell
+ and Todd Munson and Jose~E. Roman and Karl Rupp and Patrick Sanan and Jason Sarich
+ and Barry~F. Smith and Stefano Zampini and Hong Zhang and Hong Zhang and Junchao
+ Zhang},
+ title = {{PETS}c {W}eb page},
+ url = {https://petsc.org/},
+ year = {2024}
+}
+
+@article{ Her_etal05,
+ title={{An overview of the Trilinos project}},
+ author={Heroux, M. A. and Bartlett, R. A. and Howle, V. E. and Hoekstra, R. J. and Hu, J. J. and Kolda, T. G. and Lehoucq, R. B. and Long, K. R. and Pawlowski, R. P. and Phipps, E. T. and Salinger, A. G. and Thornquist, H. K. and Tuminaro, R. S. and Willenbring, J. M. and Williams, A. and Stanley, K. S.},
+ journal={ACM Trans. Math. Softw.},
+ volume={31},
+ number={3},
+ pages={397--423},
+ year={2005},
+ doi={10.1145/1089014.1089021}
+}
+
+
+@article{BUI:2020,
+ author = {Bui, Quan M. and Osei-Kuffuor, Daniel and Castelletto, Nicola and White, Joshua A.},
+ title = {A Scalable Multigrid Reduction Framework for Multiphase Poromechanics of Heterogeneous Media},
+ journal = {SIAM Journal on Scientific Computing},
+ volume = {42},
+ number = {2},
+ pages = {B379-B396},
+ year = {2020},
+ doi = {10.1137/19M1256117},
+}
+
+@article{BUI:2021114111,
+ author = {Quan M. Bui and François P. Hamon and Nicola Castelletto and Daniel Osei-Kuffuor and Randolph R. Settgast and Joshua A. White},
+ title = {Multigrid reduction preconditioning framework for coupled processes in porous and fractured media},
+ journal = {Computer Methods in Applied Mechanics and Engineering},
+ volume = {387},
+ pages = {114111},
+ year = {2021},
+ doi = {10.1016/j.cma.2021.114111}
+}
+
+@book{ IPCC_2023,
+ author={{Intergovernmental Panel on Climate Change IPCC}},
+ title={{Climate Change 2022 - Mitigation of Climate Change: Working Group III Contribution to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change}},
+ publisher={Cambridge University Press},
+ place={Cambridge},
+ year={2023},
+ doi = {10.1017/9781009157926}
+}
+
+@misc{GEOS_RTD,
+ title = {GEOS Documentation},
+ year = {2024},
+ url = {https://geosx-geosx.readthedocs-hosted.com/en/latest/},
+}
+
+@article{Nordbotten2024,
+abstract = {This article contains the description of, and call for participation in, the 11th Society of Petroleum Engineers Comparative Solution Project (the 11th SPE CSP, https://spe.org/csp). It is motivated by the simulation challenges associated with CO2 storage operations in geological settings of realistic complexity. The 11th SPE CSP contains three versions: Version 11A is a 2D geometry at the laboratory scale, inspired by a recent CO2 storage forecasting and validation study. For Version 11B, the 2D geometry and operational conditions from 11A are rescaled to field conditions characteristic of the Norwegian Continental Shelf. Finally, for Version 11C, the geometry of Version 11B is extruded to a full 3D field model. The CSP has a two-year timeline, being launched at the 2023 SPE Reservoir Simulation Conference and culminating at the 2025 SPE Reservoir Simulation Conference. A community effort is run in parallel to develop utility scripts and input files for common simulators to lower the threshold of participation; see the link to supplementary material on the CSP website. At the time of writing, complete input decks for one simulator are already ready for all three versions.},
+author = {Nordbotten, Jan M. and Ferno, Martin A. and Flemisch, Bernd and Kovscek, Anthony R. and Lie, Knut Andreas},
+doi = {10.2118/218015-PA},
+issn = {1086055X},
+journal = {SPE Journal},
+number = {5},
+pages = {2507--2524},
+title = {{The 11th Society of Petroleum Engineers Comparative Solution Project: Problem Definition}},
+volume = {29},
+year = {2024}
+}
+
+@software{ogs:6.5.2,
+ author = {Naumov, Dmitri and
+ Bilke, Lars and
+ Lehmann, Christoph and
+ Fischer, Thomas and
+ Wang, Wenqing and
+ Silbermann, Christian and
+ Thiedau, Jan and
+ Selzer, Philipp},
+ title = {OpenGeoSys},
+ month = jun,
+ year = 2024,
+ publisher = {Zenodo},
+ version = {6.5.2},
+ doi = {10.5281/zenodo.11652195},
+ url = {https://doi.org/10.5281/zenodo.11652195}
+}
+
+@article{Kochetal2020Dumux,
+title = "{DuMu\textsuperscript{x} 3 - an open-source simulator for solving flow and transport problems in porous media with a focus on model coupling}",
+journal = "Computers \& Mathematics with Applications",
+year = "2020",
+issn = "0898-1221",
+doi = "10.1016/j.camwa.2020.02.012",
+author = "Timo Koch and Dennis Glaser and Kilian Weishaupt and Sina Ackermann and Martin Beck and Beatrix Becker and Samuel Burbulla and Holger Class and Edward Coltman and Simon Emmert and Thomas Fetzer and Christoph Gruninger and Katharina Heck and Johannes Hommel and Theresa Kurz and Melanie Lipp and Farid Mohammadi and Samuel Scherrer and Martin Schneider and Gabriele Seitz and Leopold Stadler and Martin Utz and Felix Weinhardt and Bernd Flemisch",
+keywords = "Porous media, Multi-phase flow, Coupled problems, Open-source software, Research software",
+abstract = "We present version 3 of the open-source simulator for flow and transport processes in porous media DuMux. DuMux is based on the modular C++ framework Dune (Distributed and Unified Numerics Environment) and is developed as a research code with a focus on modularity and reusability. We describe recent efforts in improving the transparency and efficiency of the development process and community-building, as well as efforts towards quality assurance and reproducible research. In addition to a major redesign of many simulation components in order to facilitate setting up complex simulations in DuMux, version 3 introduces a more consistent abstraction of finite volume schemes. Finally, the new framework for multi-domain simulations is described, and three numerical examples demonstrate its flexibility."
+}
+
+@article{RASMUSSEN2021159,
+title = {{The Open Porous Media Flow reservoir simulator}},
+journal = {Computers & Mathematics with Applications},
+volume = {81},
+pages = {159-185},
+year = {2021},
+note = {Development and Application of Open-source Software for Problems with Numerical PDEs},
+issn = {0898-1221},
+doi = {10.1016/j.camwa.2020.05.014},
+url = {https://www.sciencedirect.com/science/article/pii/S0898122120302182},
+author = {Atgeirr Flø Rasmussen and Tor Harald Sandve and Kai Bao and Andreas Lauser and Joakim Hove and Bård Skaflestad and Robert Klöfkorn and Markus Blatt and Alf Birger Rustad and Ove Sævareid and Knut-Andreas Lie and Andreas Thune},
+abstract = {The Open Porous Media (OPM) initiative is a community effort that encourages open innovation and reproducible research for simulation of porous media processes. OPM coordinates collaborative software development, maintains and distributes open-source software and open data sets, and seeks to ensure that these are available under a free license in a long-term perspective. In this paper, we present OPM Flow, which is a reservoir simulator developed for industrial use, as well as some of the individual components used to make OPM Flow. The descriptions apply to the 2019.10 release of OPM.}
+}
+
+
+@article{Voskov2024,
+  title = {{open Delft Advanced Research Terra Simulator (open-DARTS)}},
+  author = {Denis Voskov and Ilshat Saifullin and Aleksei Novikov and Michiel Wapperom and Luisa Orozco and Gabriel Serrão Seabra and Yuan Chen and Mark Khait and Xiaocong Lyu and Xiaoming Tian and Stephan de Hoop and Artur Palha},
+  journal = {Journal of Open Source Software},
+  publisher = {The Open Journal},
+  volume = {9},
+  number = {99},
+  pages = {6737},
+  year = {2024},
+  doi = {10.21105/joss.06737},
+  url = {https://doi.org/10.21105/joss.06737}
+}
+
+@inproceedings{frontier,
+author = {Atchley, Scott and Zimmer, Christopher and Lange, John and Bernholdt, David and Melesse Vergara, Veronica and Beck, Thomas and Brim, Michael and Budiardja, Reuben and Chandrasekaran, Sunita and Eisenbach, Markus and Evans, Thomas and Ezell, Matthew and Frontiere, Nicholas and Georgiadou, Antigoni and Glenski, Joe and Grete, Philipp and Hamilton, Steven and Holmen, John and Huebl, Axel and Jacobson, Daniel and Joubert, Wayne and Mcmahon, Kim and Merzari, Elia and Moore, Stan and Myers, Andrew and Nichols, Stephen and Oral, Sarp and Papatheodore, Thomas and Perez, Danny and Rogers, David M. and Schneider, Evan and Vay, Jean-Luc and Yeung, P. K.},
+title = {Frontier: Exploring Exascale},
+year = {2023},
+isbn = {9798400701092},
+publisher = {Association for Computing Machinery},
+address = {New York, NY, USA},
+url = {https://doi.org/10.1145/3581784.3607089},
+doi = {10.1145/3581784.3607089},
+abstract = {As the US Department of Energy (DOE) computing facilities began deploying petascale systems in 2008, DOE was already setting its sights on exascale. In that year, DARPA published a report on the feasibility of reaching exascale. The report authors identified several key challenges in the pursuit of exascale including power, memory, concurrency, and resiliency. That report informed the DOE's computing strategy for reaching exascale. With the deployment of Oak Ridge National Laboratory's Frontier supercomputer, we have officially entered the exascale era. In this paper, we discuss Frontier's architecture, how it addresses those challenges, and describe some early application results from Oak Ridge Leadership Computing Facility's Center of Excellence and the Exascale Computing Project.},
+booktitle = {Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis},
+articleno = {52},
+numpages = {16},
+location = {Denver, CO, USA},
+series = {SC '23}
+}
\ No newline at end of file
diff --git a/src/docs/JOSS/paper.md b/src/docs/JOSS/paper.md
new file mode 100644
index 00000000000..751272a7f8b
--- /dev/null
+++ b/src/docs/JOSS/paper.md
@@ -0,0 +1,253 @@
+---
+title: 'GEOS: A performance portable multi-physics simulation framework for subsurface applications'
+tags:
+ - reservoir simulations
+ - computational mechanics
+ - multiphase flow
+ - C++
+authors:
+ - name: Randolph R. Settgast
+ orcid: 0000-0002-2536-7867
+ corresponding: true
+ affiliation: 1
+ - name: Ryan M. Aronson
+ orcid: 0009-0004-0785-5084
+ affiliation: "2,3"
+ - name: Julien R. Besset
+ affiliation: 7
+ - name: Andrea Borio
+ orcid: 0000-0003-2016-5403
+ affiliation: 5
+ - name: Quan M. Bui
+ orcid: 0000-0003-2648-0586
+ affiliation: 1
+ - name: Thomas J. Byer
+ affiliation: 1
+ - name: Nicola Castelletto
+ orcid: 0000-0001-6816-6769
+ affiliation: 1
+ - name: Aurélien Citrain
+ orcid: 0009-0006-3742-1425
+ affiliation: 7
+ - name: Benjamin C. Corbett
+ orcid: 0009-0008-7108-9651
+ affiliation: 1
+ - name: James Corbett
+ affiliation: 1
+ - name: Philippe Cordier
+ orcid: 0000-0002-6439-9263
+ affiliation: 2
+ - name: Matthias A. Cremon
+ orcid: 0000-0001-7458-6401
+ affiliation: 1
+ - name: Cameron M. Crook
+ orcid: 0000-0002-5366-6418
+ affiliation: 1
+ - name: Matteo Cusini
+ orcid: 0000-0002-6024-861X
+ affiliation: 1
+ - name: Fan Fei
+ orcid: 0000-0001-7273-4458
+ affiliation: 1
+ - name: Stefano Frambati
+ orcid: 0000-0003-0683-1203
+ affiliation: 7
+ - name: Jacques Franc
+ orcid: 0000-0002-8833-9425
+ affiliation: 3
+ - name: Andrea Franceschini
+ orcid: 0000-0003-4395-5125
+ affiliation: 3
+ - name: Matteo Frigo
+ orcid: 0000-0001-8150-1090
+ affiliation: 3
+ - name: Pengcheng Fu
+ orcid: 0000-0002-7408-3350
+ affiliation: 1
+ - name: Thomas Gazzola
+ orcid: 0000-0002-6103-4605
+ affiliation: 2
+ - name: Herve Gross
+ orcid: 0000-0002-1747-2018
+ affiliation: 2
+ - name: Francois Hamon
+ orcid: 0000-0001-8229-963X
+ affiliation: 2
+ - name: Brian M. Han
+ orcid: 0009-0002-8549-7644
+ affiliation: 1
+ - name: Yue Hao
+ orcid: 0000-0002-4543-8618
+ affiliation: 1
+ - name: Rasim Hasanzade
+ affiliation: "3,4"
+ - name: Michael Homel
+ orcid: 0000-0002-0399-0092
+ affiliation: 1
+ - name: Jian Huang
+ orcid: 0000-0002-5380-2563
+ affiliation: 2
+ - name: Tao Jin
+ orcid: 0000-0001-6658-8941
+ affiliation: 1
+ - name: Isaac Ju
+ orcid: 0000-0003-4110-7472
+ affiliation: 3
+ - name: Dickson Kachuma
+ affiliation: 2
+ - name: Mohammad Karimi-Fard
+ orcid: 0000-0001-5707-165X
+ affiliation: 3
+ - name: Taeho Kim
+ affiliation: 2
+ - name: Sergey Klevtsov
+ orcid: 0000-0001-9044-1827
+ affiliation: 3
+ - name: Alexandre Lapene
+ affiliation: 2
+ - name: Victor A. P. Magri
+ orcid: 0000-0002-3389-523X
+ affiliation: 1
+ - name: Antoine Mazuyer
+ orcid: 0000-0002-0329-3385
+ affiliation: "2,3"
+ - name: Mamadou N'diaye
+ affiliation: 3
+ - name: Daniel Osei-Kuffuor
+ orcid: 0000-0002-6111-6205
+ affiliation: 1
+ - name: Stefan Povolny
+ affiliation: 1
+ - name: Guotong Ren
+ orcid: 0000-0002-5821-9158
+ affiliation: 4
+ - name: Shabnam J. Semnani
+ affiliation: 6
+ - name: Chris S. Sherman
+ orcid: 0000-0003-3550-0657
+ affiliation: 1
+ - name: Melvin Rey
+ affiliation: 8
+ - name: Hamdi A. Tchelepi
+ orcid: 0000-0002-3084-6635
+ affiliation: 3
+ - name: William R. Tobin
+ orcid: 0009-0001-3960-6064
+ affiliation: 1
+ - name: Pavel Tomin
+ affiliation: 4
+ orcid: 0000-0003-4862-4288
+ - name: Lionel Untereiner
+ orcid: 0000-0002-8025-2616
+ affiliation: 8
+ - name: Arturo Vargas
+ orcid: 0000-0001-8001-5517
+ affiliation: 1
+ - name: Sohail Waziri
+ affiliation: "3,4"
+ - name: Xianhuan Wen
+ orcid: 0000-0002-6055-4553
+ affiliation: 4
+ - name: Joshua A. White
+ orcid: 0000-0003-3491-142X
+ affiliation: 1
+ - name: Hui Wu
+ orcid: 0000-0002-9575-3886
+ affiliation: 1
+affiliations:
+ - name: Lawrence Livermore National Laboratory, USA
+ index: 1
+ - name: TotalEnergies E&P Research & Technology, USA
+ index: 2
+ - name: Stanford University, USA
+ index: 3
+ - name: Chevron Technical Center, USA
+ index: 4
+ - name: Politecnico di Torino, Italy
+ index: 5
+ - name: University of California San Diego
+ index: 6
+ - name: Inria, Universite de Pau et des Pays de l’Adour
+ index: 7
+ - name: Independent
+ index: 8
+
+date: 28 May 2024
+bibliography: paper.bib
+
+---
+
+# Summary
+
+GEOS is a simulation framework focused on solving tightly coupled multi-physics problems with an initial emphasis on subsurface reservoir applications.
+Currently, GEOS supports capabilities for studying carbon sequestration, geothermal energy, hydrogen storage, and related subsurface applications.
+The aspect of GEOS that differentiates it from existing reservoir simulators is its ability to simulate tightly coupled compositional flow, poromechanics, fault slip, fracture propagation, and thermal effects.
+Extensive documentation is available on the [GEOS documentation pages](https://geosx-geosx.readthedocs-hosted.com/en/latest) [@GEOS_RTD].
+Note that GEOS, as presented here, is a complete rewrite of the previous incarnation of GEOS described in [@Settgast:2017].
+
+
+# Statement of need
+
+The threat of climate change has resulted in an increased focus on mitigating carbon emissions into the atmosphere.
+Carbon Capture and Storage (CCS) of CO~2~ in subsurface reservoirs and saline aquifers is an important component in the strategy to meet global climate goals.
+Given the 2050 net-zero emissions goals, the CO~2~ storage capacity required to offset emissions is orders of magnitude greater than current levels [@IPCC_2023].
+Evaluating reservoir performance and containment risks associated with the injection of liquefied CO~2~ into the subsurface in a reproducible and transparent manner is an important consideration when assessing new storage sites.
+As an example of the typical complexities of carbon storage reservoirs, the 11th Society of Petroleum Engineers Comparative Solution Project (SPE11) [@Nordbotten2024] provides a benchmark for evaluating the predictions of carbon storage simulators.
+The goal of GEOS is to provide the global community with an exascale-capable, open-source tool for simulating the complex coupled physics that occurs when liquefied CO~2~ is injected into a subsurface reservoir.
+To this end, GEOS is freely available and focused on simulating reservoir integrity, including failure mechanisms such as caprock failure, fault leakage, and wellbore failure.
+Open-source projects such as OPM [@RASMUSSEN2021159], OpenGeoSys [@ogs:6.5.2], DuMux [@Kochetal2020Dumux], and open-DARTS [@Voskov2024] share similar objectives.
+However, GEOS stands out in two key areas: its explicit modeling of faults coupled with flow and mechanical deformation, and its focus on performance portability across platforms ranging from workstations to exascale supercomputers.
+
+
+
+
+# GEOS Components
+
+The core C++17 infrastructure provides common computer science capabilities typically required for solving differential equations using a spatially discrete method.
+The components of the infrastructure provided by GEOS include a data hierarchy, a discrete mesh data structure, a mesh-based MPI communications interface, degree-of-freedom management, IO services, and a physics package interface.
+
+By design, GEOS is a generic multi-physics simulation platform.
+Its physics package interface encapsulates the development of the numerical methods used to solve the governing equations relevant to a problem.
+When implementing a package for a set of coupled physics equations, each individual physics package is first developed as a stand-alone capability.
+The single-physics capabilities are then combined in a coupled physics package and solved through a flexible strategy ranging from a fully monolithic system to a split-operator approach.
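+
+To make this structure concrete, the following minimal sketch (schematic class names; not the actual GEOS physics package API) illustrates how stand-alone packages can be reused by a coupled package that selects between a monolithic solve and a split-operator scheme:
+
+```cpp
+// Schematic only: hypothetical names, not the GEOS physics package interface.
+#include <memory>
+#include <vector>
+
+struct PhysicsPackage
+{
+  virtual ~PhysicsPackage() = default;
+  virtual void assembleSystem( double dt ) = 0; // residual and Jacobian contributions
+  virtual void solveStep( double dt ) = 0;      // stand-alone nonlinear solve
+};
+
+struct FlowSolver final : PhysicsPackage
+{
+  void assembleSystem( double dt ) override { /* flow residual/Jacobian */ }
+  void solveStep( double dt ) override { /* Newton loop on flow only */ }
+};
+
+struct MechanicsSolver final : PhysicsPackage
+{
+  void assembleSystem( double dt ) override { /* mechanics residual/Jacobian */ }
+  void solveStep( double dt ) override { /* Newton loop on mechanics only */ }
+};
+
+enum class CouplingScheme { Monolithic, Sequential };
+
+struct CoupledSolver final : PhysicsPackage
+{
+  std::vector< std::unique_ptr< PhysicsPackage > > subSolvers;
+  CouplingScheme scheme = CouplingScheme::Monolithic;
+
+  void assembleSystem( double dt ) override
+  {
+    for( auto & solver : subSolvers )   // block assembly of the coupled system
+    {
+      solver->assembleSystem( dt );
+    }
+    // ... assemble off-diagonal coupling blocks here ...
+  }
+
+  void solveStep( double dt ) override
+  {
+    if( scheme == CouplingScheme::Monolithic )
+    {
+      assembleSystem( dt );             // one Newton loop over the full coupled system
+      // solve the monolithic linear system via the linear algebra interface
+    }
+    else
+    {
+      for( auto & solver : subSolvers ) // split operator: sequential single-physics solves
+      {
+        solver->solveStep( dt );
+      }
+    }
+  }
+};
+```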
+
+To solve the linear systems that arise from the boundary value problem, GEOS maintains a generic linear algebra interface (LAI) capable of wrapping various linear algebra packages such as hypre [@hypre], PETSc [@petsc-web-page], and Trilinos [@Her_etal05].
+Currently, only the hypre interface is actively maintained.
+For every multi-physics problem involving the solution of a coupled linear system, GEOS currently relies on a multigrid reduction preconditioning strategy available in hypre [@BUI:2020;@BUI:2021114111].
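+
+A minimal sketch of this abstraction is shown below (illustrative names; not the actual GEOS LAI classes): physics packages are written against a thin solver interface, and a concrete backend such as hypre is selected behind it.
+
+```cpp
+// Schematic only: hypothetical names, not the actual GEOS linear algebra interface.
+#include <memory>
+
+struct ParallelMatrix { /* wraps a distributed sparse matrix from the backend */ };
+struct ParallelVector { /* wraps a distributed vector from the backend */ };
+
+struct LinearSolverBase
+{
+  virtual ~LinearSolverBase() = default;
+  virtual void setup( ParallelMatrix const & A ) = 0;  // e.g. build an MGR/AMG preconditioner
+  virtual void solve( ParallelVector const & rhs, ParallelVector & sol ) = 0;
+};
+
+struct HypreSolver final : LinearSolverBase
+{
+  void setup( ParallelMatrix const & A ) override { /* call hypre setup routines */ }
+  void solve( ParallelVector const & rhs, ParallelVector & sol ) override { /* call hypre solvers */ }
+};
+
+// A factory hides the backend choice from the physics packages.
+std::unique_ptr< LinearSolverBase > makeLinearSolver()
+{
+  return std::make_unique< HypreSolver >();
+}
+```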
+
+The performance portability strategy utilized by GEOS applies LLNL's suite of portability tools RAJA [@Beckingsale:2019], CHAI [@CHAI:2023], and Umpire [@Beckingsale:2020].
+The RAJA performance portability layer provides [performance portable](https://performanceportability.org) kernel launching and wrappers for reductions, atomics, and local/shared memory to achieve performance on both CPU and GPU hardware.
+The combination of CHAI/Umpire provides memory motion management for platforms with heterogeneous memory spaces (i.e., host and device memory).
+Through this strategy, GEOS has been run successfully on platforms ranging from GPU-based exascale systems to CPU-based laptops with near-optimal performance.
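+
+The pattern is illustrated by the generic RAJA sketch below (generic usage, not code extracted from GEOS): the kernel body is written once and dispatched to different hardware by swapping the execution policy.
+
+```cpp
+#include <RAJA/RAJA.hpp>
+#include <vector>
+
+int main()
+{
+  constexpr int N = 1000;
+  std::vector< double > a( N, 1.0 ), b( N, 2.0 );
+  double * pa = a.data();
+  double const * pb = b.data();
+
+  // Sequential policy shown here; a GPU build would instead use, e.g.,
+  // RAJA::cuda_exec< 256 > or RAJA::hip_exec< 256 >, with CHAI/Umpire
+  // handling the motion of the arrays to device memory.
+  using exec_policy   = RAJA::seq_exec;
+  using reduce_policy = RAJA::seq_reduce;
+
+  RAJA::ReduceSum< reduce_policy, double > total( 0.0 );
+
+  RAJA::forall< exec_policy >( RAJA::RangeSegment( 0, N ), [=] ( int i )
+  {
+    pa[ i ] += pb[ i ];   // element-wise update
+    total += pa[ i ];     // portable reduction
+  } );
+
+  return ( total.get() == 3.0 * N ) ? 0 : 1;
+}
+```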
+
+In addition to its C++ core, the GEOS project provides a Python3 interface that allows for the integration of the simulation capabilities into complex Python workflows involving components unrelated to GEOS.
+
+# Applications
+To date, GEOS has been used to simulate problems relevant to CO~2~ storage, enhanced geothermal systems, hydrogen storage, and both conventional and unconventional oil and gas extraction.
+Often these simulations involve coupling between compositional multiphase flow and transport, poroelasticity, thermal transport, and interactions with faults and fractures.
+
+As an example of a field case where GEOS has been applied, we present a coupled compositional flow/mechanics simulation of CO~2~ injection and storage at a large real-world storage site.
+Figure \ref{RW_results}a illustrates the computational mesh and Figure \ref{RW_results}b shows results after 25 years of injection.
+Simulations such as this will play a critical role in predicting the viability of potential CO~2~ storage sites.
+
+![Real world CO~2~ storage site: (a) discrete mesh, transparency is used for the overburden region to reveal the complex faulted structure of the storage reservoir; (b) results of a compositional flow simulation after 25 years of CO~2~ injection. The CO~2~ plume is shown in white near the bottom of the well. Colors in the reservoir layer indicate changes in fluid pressure, and the colors in the overburden indicate vertical displacement resulting from the injection. Note that color scales have been removed intentionally.\label{RW_results}](RW_final.pdf){ width=100% }
+
+As an example of the weak scalability of GEOS on an exascale-class system, we present two weak scaling studies on a simple wellbore geometry run on the Frontier supercomputer at Oak Ridge National Laboratory.
+Frontier comprises 9,472 Cray EX235a nodes, each containing a single AMD EPYC 7A53 CPU and four AMD MI250X GPUs [@frontier].
+Note that each MI250X comprises two Graphics Compute Dies (GCDs), each of which appears as a separate GPU to the operating system.
+A more detailed discussion and instructions to reproduce the results are available in the [Performance Benchmarks](https://geosx-geosx.readthedocs-hosted.com/en/latest/docs/sphinx/advancedExamples/performanceBenchmarks/Index.html) section of the GEOS documentation.
+
+The weak scaling results for mechanics (Figure \ref{fig:Frontier_scaling}a) show nearly flat scaling of the GEOS operations (assembly/field synchronization) up to 32,768 GPUs ($81.3 \times 10^{9}$ degrees of freedom).
+There is a moderate decrease in efficiency in the hypre preconditioner setup and solve phases, but given the complexity of those algorithms, this level of scaling efficiency is excellent.
+The weak scaling results for compositional flow (Figure \ref{fig:Frontier_scaling}b) show excellent scaling up to 2,048 GPUs.
+
+![Weak scaling results on ORNL/Frontier: average execution time per Newton iteration vs. the number of GPUs for (a) a mechanics simulation and (b) a compositional flow simulation.\label{fig:Frontier_scaling}](nearwell_scaling_frontier.pdf){ width=100% }
+
+# Acknowledgements
+This work was performed under the auspices of the U.S. Department of Energy by Lawrence Livermore National Laboratory under Contract DE-AC52-07NA27344. LLNL release number LLNL-JRNL-864747.
+
+This research was supported by the Exascale Computing Project (ECP), Project Number: 17-SC-20-SC, a collaborative effort of two DOE organizations - the Office of Science and the National Nuclear Security Administration, responsible for the planning and preparation of a capable exascale ecosystem, including software, applications, hardware, advanced system engineering and early testbed platforms, to support the nation's exascale computing imperative.
+
+Support was provided by TotalEnergies and Chevron through the FC-MAELSTROM project, a collaborative effort between Lawrence Livermore National Laboratory, TotalEnergies, Chevron, and Stanford University, aiming to develop an exascale compatible, multiscale, research-oriented simulator for modeling fully coupled flow, transport and geomechanics in geological formations.
+
+# References
diff --git a/src/docs/JOSS/scaling.py b/src/docs/JOSS/scaling.py
new file mode 100755
index 00000000000..e6636f0687d
--- /dev/null
+++ b/src/docs/JOSS/scaling.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+
+import matplotlib.pyplot as plt
+import seaborn as sns
+
+# Set the aesthetic style of the plots
+sns.set(style="white")
+
+# Raw data extracted from the following runs:
+# Mechanics: frontier-[2002292-2002296]
+# Comp. flow: frontier-[2002210-2002213]
+def weak_scaling_plot(savename, lw=3, mks=11):
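+ """Plot the mechanics and compositional-flow weak-scaling timings from Frontier and save the two-panel figure to savename."""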
+ fig, axs = plt.subplots(1, 2, figsize=[18, 6]) # Create a figure and two subplots (rows)
+
+ # Plot 1 (Mechanics)
+ ranks1 = [8, 64, 512, 4096, 32768]
+ global_dofs1 = ['20.7M', '162M', '1.3B', '10.2B', '81.3B']
+ geos_times1 = [1.7805, 1.9492, 1.8850, 1.9575, 2.3137]
+ matrix_times1 = [0.4270, 0.4742, 0.4755, 0.4963, 0.7132]
+ hypre_setup_times1 = [0.9412, 1.1372, 1.3507, 1.6657, 2.2864]
+ hypre_solve_times1 = [0.6751, 1.0381, 1.4013, 2.0558, 3.0379]
+ total_times1 = [a + b + c + d for a, b, c, d in zip(geos_times1, matrix_times1, hypre_setup_times1, hypre_solve_times1)]
+
+ axs[0].plot(ranks1, geos_times1, 'o-', label='GEOS', linewidth=lw, markersize=mks)
+ axs[0].plot(ranks1, matrix_times1, 's-', label='Matrix creation', linewidth=lw, markersize=mks)
+ axs[0].plot(ranks1, hypre_setup_times1, '^-', label='Hypre setup', linewidth=lw, markersize=mks)
+ axs[0].plot(ranks1, hypre_solve_times1, 'v-', label='Hypre solve', linewidth=lw, markersize=mks)
+ axs[0].plot(ranks1, total_times1, 'd-', label='Total', linewidth=lw, markersize=mks)
+ axs[0].set_title('(a) Mechanics', fontsize=23, pad=10)
+ axs[0].set_xlabel('Number of GPUs (Global DOFs)', fontsize=20)
+ axs[0].set_ylabel('Time [s]', fontsize=19)
+ axs[0].set_xscale('log', base=2)
+ axs[0].set_xticks(ranks1)
+ axs[0].set_xticklabels([f"{r:,} ({d})" for r, d in zip(ranks1, global_dofs1)], fontsize=17, rotation=30)
+ axs[0].tick_params(axis='y', labelsize=17)
+ axs[0].set_ylim(bottom=0)
+ axs[0].grid(True, which='both', linestyle='--')
+
+ # Plot 2 (Compositional flow)
+ ranks2 = [4, 32, 256, 2048]
+ global_dofs2 = ['19.2M', '153M', '1.2B', '9.8B']
+ geos_times2 = [0.1909, 0.2051, 0.2109, 0.2180]
+ matrix_times2 = [0.1544, 0.1638, 0.1672, 0.1693]
+ hypre_setup_times2 = [0.5262, 0.6353, 0.7320, 0.8254]
+ hypre_solve_times2 = [0.1053, 0.1253, 0.1487, 0.1797]
+ total_times2 = [a + b + c + d for a, b, c, d in zip(geos_times2, matrix_times2, hypre_setup_times2, hypre_solve_times2)]
+
+ axs[1].plot(ranks2, geos_times2, 'o-', label='GEOS', linewidth=lw, markersize=mks)
+ axs[1].plot(ranks2, matrix_times2, 's-', label='Matrix creation', linewidth=lw, markersize=mks)
+ axs[1].plot(ranks2, hypre_setup_times2, '^-', label='Hypre setup', linewidth=lw, markersize=mks)
+ axs[1].plot(ranks2, hypre_solve_times2, 'v-', label='Hypre solve', linewidth=lw, markersize=mks)
+ axs[1].plot(ranks2, total_times2, 'd-', label='Total', linewidth=lw, markersize=mks)
+ axs[1].set_title('(b) Compositional flow', fontsize=23, pad=10)
+ axs[1].set_xlabel('Number of GPUs (Global DOFs)', fontsize=20, labelpad=14)
+ axs[1].set_ylabel('Time [s]', fontsize=19)
+ axs[1].set_xscale('log', base=2)
+ axs[1].set_xticks(ranks2)
+ axs[1].set_xticklabels([f"{r:,} ({d})" for r, d in zip(ranks2, global_dofs2)], fontsize=17, rotation=30)
+ axs[1].tick_params(axis='y', labelsize=17)
+ axs[1].set_ylim(bottom=0)
+ axs[1].grid(True, which='both', linestyle='--')
+
+ # Adding a single legend outside the plots
+ handles, labels = axs[0].get_legend_handles_labels()
+ fig.legend(handles, labels, loc='upper right', bbox_to_anchor=(1.0, 0.75), fontsize=18)
+
+ # Layout adjustment
+ plt.tight_layout(rect=[0, 0, 0.86, 1]) # Adjust the right margin to fit the legend
+ print(f"Saving figure {savename}...")
+ plt.savefig(savename)
+ plt.show()
+
+if __name__ == "__main__":
+ weak_scaling_plot("nearwell_scaling_frontier.pdf")