Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Split CCPP finalize into physics_finalize and (framework) finalize; minor CCPP cleanup and documentation updates #487

Merged
merged 5 commits into from
Mar 11, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion atmos_model.F90
Original file line number Diff line number Diff line change
Expand Up @@ -975,7 +975,10 @@ subroutine atmos_model_end (Atmos)
endif

! Fast physics (from dynamics) are finalized in atmosphere_end above;
! standard/slow physics (from CCPP) are finalized in CCPP_step 'finalize'.
! standard/slow physics (from CCPP) are finalized in CCPP_step 'physics_finalize'.
call CCPP_step (step="physics_finalize", nblks=Atm_block%nblks, ierr=ierr)
if (ierr/=0) call mpp_error(FATAL, 'Call to CCPP physics_finalize step failed')

! The CCPP framework for all cdata structures is finalized in CCPP_step 'finalize'.
call CCPP_step (step="finalize", nblks=Atm_block%nblks, ierr=ierr)
if (ierr/=0) call mpp_error(FATAL, 'Call to CCPP finalize step failed')
Expand Down
13 changes: 0 additions & 13 deletions ccpp/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,6 @@ if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "Coverage")
endif()

#------------------------------------------------------------------------------
# CMake Modules
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/framework/cmake")

#------------------------------------------------------------------------------
# Call to CCPP code generator
if(DEBUG)
Expand Down Expand Up @@ -54,14 +50,6 @@ if(MPI)
add_definitions(-DMPI)
endif()

#------------------------------------------------------------------------------
# Set additional flags for debug build
if(DEBUG)
if(CMAKE_Fortran_COMPILER_ID MATCHES "Intel")
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -init=snan,arrays")
junwang-noaa marked this conversation as resolved.
Show resolved Hide resolved
endif()
endif()

#------------------------------------------------------------------------------
# Set flag for 32bit dynamics build
if(32BIT)
Expand Down Expand Up @@ -117,7 +105,6 @@ add_library(
# Compile GFS_diagnostics.F90 without optimization, this leads to out of memory errors on wcoss_dell_p3
set_property(SOURCE driver/GFS_diagnostics.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "-O0")


target_link_libraries(fv3ccpp PUBLIC ccpp_framework)
target_link_libraries(fv3ccpp PUBLIC ccpp_physics)

Expand Down
3 changes: 2 additions & 1 deletion ccpp/config/ccpp_prebuild_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@

# Default build dir, relative to current working directory,
# if not specified as command-line argument
DEFAULT_BUILD_DIR = 'FV3'
DEFAULT_BUILD_DIR = 'build'

# Auto-generated makefile/cmakefile snippets that contain all type definitions
TYPEDEFS_MAKEFILE = '{build_dir}/physics/CCPP_TYPEDEFS.mk'
Expand Down Expand Up @@ -242,6 +242,7 @@
STATIC_API_SRCFILE = '{build_dir}/physics/CCPP_STATIC_API.sh'

# Directory for writing HTML pages generated from metadata files
# used by metadata2html.py for generating scientific documentation
METADATA_HTML_OUTPUT_DIR = '{build_dir}/physics/physics/docs'

# HTML document containing the model-defined CCPP variables
Expand Down
4 changes: 2 additions & 2 deletions ccpp/data/GFS_typedefs.F90
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ module GFS_typedefs

implicit none

! To ensure that these values match what's in the physics,
! array sizes are compared during model init in GFS_rrtmg_setup_init()
! To ensure that these values match what's in the physics, array
! sizes are compared in the auto-generated physics caps in debug mode
private :: NF_AESW, NF_AELW, NSPC, NSPC1, NF_CLDS, NF_VGAS, NF_ALBD, ntrcaerm
! from module_radiation_aerosols
integer, parameter :: NF_AESW = 3
Expand Down
48 changes: 22 additions & 26 deletions ccpp/driver/CCPP_driver.F90
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,8 @@ subroutine CCPP_step (step, nblks, ierr)
else if (trim(step)=="physics_init") then

! Since the physics init step is independent of the blocking structure,
! we can use cdata_domain here. Since we don't use threading on the outside,
! we can allow threading inside the physics init routines.
! we can use cdata_domain. And since we don't use threading on the host
! model side, we can allow threading inside the physics init routines.
GFS_control%nthreads = nthrds

call ccpp_physics_init(cdata_domain, suite_name=trim(ccpp_suite), ierr=ierr)
Expand All @@ -116,8 +116,8 @@ subroutine CCPP_step (step, nblks, ierr)
else if (trim(step)=="timestep_init") then

! Since the physics timestep init step is independent of the blocking structure,
! we can use cdata_domain here. Since we don't use threading on the outside,
! we can allow threading inside the timestep init (time_vary) routines.
! we can use cdata_domain. And since we don't use threading on the host
! model side, we can allow threading inside the timestep init (time_vary) routines.
GFS_control%nthreads = nthrds

call ccpp_physics_timestep_init(cdata_domain, suite_name=trim(ccpp_suite), group_name="time_vary", ierr=ierr)
Expand Down Expand Up @@ -159,11 +159,11 @@ subroutine CCPP_step (step, nblks, ierr)
! *DH 20210104 !
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

! Radiation and stochastic physics
! Radiation, physics and stochastic physics - threaded regions using blocked data structures
else if (trim(step)=="radiation" .or. trim(step)=="physics" .or. trim(step)=="stochastics") then

! Set number of threads available to physics schemes to one,
! because threads are used on the outside for blocking
! because threads are used on the host model side for blocking
GFS_control%nthreads = 1

!$OMP parallel num_threads (nthrds) &
Expand All @@ -188,8 +188,8 @@ subroutine CCPP_step (step, nblks, ierr)
call ccpp_physics_run(cdata_block(nb,ntX), suite_name=trim(ccpp_suite), group_name=trim(step), ierr=ierr2)
if (ierr2/=0) then
write(0,'(2a,3(a,i4),a)') "An error occurred in ccpp_physics_run for group ", trim(step), &
", block ", nb, " and thread ", nt, " (ntX=", ntX, ")"
write(0,'(a)') trim(cdata_block(nb,nt)%errmsg)
", block ", nb, " and thread ", nt, " (ntX=", ntX, "):"
write(0,'(a)') trim(cdata_block(nb,ntX)%errmsg)
ierr = ierr + ierr2
end if
end do
Expand All @@ -202,7 +202,7 @@ subroutine CCPP_step (step, nblks, ierr)
else if (trim(step)=="timestep_finalize") then

! Since the physics timestep finalize step is independent of the blocking structure,
! we can use cdata_domain here. Since we don't use threading on the outside,
! we can use cdata_domain. And since we don't use threading on the host model side,
! we can allow threading inside the timestep finalize (time_vary) routines.
GFS_control%nthreads = nthrds

Expand All @@ -213,27 +213,23 @@ subroutine CCPP_step (step, nblks, ierr)
return
end if

! Finalize
else if (trim(step)=="finalize") then
! Physics finalize
else if (trim(step)=="physics_finalize") then

! Loop over blocks, don't use threading on the outside but allowing threading
! inside the finalization, similar to what is done for the initialization
! Since the physics finalize step is independent of the blocking structure,
! we can use cdata_domain. And since we don't use threading on the host
! model side, we can allow threading inside the physics finalize routines.
GFS_control%nthreads = nthrds

! Fast physics are finalized in atmosphere_end, loop over
! all blocks and threads to finalize all other physics
do nt=1,nthrdsX
do nb=1,nblks
!--- Finalize CCPP physics
call ccpp_physics_finalize(cdata_block(nb,nt), suite_name=trim(ccpp_suite), ierr=ierr)
if (ierr/=0) then
write(0,'(a,i4,a,i4)') "An error occurred in ccpp_physics_finalize for block ", nb, " and thread ", nt
write(0,'(a)') trim(cdata_block(nb,nt)%errmsg)
return
end if
end do
end do
call ccpp_physics_finalize(cdata_domain, suite_name=trim(ccpp_suite), ierr=ierr)
if (ierr/=0) then
write(0,'(a)') "An error occurred in ccpp_physics_finalize"
write(0,'(a)') trim(cdata_domain%errmsg)
return
end if

! Finalize
else if (trim(step)=="finalize") then
! Deallocate cdata structure for blocks and threads
if (allocated(cdata_block)) deallocate(cdata_block)

Expand Down
2 changes: 1 addition & 1 deletion ccpp/framework