diff --git a/src/core/MOM.F90 b/src/core/MOM.F90
index caf9e11041..837714130f 100644
--- a/src/core/MOM.F90
+++ b/src/core/MOM.F90
@@ -337,7 +337,8 @@ module MOM
 use MOM_cpu_clock, only : CLOCK_COMPONENT, CLOCK_SUBCOMPONENT
 use MOM_cpu_clock, only : CLOCK_MODULE_DRIVER, CLOCK_MODULE, CLOCK_ROUTINE
 use MOM_coms, only : reproducing_sum
-use MOM_diag_mediator, only : diag_mediator_init, enable_averaging, diag_set_thickness_ptr
+use MOM_diag_mediator, only : diag_mediator_init, enable_averaging
+use MOM_diag_mediator, only : diag_set_thickness_ptr, diag_update_target_grids
 use MOM_diag_mediator, only : disable_averaging, post_data, safe_alloc_ptr
 use MOM_diag_mediator, only : register_diag_field, register_static_field
 use MOM_diag_mediator, only : register_scalar_field
@@ -747,6 +748,8 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
   showCallTree = callTree_showQuery()
   if (showCallTree) call callTree_enter("step_MOM(), MOM.F90")
 
+  print*, 'beginning of step_MOM: sum(h):', sum(CS%h)
+
   ! First determine the time step that is consistent with this call.
   ! It is anticipated that the time step will almost always coincide
   ! with dt. In addition, ntstep is determined, subject to the constraint
@@ -867,6 +870,8 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
     endif
     call cpu_clock_end(id_clock_other)
 
+    print*, 'before timestepping: sum(h):', sum(CS%h)
+
     do n=1,n_max
       nt = nt + 1
 
@@ -905,10 +910,14 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
       ! DIABATIC_FIRST=True. Otherwise diabatic() is called after the dynamics
       ! and set_viscous_BBL is called as a part of the dynamic stepping.
 
+      print*, 'before set_viscous_BBL: sum(h):', sum(CS%h)
+
       !call cpu_clock_begin(id_clock_vertvisc)
       call set_viscous_BBL(u, v, h, CS%tv, CS%visc, G, CS%set_visc_CSp)
       !call cpu_clock_end(id_clock_vertvisc)
 
+      print*, 'after set_viscous_BBL: sum(h):', sum(CS%h)
+
       call cpu_clock_begin(id_clock_pass)
       if(associated(CS%visc%Ray_u) .and. associated(CS%visc%Ray_v)) &
         call do_group_pass(CS%pass_ray, G%Domain )
@@ -931,12 +940,16 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
         call adjustments_dyn_legacy_split(u, v, h, dt, G, CS%dyn_legacy_split_CSp)
       endif
 
+      print*, 'before diabatic: sum(h):', sum(CS%h)
+
       call cpu_clock_begin(id_clock_diabatic)
       call diabatic(u, v, h, CS%tv, fluxes, CS%visc, CS%ADp, CS%CDp, &
                     dtdia, G, CS%diabatic_CSp)
       fluxes%fluxes_used = .true.
       call cpu_clock_end(id_clock_diabatic)
 
+      print*, 'after diabatic: sum(h):', sum(CS%h)
+
       if (CS%id_u_preale > 0) call post_data(CS%id_u_preale, u, CS%diag)
       if (CS%id_v_preale > 0) call post_data(CS%id_v_preale, v, CS%diag)
       if (CS%id_h_preale > 0) call post_data(CS%id_h_preale, h, CS%diag)
@@ -970,6 +983,10 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
       call do_group_pass(CS%pass_uv_T_S_h, G%Domain)
       call cpu_clock_end(id_clock_pass)
 
+      ! The diag mediator may need to re-generate target grids for remapping when
+      ! total thickness changes.
+      call diag_update_target_grids(G, h, CS%diag)
+
       if (CS%debug) then
         call uchksum(u,"Post-dia first u", G, haloshift=2)
         call vchksum(v,"Post-dia first v", G, haloshift=2)
@@ -1032,6 +1049,9 @@
         else
           dtth = dt*min(ntstep,n_max-n+1)
         endif
+
+        print*, 'before thickness_diffuse: sum(h):', sum(CS%h)
+
         call enable_averaging(dtth,Time_local+set_time(int(floor(dtth-dt+0.5))), CS%diag)
         call cpu_clock_begin(id_clock_thick_diff)
         if (associated(CS%VarMix)) call calc_slope_functions(h, CS%tv, dt, G, CS%VarMix)
@@ -1043,6 +1063,12 @@
         call cpu_clock_end(id_clock_pass)
         call disable_averaging(CS%diag)
         if (showCallTree) call callTree_waypoint("finished thickness_diffuse_first (step_MOM)")
+
+        print*, 'after thickness_diffuse: sum(h):', sum(CS%h)
+        ! The diag mediator may need to re-generate target grids for remapping when
+        ! total thickness changes.
+        call diag_update_target_grids(G, h, CS%diag)
+
       endif
     endif
@@ -1150,13 +1176,15 @@
         endif
       endif
 
+      ! The diag mediator may need to re-generate target grids for remapping when
+      ! total thickness changes.
+      call diag_update_target_grids(G, h, CS%diag)
+
       if (CS%useMEKE) call step_forward_MEKE(CS%MEKE, h, CS%VarMix%SN_u, CS%VarMix%SN_v, &
                                              CS%visc, dt, G, CS%MEKE_CSp)
-
       call disable_averaging(CS%diag)
       call cpu_clock_end(id_clock_dynamics)
-
       CS%dt_trans = CS%dt_trans + dt
       if (thermo_does_span_coupling) then
         do_advection = (CS%dt_trans + 0.5*dt > dt_therm)
@@ -1262,7 +1290,11 @@
         call hchksum(CS%tv%S,"Post-ALE S", G, haloshift=1)
         call check_redundant("Post-ALE ", u, v, G)
       endif
-      endif
+      endif
+
+      ! The diag mediator may need to re-generate target grids for remapping when
+      ! total thickness changes.
+      call diag_update_target_grids(G, h, CS%diag)
 
       call cpu_clock_begin(id_clock_pass)
       call do_group_pass(CS%pass_uv_T_S_h, G%Domain)
@@ -1920,10 +1952,6 @@ subroutine initialize_MOM(Time, param_file, dirs, CS, Time_in)
   endif
   call callTree_waypoint("state variables allocated (initialize_MOM)")
 
-  ! Set up a pointers h within diag mediator control structure,
-  ! this needs to occur _after_ CS%h has been allocated.
-  call diag_set_thickness_ptr(CS%h, diag)
-
   ! Set the fields that are needed for bitwise identical restarting
   ! the time stepping scheme.
   call restart_init(G, param_file, CS%restart_CSp)
@@ -1972,11 +2000,6 @@
   call cpu_clock_end(id_clock_MOM_init)
   call callTree_waypoint("returned from MOM_initialize_state() (initialize_MOM)")
 
-  ! Initialize the diagnostics mask arrays.
-  ! This step has to be done after call to MOM_initialize_state
-  ! and before MOM_diagnostics_init
-  call diag_masks_set(G, CS%missing, diag)
-
   if (CS%use_ALE_algorithm) then
     ! For now, this has to follow immediately after MOM_initialize_state because
    ! the call to initialize_ALE can change CS%h, etc. initialize_ALE should
@@ -2000,9 +2023,24 @@
     endif
   endif
 
-  ! This call sets up the diagnostic axes.
-  call cpu_clock_begin(id_clock_MOM_init)
+  ! Initialize the diagnostics mask arrays.
+  ! This step has to be done after call to MOM_initialize_state
+  ! and before MOM_diagnostics_init
+  call diag_masks_set(G, CS%missing, diag)
+
+  ! Set up a pointer to h within the diag mediator control structure;
+  ! this needs to occur _after_ CS%h has been allocated.
+  call diag_set_thickness_ptr(CS%h, diag)
+
+  ! This call sets up the diagnostic axes. These are needed,
+  ! e.g. to generate the target grids below.
   call set_axes_info(G, param_file, diag)
+
+  ! The diag mediator may need to (re)generate target grids for remapping when
+  ! total thickness changes.
+  call diag_update_target_grids(G, CS%h, diag)
+
+  call cpu_clock_begin(id_clock_MOM_init)
   if (CS%use_ALE_algorithm) then
     call ALE_writeCoordinateFile( CS%ALE_CSp, G, dirs%output_directory )
   endif
@@ -2181,6 +2219,8 @@
   call callTree_leave("initialize_MOM()")
   call cpu_clock_end(id_clock_init)
 
+  print*, 'end of initialize_MOM: sum(h):', sum(CS%h)
+
 end subroutine initialize_MOM
 
diff --git a/src/core/MOM_continuity.F90 b/src/core/MOM_continuity.F90
index b0556eaf06..7f68a33124 100644
--- a/src/core/MOM_continuity.F90
+++ b/src/core/MOM_continuity.F90
@@ -157,6 +157,10 @@
     call MOM_error(FATAL, "continuity: Unrecognized value of continuity_scheme")
   endif
 
+  ! The diag mediator may need to re-generate target grids for remapping when
+  ! total thickness changes.
+  call diag_update_target_grids(G, h, CS%diag)
+
 end subroutine continuity
 
 subroutine continuity_init(Time, G, param_file, diag, CS)
diff --git a/src/core/MOM_dynamics_split_RK2.F90 b/src/core/MOM_dynamics_split_RK2.F90
index 3c602a5612..8ac0cff4cf 100644
--- a/src/core/MOM_dynamics_split_RK2.F90
+++ b/src/core/MOM_dynamics_split_RK2.F90
@@ -78,7 +78,7 @@ module MOM_dynamics_split_RK2
 use MOM_diag_mediator, only : diag_mediator_init, enable_averaging
 use MOM_diag_mediator, only : disable_averaging, post_data, safe_alloc_ptr
 use MOM_diag_mediator, only : register_diag_field, register_static_field
-use MOM_diag_mediator, only : set_diag_mediator_grid, diag_ctrl
+use MOM_diag_mediator, only : set_diag_mediator_grid, diag_ctrl, diag_update_target_grids
 use MOM_domains, only : MOM_domains_init
 use MOM_domains, only : To_South, To_West, To_All, CGRID_NE, SCALAR_PAIR
 use MOM_domains, only : create_group_pass, do_group_pass, group_pass_type
@@ -701,6 +701,8 @@ subroutine step_MOM_dyn_split_RK2(u, v, h, tv, visc, &
   call cpu_clock_end(id_clock_continuity)
   if (showCallTree) call callTree_wayPoint("done with continuity (step_MOM_dyn_split_RK2)")
 
+  call diag_update_target_grids(G, h, CS%diag)
+
   call cpu_clock_begin(id_clock_pass)
   call do_group_pass(CS%pass_hp_uv, G%Domain)
   if (G%nonblocking_updates) then
@@ -908,6 +910,7 @@
     call do_group_pass(CS%pass_h, G%Domain)
     call cpu_clock_end(id_clock_pass)
     if (showCallTree) call callTree_wayPoint("done with continuity (step_MOM_dyn_split_RK2)")
+    call diag_update_target_grids(G, h, CS%diag)
 
     call cpu_clock_begin(id_clock_pass)
     if (G%nonblocking_updates) then
@@ -1269,6 +1272,8 @@
     CS%h_av(:,:,:) = h(:,:,:)
   endif
 
+  call diag_update_target_grids(G, h, CS%diag)
+
   call cpu_clock_begin(id_clock_pass_init)
   call create_group_pass(pass_av_h_uvh, CS%u_av,CS%v_av, G%Domain)
   call create_group_pass(pass_av_h_uvh, CS%h_av, G%Domain)
diff --git a/src/framework/MOM_diag_mediator.F90 b/src/framework/MOM_diag_mediator.F90
index f8c64fc4a8..e9c499f07d 100644
--- a/src/framework/MOM_diag_mediator.F90
+++ b/src/framework/MOM_diag_mediator.F90
@@ -68,7 +68,7 @@ module MOM_diag_mediator
 public register_scalar_field
 public defineAxes, diag_masks_set
 public diag_set_thickness_ptr
-public update_diag_target_grids
+public diag_update_target_grids
 
 interface post_data
   module procedure post_data_3d, post_data_2d, post_data_0d
@@ -622,7 +622,8 @@ subroutine post_data_3d(diag_field_id, field, diag_cs, is_static, mask)
     endif
 
     allocate(remapped_field(DIM_I(field),DIM_J(field), diag_cs%nz_remap))
-    call update_diag_target_grids(diag_cs%G, diag_cs)
+    print*, 'post_data_3d: sum(diag_cs%h): ', sum(diag_cs%h)
+    !call diag_update_target_grids(diag_cs%G, diag_cs%h, diag_cs)
     call remap_diag_to_z(field, diag, diag_cs, remapped_field)
     if (associated(diag%mask3d)) then
       ! Since 3d masks do not vary in the vertical, just use as much as is
@@ -643,7 +644,7 @@
 end subroutine post_data_3d
 
 subroutine remap_diag_to_z(field, diag, diag_cs, remapped_field)
 ! Remap diagnostic field to z-star vertical grid.
-! This uses grids generated by update_diag_target_grids()
+! This uses grids generated by diag_update_target_grids()
   real, dimension(:,:,:), intent(in) :: field
   type(diag_type), intent(in) :: diag
@@ -661,16 +662,16 @@
   integer :: nz_src, nz_dest
   integer :: i, j, k
 
+  call assert(diag_cs%remapping_initialized, &
+              'remap_diag_to_z: Remapping not initialized.')
   call assert(size(field, 3) == size(diag_cs%h, 3), &
-              'Remap field and thickness z-axes do not match.')
+              'remap_diag_to_z: Remap field and thickness z-axes do not match.')
 
   remapped_field = diag_cs%missing_value
   nz_src = size(field, 3)
   nz_dest = diag_cs%nz_remap
 
   if (is_u_axes(diag%remap_axes, diag_cs)) then
-    call assert(allocated(diag_cs%zi_u), &
-                'remap_diag_to_z: Z grid on u points not made.')
    do j=diag_cs%G%jsc, diag_cs%G%jec
      do i=diag_cs%G%iscB, diag_cs%G%iecB
        if (associated(diag%mask3d)) then
@@ -696,8 +697,6 @@
     enddo
 
   elseif (is_v_axes(diag%remap_axes, diag_cs)) then
-    call assert(allocated(diag_cs%zi_v), &
-                'remap_diag_to_z: Z grid on v points not made.')
     do j=diag_cs%G%jscB, diag_cs%G%jecB
       do i=diag_cs%G%isc, diag_cs%G%iec
         if (associated(diag%mask3d)) then
@@ -719,8 +718,6 @@
       enddo
     enddo
   else
-    call assert(allocated(diag_cs%zi_T), &
-                'remap_diag_to_z: Z grid on T points not built.')
     do j=diag_cs%G%jsc, diag_cs%G%jec
       do i=diag_cs%G%isc, diag_cs%G%iec
         if (associated(diag%mask3d)) then
@@ -743,29 +740,33 @@
 
 end subroutine remap_diag_to_z
 
-subroutine update_diag_target_grids(G, diag_cs)
+subroutine diag_update_target_grids(G, h, diag_cs)
 ! Build target grids for diagnostic remapping.
 !
 ! The target grids only need to be made when sea surface
 ! height changes, not every time a diagnostic is posted/written.
 
-  type(ocean_grid_type), intent(in) :: G
-  type(diag_ctrl), intent(inout)    :: diag_cs
+  type(ocean_grid_type), intent(in)  :: G
+  real, dimension(:,:,:), intent(in) :: h
+  type(diag_ctrl), intent(inout)     :: diag_cs
 ! Arguments:
-! (inout) G - ocean grid structure.
+! (in) G - ocean grid structure.
+! (in) h - the model layer thicknesses
 ! (inout) diag_cs - structure used to regulate diagnostic output.
 
-  real, dimension(size(diag_cs%h, 3)) :: h_src
+  real, dimension(size(h, 3)) :: h_src
   real :: depth
   integer :: nz_src, nz_dest
   integer :: i, j, k
+  logical :: force
 
   nz_dest = diag_cs%nz_remap
-  nz_src = size(diag_cs%h, 3)
+  nz_src = size(h, 3)
+
+  print*, 'diag_update_target_grids sum(h): ', sum(h)
 
-  if ((diag_cs%do_z_remapping_on_u .or. diag_cs%do_z_remapping_on_v .or. &
-       diag_cs%do_z_remapping_on_T) .and. (.not. diag_cs%remapping_initialized)) then
+  if (.not. diag_cs%remapping_initialized) then
     call assert(allocated(diag_cs%zi_remap), &
          'update_diag_target_grids: Remapping axis not initialized')
@@ -775,17 +776,17 @@
     call setRegriddingMinimumThickness(G%Angstrom, diag_cs%regrid_cs)
     call setCoordinateResolution(diag_cs%zi_remap(2:) - &
                                  diag_cs%zi_remap(:nz_dest), diag_cs%regrid_cs)
-    diag_cs%remapping_initialized = .true.
+
+    allocate(diag_cs%zi_u(G%iscB:G%iecB,G%jsc:G%jec,nz_dest+1))
+    allocate(diag_cs%zi_v(G%isc:G%iec,G%jscB:G%jecB,nz_dest+1))
+    allocate(diag_cs%zi_T(G%isc:G%iec,G%jsc:G%jec,nz_dest+1))
   endif
 
   ! Build z-star grid on u points
-  if (diag_cs%do_z_remapping_on_u) then
-    if (.not. allocated(diag_cs%zi_u)) then
-      allocate(diag_cs%zi_u(G%iscB:G%iecB,G%jsc:G%jec,nz_dest+1))
-    endif
+  if (diag_cs%do_z_remapping_on_u .or. (.not. diag_cs%remapping_initialized)) then
     do j=G%jsc, G%jec
       do i=G%iscB, G%iecB
-        h_src(:) = 0.5 * (diag_cs%h(i,j,:) + diag_cs%h(i+1,j,:))
+        h_src(:) = 0.5 * (h(i,j,:) + h(i+1,j,:))
         depth = 0.5 * (G%bathyT(i,j) + G%bathyT(i+1,j))
         call buildGridZstarColumn(diag_cs%regrid_cs, nz_dest, depth, &
                                   sum(h_src(:)), diag_cs%zi_u(i, j, :))
@@ -795,14 +796,10 @@
   endif
 
   ! Build z-star grid on u points
-  if (diag_cs%do_z_remapping_on_v) then
-    if (.not. allocated(diag_cs%zi_v)) then
-      allocate(diag_cs%zi_v(G%isc:G%iec,G%jscB:G%jecB,nz_dest+1))
-    endif
-
+  if (diag_cs%do_z_remapping_on_v .or. (.not. diag_cs%remapping_initialized)) then
     do j=G%jscB, G%jecB
       do i=G%isc, G%iec
-        h_src(:) = 0.5 * (diag_cs%h(i,j,:) + diag_cs%h(i,j+1,:))
+        h_src(:) = 0.5 * (h(i,j,:) + h(i,j+1,:))
         depth = 0.5 * (G%bathyT(i, j) + G%bathyT(i, j+1))
         call buildGridZstarColumn(diag_cs%regrid_cs, nz_dest, depth, &
                                   sum(h_src(:)), diag_cs%zi_v(i, j, :))
@@ -812,22 +809,19 @@
     enddo
   endif
 
   ! Build z-star grid on T points
-  if (diag_cs%do_z_remapping_on_T) then
-    if (.not. allocated(diag_cs%zi_T)) then
-      allocate(diag_cs%zi_T(G%isc:G%iec,G%jsc:G%jec,nz_dest+1))
-    endif
-
+  if (diag_cs%do_z_remapping_on_T .or. (.not. diag_cs%remapping_initialized)) then
     do j=G%jsc, G%jec
       do i=G%isc, G%iec
         call buildGridZstarColumn(diag_cs%regrid_cs, nz_dest, G%bathyT(i, j), &
-                                  sum(diag_cs%h(i, j, :)), diag_cs%zi_T(i, j, :))
+                                  sum(h(i, j, :)), diag_cs%zi_T(i, j, :))
         diag_cs%zi_T(i, j, :) = -diag_cs%zi_T(i, j, :)
       enddo
     enddo
   endif
 
-end subroutine update_diag_target_grids
+  diag_cs%remapping_initialized = .true.
+end subroutine diag_update_target_grids
 
 subroutine post_data_3d_low(diag, field, diag_cs, is_static, mask)
   type(diag_type), intent(in) :: diag
@@ -1478,6 +1472,7 @@ subroutine diag_mediator_init(G, param_file, diag_cs, err_msg)
 
   ! Keep a pointer to the grid, this is needed for regridding
   diag_cs%G => G
+  diag_cs%h => null()
   diag_cs%nz_remap = -1
   diag_cs%do_z_remapping_on_u = .false.
   diag_cs%do_z_remapping_on_v = .false.
diff --git a/src/parameterizations/lateral/MOM_thickness_diffuse.F90 b/src/parameterizations/lateral/MOM_thickness_diffuse.F90
index 6ac7db1e1e..f811040257 100644
--- a/src/parameterizations/lateral/MOM_thickness_diffuse.F90
+++ b/src/parameterizations/lateral/MOM_thickness_diffuse.F90
@@ -48,6 +48,7 @@ module MOM_thickness_diffuse
 use MOM_checksums, only : hchksum, uchksum, vchksum
 use MOM_diag_mediator, only : post_data, query_averaging_enabled, diag_ctrl
 use MOM_diag_mediator, only : register_diag_field, safe_alloc_ptr, time_type
+use MOM_diag_mediator, only : diag_update_target_grids
 use MOM_error_handler, only : MOM_error, FATAL, WARNING
 use MOM_EOS, only : calculate_density, calculate_density_derivs
 use MOM_file_parser, only : get_param, log_version, param_file_type
@@ -338,6 +339,10 @@ subroutine thickness_diffuse(h, uhtr, vhtr, tv, dt, G, MEKE, VarMix, CDp, CS)
     enddo ; enddo
   enddo
 
+  ! The diag mediator may need to re-generate target grids for remapping when
+  ! total thickness changes.
+  call diag_update_target_grids(G, h, CS%diag)
+
   if (MEKE_not_null .AND. ASSOCIATED(VarMix)) then
     if (ASSOCIATED(MEKE%Rd_dx_h) .and. ASSOCIATED(VarMix%Rd_dx_h)) then
 !$OMP parallel do default(none) shared(is,ie,js,je,MEKE,VarMix)
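
Note (not part of the patch): the recurring addition above is a call to diag_update_target_grids(G, h, CS%diag) immediately after any step that can alter layer thicknesses (continuity, diabatic processes, thickness diffusion, ALE regridding), so that remap_diag_to_z always sees z-star target grids consistent with the current total thickness. The standalone sketch below illustrates, for a single column, the kind of rebuild buildGridZstarColumn performs; it assumes uniform target resolution and ignores the configured minimum layer thickness, and the program and variable names are illustrative only, not MOM6 identifiers.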
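! Standalone sketch (not MOM6 code): rebuild z-star target interfaces for one
! column after its total thickness changes. The real code calls
! buildGridZstarColumn with the configured coordinate resolution and minimum
! thickness; uniform target layers are assumed here for brevity.
program zstar_column_sketch
  implicit none
  integer, parameter :: nz_src = 5    ! source (model) layers
  integer, parameter :: nz_dest = 4   ! target (diagnostic) layers
  real :: h(nz_src)                   ! layer thicknesses (m)
  real :: zi(nz_dest+1)               ! target interface heights (m, negative downward)
  real :: total
  integer :: k

  h = (/ 10.0, 20.0, 30.0, 25.0, 15.0 /)
  total = sum(h)   ! when this changes, the target grid must be rebuilt

  ! Distribute nz_dest uniform layers over the instantaneous column thickness,
  ! then negate so interfaces are heights, mirroring zi_T(i,j,:) = -zi_T(i,j,:).
  do k = 1, nz_dest+1
    zi(k) = -(real(k-1) / real(nz_dest)) * total
  end do

  print *, 'sum(h) =', total
  print *, 'target interfaces =', zi
end program zstar_column_sketch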
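Under these simplifying assumptions, doubling every h(k) leaves the relative interface spacing unchanged but doubles each interface depth, which is exactly why the target grids must be rebuilt whenever sum(h) changes rather than generated once at initialization.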