Put diag Z grid updates all over the place. #62
nichannah committed Jul 16, 2015
1 parent 8bc1092 commit 218f75e
Showing 5 changed files with 102 additions and 53 deletions.
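Every change follows the same pattern: wherever the layer thicknesses h may change (continuity, diabatic mixing, thickness diffusion, ALE regridding, and initialization), a call to diag_update_target_grids(G, h, CS%diag) is added so the diag mediator can rebuild the target (Z) grids it uses to vertically remap diagnostics. A minimal sketch of the pattern, for illustration only (not part of the diff; the diabatic call and the argument names are taken from the changes below):

! Editor's sketch -- not part of the commit.  Any step that may change the
! layer thicknesses h is followed by a refresh of the remapping target grids.
call diabatic(u, v, h, CS%tv, fluxes, CS%visc, CS%ADp, CS%CDp, &
              dtdia, G, CS%diabatic_CSp)     ! h may change inside diabatic()
! Rebuild the target grids from the updated h before any further diagnostics
! are posted for this step.
call diag_update_target_grids(G, h, CS%diag)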
70 changes: 55 additions & 15 deletions src/core/MOM.F90
@@ -337,7 +337,8 @@ module MOM
use MOM_cpu_clock, only : CLOCK_COMPONENT, CLOCK_SUBCOMPONENT
use MOM_cpu_clock, only : CLOCK_MODULE_DRIVER, CLOCK_MODULE, CLOCK_ROUTINE
use MOM_coms, only : reproducing_sum
use MOM_diag_mediator, only : diag_mediator_init, enable_averaging, diag_set_thickness_ptr
use MOM_diag_mediator, only : diag_mediator_init, enable_averaging
use MOM_diag_mediator, only : diag_set_thickness_ptr, diag_update_target_grids
use MOM_diag_mediator, only : disable_averaging, post_data, safe_alloc_ptr
use MOM_diag_mediator, only : register_diag_field, register_static_field
use MOM_diag_mediator, only : register_scalar_field
@@ -747,6 +748,8 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
showCallTree = callTree_showQuery()
if (showCallTree) call callTree_enter("step_MOM(), MOM.F90")

print*, 'beginning of step_mom: sum(h):', sum(CS%h)

! First determine the time step that is consistent with this call.
! It is anticipated that the time step will almost always coincide
! with dt. In addition, ntstep is determined, subject to the constraint
@@ -867,6 +870,8 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
endif
call cpu_clock_end(id_clock_other)

print*, 'before timestepping: sum(h):', sum(CS%h)

do n=1,n_max

nt = nt + 1
@@ -905,10 +910,14 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
! DIABATIC_FIRST=True. Otherwise diabatic() is called after the dynamics
! and set_viscous_BBL is called as a part of the dynamic stepping.

print*, 'before set_viscous_BBL: sum(h):', sum(CS%h)

!call cpu_clock_begin(id_clock_vertvisc)
call set_viscous_BBL(u, v, h, CS%tv, CS%visc, G, CS%set_visc_CSp)
!call cpu_clock_end(id_clock_vertvisc)

print*, 'after set_viscous_BBL: sum(h):', sum(CS%h)

call cpu_clock_begin(id_clock_pass)
if(associated(CS%visc%Ray_u) .and. associated(CS%visc%Ray_v)) &
call do_group_pass(CS%pass_ray, G%Domain )
@@ -931,12 +940,16 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
call adjustments_dyn_legacy_split(u, v, h, dt, G, CS%dyn_legacy_split_CSp)
endif

print*, 'before diabatic: sum(h):', sum(CS%h)

call cpu_clock_begin(id_clock_diabatic)
call diabatic(u, v, h, CS%tv, fluxes, CS%visc, CS%ADp, CS%CDp, &
dtdia, G, CS%diabatic_CSp)
fluxes%fluxes_used = .true.
call cpu_clock_end(id_clock_diabatic)

print*, 'after diabatic: sum(h):', sum(CS%h)

if (CS%id_u_preale > 0) call post_data(CS%id_u_preale, u, CS%diag)
if (CS%id_v_preale > 0) call post_data(CS%id_v_preale, v, CS%diag)
if (CS%id_h_preale > 0) call post_data(CS%id_h_preale, h, CS%diag)
@@ -970,6 +983,10 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
call do_group_pass(CS%pass_uv_T_S_h, G%Domain)
call cpu_clock_end(id_clock_pass)

! The diag mediator may need to re-generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, h, CS%diag)

if (CS%debug) then
call uchksum(u,"Post-dia first u", G, haloshift=2)
call vchksum(v,"Post-dia first v", G, haloshift=2)
@@ -1032,6 +1049,9 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
else
dtth = dt*min(ntstep,n_max-n+1)
endif

print*, 'before thickness_diffuse: sum(h):', sum(CS%h)

call enable_averaging(dtth,Time_local+set_time(int(floor(dtth-dt+0.5))), CS%diag)
call cpu_clock_begin(id_clock_thick_diff)
if (associated(CS%VarMix)) call calc_slope_functions(h, CS%tv, dt, G, CS%VarMix)
@@ -1043,6 +1063,12 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
call cpu_clock_end(id_clock_pass)
call disable_averaging(CS%diag)
if (showCallTree) call callTree_waypoint("finished thickness_diffuse_first (step_MOM)")

print*, 'after thickness_diffuse: sum(h):', sum(CS%h)
! The diag mediator may need to re-generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, h, CS%diag)

endif
endif

@@ -1150,13 +1176,15 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
endif
endif

! The diag mediator may need to re-generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, h, CS%diag)

if (CS%useMEKE) call step_forward_MEKE(CS%MEKE, h, CS%VarMix%SN_u, CS%VarMix%SN_v, &
CS%visc, dt, G, CS%MEKE_CSp)

call disable_averaging(CS%diag)
call cpu_clock_end(id_clock_dynamics)


CS%dt_trans = CS%dt_trans + dt
if (thermo_does_span_coupling) then
do_advection = (CS%dt_trans + 0.5*dt > dt_therm)
@@ -1262,7 +1290,11 @@ subroutine step_MOM(fluxes, state, Time_start, time_interval, CS)
call hchksum(CS%tv%S,"Post-ALE S", G, haloshift=1)
call check_redundant("Post-ALE ", u, v, G)
endif
endif
endif

! The diag mediator may need to re-generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, h, CS%diag)

call cpu_clock_begin(id_clock_pass)
call do_group_pass(CS%pass_uv_T_S_h, G%Domain)
@@ -1920,10 +1952,6 @@ subroutine initialize_MOM(Time, param_file, dirs, CS, Time_in)
endif
call callTree_waypoint("state variables allocated (initialize_MOM)")

! Set up a pointer to h within the diag mediator control structure;
! this needs to occur _after_ CS%h has been allocated.
call diag_set_thickness_ptr(CS%h, diag)

! Set the fields that are needed for bitwise identical restarting
! the time stepping scheme.
call restart_init(G, param_file, CS%restart_CSp)
@@ -1972,11 +2000,6 @@ subroutine initialize_MOM(Time, param_file, dirs, CS, Time_in)
call cpu_clock_end(id_clock_MOM_init)
call callTree_waypoint("returned from MOM_initialize_state() (initialize_MOM)")

! Initialize the diagnostics mask arrays.
! This step has to be done after the call to MOM_initialize_state
! and before MOM_diagnostics_init
call diag_masks_set(G, CS%missing, diag)

if (CS%use_ALE_algorithm) then
! For now, this has to follow immediately after MOM_initialize_state because
! the call to initialize_ALE can change CS%h, etc. initialize_ALE should
@@ -2000,9 +2023,24 @@
endif
endif

! This call sets up the diagnostic axes.
call cpu_clock_begin(id_clock_MOM_init)
! Initialize the diagnostics mask arrays.
! This step has to be done after the call to MOM_initialize_state
! and before MOM_diagnostics_init
call diag_masks_set(G, CS%missing, diag)

! Set up a pointer to h within the diag mediator control structure;
! this needs to occur _after_ CS%h has been allocated.
call diag_set_thickness_ptr(CS%h, diag)

! This call sets up the diagnostic axes. These are needed,
! e.g. to generate the target grids below.
call set_axes_info(G, param_file, diag)

! The diag mediator may need to (re)generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, CS%h, diag)

call cpu_clock_begin(id_clock_MOM_init)
if (CS%use_ALE_algorithm) then
call ALE_writeCoordinateFile( CS%ALE_CSp, G, dirs%output_directory )
endif
@@ -2181,6 +2219,8 @@ subroutine initialize_MOM(Time, param_file, dirs, CS, Time_in)
call callTree_leave("initialize_MOM()")
call cpu_clock_end(id_clock_init)

print*, 'end of subroutine: sum(h):', sum(CS%h)

end subroutine initialize_MOM
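
The reordering in initialize_MOM above also consolidates the constraints noted in the comments: diag_masks_set must follow MOM_initialize_state, diag_set_thickness_ptr must follow the allocation of CS%h, and set_axes_info must precede the first diag_update_target_grids call because the axes are needed to build the target grids. A sketch of the resulting order (editor's illustration only; the calls are abbreviated as they appear in the diff):

! Editor's sketch of the initialization ordering established in initialize_MOM.
call diag_masks_set(G, CS%missing, diag)      ! after MOM_initialize_state
call diag_set_thickness_ptr(CS%h, diag)       ! after CS%h has been allocated
call set_axes_info(G, param_file, diag)       ! axes needed for the target grids
call diag_update_target_grids(G, CS%h, diag)  ! build the initial target grids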


4 changes: 4 additions & 0 deletions src/core/MOM_continuity.F90
@@ -157,6 +157,10 @@ subroutine continuity(u, v, hin, h, uh, vh, dt, G, CS, uhbt, vhbt, OBC, &
call MOM_error(FATAL, "continuity: Unrecognized value of continuity_scheme")
endif

! The diag mediator may need to re-generate target grids for remapping when
! total thickness changes.
call diag_update_target_grids(G, h, CS%diag)

end subroutine continuity

subroutine continuity_init(Time, G, param_file, diag, CS)
7 changes: 6 additions & 1 deletion src/core/MOM_dynamics_split_RK2.F90
@@ -78,7 +78,7 @@ module MOM_dynamics_split_RK2
use MOM_diag_mediator, only : diag_mediator_init, enable_averaging
use MOM_diag_mediator, only : disable_averaging, post_data, safe_alloc_ptr
use MOM_diag_mediator, only : register_diag_field, register_static_field
use MOM_diag_mediator, only : set_diag_mediator_grid, diag_ctrl
use MOM_diag_mediator, only : set_diag_mediator_grid, diag_ctrl, diag_update_target_grids
use MOM_domains, only : MOM_domains_init
use MOM_domains, only : To_South, To_West, To_All, CGRID_NE, SCALAR_PAIR
use MOM_domains, only : create_group_pass, do_group_pass, group_pass_type
@@ -701,6 +701,8 @@ subroutine step_MOM_dyn_split_RK2(u, v, h, tv, visc, &
call cpu_clock_end(id_clock_continuity)
if (showCallTree) call callTree_wayPoint("done with continuity (step_MOM_dyn_split_RK2)")

call diag_update_target_grids(G, h, CS%diag)

call cpu_clock_begin(id_clock_pass)
call do_group_pass(CS%pass_hp_uv, G%Domain)
if (G%nonblocking_updates) then
@@ -908,6 +910,7 @@ subroutine step_MOM_dyn_split_RK2(u, v, h, tv, visc, &
call do_group_pass(CS%pass_h, G%Domain)
call cpu_clock_end(id_clock_pass)
if (showCallTree) call callTree_wayPoint("done with continuity (step_MOM_dyn_split_RK2)")
call diag_update_target_grids(G, h, CS%diag)

call cpu_clock_begin(id_clock_pass)
if (G%nonblocking_updates) then
@@ -1269,6 +1272,8 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, param_file, &
CS%h_av(:,:,:) = h(:,:,:)
endif

call diag_update_target_grids(G, h, CS%diag)

call cpu_clock_begin(id_clock_pass_init)
call create_group_pass(pass_av_h_uvh, CS%u_av,CS%v_av, G%Domain)
call create_group_pass(pass_av_h_uvh, CS%h_av, G%Domain)