diff --git a/Externals.cfg b/Externals.cfg index 371d8a1ecd..24f921c578 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -56,13 +56,6 @@ repo_url = https://github.com/NOAA-EMC/GDASApp.git protocol = git required = False -[GLDAS] -tag = fd8ba62 -local_path = sorc/gldas.fd -repo_url = https://github.com/NOAA-EMC/GLDAS.git -protocol = git -required = False - [EMC-gfs_wafs] hash = 014a0b8 local_path = sorc/gfs_wafs.fd diff --git a/docs/source/components.rst b/docs/source/components.rst index 3ebd575a82..9e4377f739 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -1,5 +1,5 @@ ########################### -Global Workflow Components +Global Workflow Components ########################### The global-workflow is a combination of several components working together to prepare, analyze, produce, and post-process forecast data. @@ -13,7 +13,7 @@ The major components of the system are: * Post-processing * Verification -The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. +The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. 
====================== Component repositories @@ -21,12 +21,11 @@ Component repositories Components checked out via sorc/checkout.sh: -* **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration -* **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of cooupled component earth systeme that are all checked out when we check out the code at the top level of the repoitory +* **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration +* **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of coupled earth system components that are all checked out when we check out the code at the top level of the repository * **GSI** (https://github.com/NOAA-EMC/GSI): This is the core code base for atmospheric Data Assimilation -* **GSI UTILS** (https://github.com/NOAA-EMC/GSI-Utils): Utility codes needed by GSI to create analysis -* **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values -* **GLDAS** (https://github.com/NOAA-EMC/GLDAS): Code base for Land Data Assimiation +* **GSI UTILS** (https://github.com/NOAA-EMC/GSI-Utils): Utility codes needed by GSI to create analysis +* **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation 
system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only @@ -43,7 +42,7 @@ External dependencies Libraries ^^^^^^^^^ -All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms Find information on official installations of HPC-STACK here: diff --git a/docs/source/configure.rst b/docs/source/configure.rst index 4b1409fcdd..477e95cec7 100644 --- a/docs/source/configure.rst +++ b/docs/source/configure.rst @@ -10,7 +10,7 @@ The global-workflow configs contain switches that change how the system runs. 
Ma | APP | Model application | ATM | YES | See case block in config.base for options | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DOIAU | Enable 4DIAU for control | YES | NO | Turned off for cold-start first half cycle | -| | with 3 increments | | | | +| | with 3 increments | | | | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DOHYBVAR | Run EnKF | YES | YES | Don't recommend turning off | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ @@ -26,9 +26,6 @@ The global-workflow configs contain switches that change how the system runs. Ma | DO_GEMPAK | Run job to produce GEMPAK | NO | YES | downstream processing, ops only | | | products | | | | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_GLDAS | Run GLDAS to spin up land | YES | YES | Spins up for 84hrs if sflux files not available | -| | ICs | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ | DO_VRFY | Run vrfy job | NO | YES | Whether to include vrfy job (GSI monitoring, | | | | | | tracker, VSDB, fit2obs) | +----------------+------------------------------+---------------+-------------+---------------------------------------------------+ diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst index ae7e1cd68a..67863bb9a2 100644 --- a/docs/source/jobs.rst +++ b/docs/source/jobs.rst @@ -3,7 +3,7 @@ GFS Configuration ################# .. 
figure:: _static/GFS_v16_flowchart.png - + Schematic flow chart for GFS v16 in operations The sequence of jobs that are run for an end-to-end (analysis+forecast+post processing+verification) GFS configuration using the Global Workflow is shown above. The system utilizes a collection of scripts that perform the tasks for each step. @@ -12,7 +12,7 @@ For any cycle the system consists of two suites -- the "gdas" suite which provid An experimental run is different from operations in the following ways: -* Workflow manager: operations utilizes `ecFlow `__, while development currently utilizes `ROCOTO `__. Note, experiments can also be run using ecFlow on platforms with ecFlow servers established. +* Workflow manager: operations utilizes `ecFlow `__, while development currently utilizes `ROCOTO `__. Note, experiments can also be run using ecFlow on platforms with ecFlow servers established. * Dump step is not run as it has already been completed during the real-time production runs and dump data is available in the global dump archive on supported machines. @@ -25,7 +25,7 @@ An experimental run is different from operations in the following ways: Downstream jobs (e.g. awips, gempak, etc.) are not included in the diagram. Those jobs are not normally run in developmental tests. ============================= -Jobs in the GFS Configuration +Jobs in the GFS Configuration ============================= +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | JOB NAME | PURPOSE | @@ -65,8 +65,6 @@ Jobs in the GFS Configuration +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | fcst | Runs the forecast (with or without one-way waves). 
| +-------------------+-----------------------------------------------------------------------------------------------------------------------+ -| gldas | Runs the Global Land Data Assimilation System (GLDAS). | -+-------------------+-----------------------------------------------------------------------------------------------------------------------+ | metpN | Runs MET/METplus verification via EMC_verif-global. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | prep | Runs the data preprocessing prior to the analysis (storm relocation if needed and generation of prepbufr file). | diff --git a/ecf/defs/gfs_00.def b/ecf/defs/gfs_00.def index b564c6e260..2ff0a785a7 100644 --- a/ecf/defs/gfs_00.def +++ b/ecf/defs/gfs_00.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/00/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/00/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_06.def b/ecf/defs/gfs_06.def index 29b896d769..4524d28374 100644 --- a/ecf/defs/gfs_06.def +++ b/ecf/defs/gfs_06.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/06/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete 
- endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/06/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_12.def b/ecf/defs/gfs_12.def index f04e1f79a6..6e366bffcd 100644 --- a/ecf/defs/gfs_12.def +++ b/ecf/defs/gfs_12.def @@ -2225,10 +2225,6 @@ trigger /prod/primary/12/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger /prod/primary/12/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2355,7 +2351,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/gfs_18.def b/ecf/defs/gfs_18.def index cefe301707..0a8c52cf7e 100644 --- a/ecf/defs/gfs_18.def +++ b/ecf/defs/gfs_18.def @@ -2224,10 +2224,6 @@ trigger /prod/primary/18/obsproc/v1.0/gdas/atmos/dump/jobsproc_gdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger 
/prod/primary/18/obsproc/v1.0/gdas/atmos/prep/jobsproc_gdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2354,7 +2350,7 @@ endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit RUN 'gdas' diff --git a/ecf/defs/prod00.def b/ecf/defs/prod00.def index 20e391faee..f36fee8ed0 100644 --- a/ecf/defs/prod00.def +++ b/ecf/defs/prod00.def @@ -2259,10 +2259,6 @@ suite prod00 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod00 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '00' diff --git a/ecf/defs/prod06.def b/ecf/defs/prod06.def index 1d8a767a9e..9ba8b46332 100644 --- a/ecf/defs/prod06.def +++ b/ecf/defs/prod06.def @@ -2262,10 +2262,6 @@ suite prod06 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2392,7 +2388,7 @@ suite prod06 endfamily endfamily task jgdas_forecast - trigger 
./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '06' diff --git a/ecf/defs/prod12.def b/ecf/defs/prod12.def index 3d44ed917d..1c058e2640 100644 --- a/ecf/defs/prod12.def +++ b/ecf/defs/prod12.def @@ -2259,10 +2259,6 @@ suite prod12 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod12 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete endfamily family enkfgdas edit CYC '12' diff --git a/ecf/defs/prod18.def b/ecf/defs/prod18.def index ba0b951044..a7f715b016 100644 --- a/ecf/defs/prod18.def +++ b/ecf/defs/prod18.def @@ -2259,10 +2259,6 @@ suite prod18 trigger ../dump/jgdas_atmos_dump:release_sfcprep endfamily endfamily - family init - task jgdas_atmos_gldas - trigger ../analysis/jgdas_atmos_analysis == complete - endfamily family analysis task jgdas_atmos_analysis trigger ../obsproc/prep/jgdas_atmos_prep == complete and ../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep == complete @@ -2389,7 +2385,7 @@ suite prod18 endfamily endfamily task jgdas_forecast - trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == complete and ./atmos/init/jgdas_atmos_gldas == complete + trigger ./atmos/analysis/jgdas_atmos_analysis:release_fcst and ./wave/prep/jgdas_wave_prep == 
complete endfamily family enkfgdas edit CYC '18' diff --git a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf b/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf deleted file mode 100755 index 0834533051..0000000000 --- a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf +++ /dev/null @@ -1,69 +0,0 @@ -#PBS -S /bin/bash -#PBS -N %RUN%_atmos_gldas_%CYC% -#PBS -j oe -#PBS -q %QUEUE% -#PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:20:00 -#PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 -#PBS -l place=vscatter:exclhost -#PBS -l debug=true - -model=gfs -%include -%include - -set -x - -export NET=%NET:gfs% -export RUN=%RUN% -export CDUMP=%RUN% - -############################################################ -# Load modules -############################################################ -module load PrgEnv-intel/${PrgEnv_intel_ver} -module load craype/${craype_ver} -module load intel/${intel_ver} -module load cray-mpich/${cray_mpich_ver} -module load cray-pals/${cray_pals_ver} -module load cfp/${cfp_ver} -module load libjpeg/${libjpeg_ver} -module load hdf5/${hdf5_ver} -module load netcdf/${netcdf_ver} -module load grib_util/${grib_util_ver} -module load wgrib2/${wgrib2_ver} - -module list - -############################################################# -# environment settings -############################################################# -export cyc=%CYC% -export cycle=t%CYC%z -export USE_CFP=YES - -############################################################ -# CALL executable job script here -############################################################ -${HOMEgfs}/jobs/JGDAS_ATMOS_GLDAS -if [ $? -ne 0 ]; then - ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" - ecflow_client --abort - exit -fi - -%include -%manual -###################################################################### -# Purpose: To execute the job that prepares initial condition for -# gdas. 
-###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -%end diff --git a/env/HERA.env b/env/HERA.env index 4c11ad3fc3..9e35b4e38e 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -150,27 +150,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - export CFP_MP="YES" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. - - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/JET.env b/env/JET.env index 8c296d315a..a74828915c 100755 --- a/env/JET.env +++ b/env/JET.env @@ -137,10 +137,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - echo "WARNING: ${step} is not enabled on ${machine}!" 
- elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/ORION.env b/env/ORION.env index 332b1ca42b..17d08e0658 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -149,26 +149,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. - - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/S4.env b/env/S4.env index 68f4e0c468..c2f82630d6 100755 --- a/env/S4.env +++ b/env/S4.env @@ -123,27 +123,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="NO" - export CFP_MP="YES" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian}" - -# Must run data processing with exactly the number of tasks as time -# periods being processed. 
- - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/WCOSS2.env b/env/WCOSS2.env index d69f031016..f609ea0249 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -115,27 +115,6 @@ elif [[ "${step}" = "sfcanl" ]]; then npe_sfcanl=${ntiles:-6} export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [[ "${step}" = "gldas" ]]; then - - export USE_CFP="YES" - export CFP_MP="NO" - - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} -n ${npe_gldas} -ppn ${npe_node_gldas} --cpu-bind depth --depth ${NTHREADS_GLDAS}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian} -ppn ${npe_node_gaussian} --cpu-bind depth --depth ${NTHREADS_GAUSSIAN}" - - # Must run data processing with exactly the number of tasks as time - # periods being processed. - export USE_CFP=${USE_CFP:-"YES"} - npe_gldas_data_proc=$((gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="${launcher} -np ${npe_gldas_data_proc} ${mpmd_opt}" - elif [[ "${step}" = "eobs" ]]; then export OMP_PLACES=cores diff --git a/jobs/JGDAS_ATMOS_GLDAS b/jobs/JGDAS_ATMOS_GLDAS deleted file mode 100755 index dee6b4c9e3..0000000000 --- a/jobs/JGDAS_ATMOS_GLDAS +++ /dev/null @@ -1,85 +0,0 @@ -#! 
/usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "gldas" -c "base gldas" - -if [[ "${cyc:?}" -ne "${gldas_cyc:?}" ]]; then - echo "GLDAS only runs for ${gldas_cyc} cycle; Skip GLDAS step for cycle ${cyc}" - rm -Rf "${DATA}" - exit 0 -fi - -gldas_spinup_hours=${gldas_spinup_hours-:72} -xtime=$((gldas_spinup_hours+12)) -if [[ "${CDATE}" -le "$(${NDATE:?} +"${xtime}" "${SDATE:?}")" ]]; then - echo "GLDAS needs fluxes as forcing from cycles in previous ${xtime} hours" - echo "starting from ${SDATE}. This gldas cycle is skipped" - rm -Rf "${DATA}" - exit 0 -fi - - -############################################## -# Set variables used in the exglobal script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gdas"}} -export COMPONENT="atmos" - - -############################################## -# Begin JOB SPECIFIC work -############################################## -export gldas_ver=${gldas_ver:-v2.3.0} -export HOMEgldas=${HOMEgldas:-${HOMEgfs}} -export FIXgldas=${FIXgldas:-${HOMEgldas}/fix/gldas} -export PARMgldas=${PARMgldas:-${HOMEgldas}/parm/gldas} -export EXECgldas=${EXECgldas:-${HOMEgldas}/exec} -export USHgldas=${USHgldas:-${HOMEgldas}/ush} -export PARA_CONFIG=${HOMEgfs}/parm/config/config.gldas - -if [[ "${RUN_ENVIR}" = "nco" ]]; then - export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} - export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} -else - export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" - export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos" -fi -if [[ ! 
-d ${COMOUT} ]]; then - mkdir -p "${COMOUT}" - chmod 775 "${COMOUT}" -fi - -export COMINgdas=${COMINgdas:-${ROTDIR}} -export DCOMIN=${DCOMIN:-${DCOMROOT:-"/lfs/h1/ops/prod/dcom"}} - -export model=${model:-noah} -export MODEL=${MODEL:-"${model} |tr 'a-z' 'A-Z'"} - - -############################################################### -# Run relevant exglobal script - -${GLDASSH:-${HOMEgldas}/scripts/exgdas_atmos_gldas.sh} -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" || exit 1 -[[ ${KEEPDATA:?} = "NO" ]] && rm -rf "${DATA}" - -exit 0 - diff --git a/jobs/rocoto/gldas.sh b/jobs/rocoto/gldas.sh deleted file mode 100755 index 8d8bb903bb..0000000000 --- a/jobs/rocoto/gldas.sh +++ /dev/null @@ -1,21 +0,0 @@ -#! /usr/bin/env bash - -source "$HOMEgfs/ush/preamble.sh" - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -export job="gldas" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB. GLDAS only runs once per day. - -$HOMEgfs/jobs/JGDAS_ATMOS_GLDAS -status=$? 
- - -exit $status diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn index 8ebcafe6cf..4a4101a156 100644 --- a/parm/config/gefs/config.base.emc.dyn +++ b/parm/config/gefs/config.base.emc.dyn @@ -230,7 +230,7 @@ fi # Output frequency of the forecast model (for cycling) export FHMIN=0 export FHMAX=9 -export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) # Cycle to run EnKF (set to BOTH for both gfs and gdas) export EUPD_CYC="gdas" @@ -278,15 +278,6 @@ export IAU_DELTHRS_ENKF=6 # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." -# run GLDAS to spin up land ICs -export DO_GLDAS="NO" -export gldas_cyc=00 - -# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 -if [[ ${DO_GLDAS} = "YES" ]]; then - export FHOUT=1 -fi - # if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA # export DO_WAVE="NO" # echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 35a713e939..9dd5c6e737 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -13,7 +13,7 @@ if [[ $# -ne 1 ]]; then echo "atmensanlinit atmensanlrun atmensanlfinal" echo "landanlprep landanlinit landanlrun landanlfinal" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" @@ -496,19 +496,6 @@ elif [[ ${step} = "sfcanl" ]]; then export npe_node_sfcanl export is_exclusive=True -elif [[ ${step} = 
"gldas" ]]; then - - export wtime_gldas="00:10:00" - export npe_gldas=112 - export nth_gldas=1 - npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) - export npe_node_gldas - export npe_gaussian=96 - export nth_gaussian=1 - npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) - export npe_node_gaussian - export is_exclusive=True - elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then export is_exclusive=True diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch index c705e0b7ed..6a0f6306a8 100644 --- a/parm/config/gfs/config.arch +++ b/parm/config/gfs/config.arch @@ -6,7 +6,7 @@ echo "BEGIN: config.arch" # Get task specific resources -. $EXPDIR/config.resources arch +. "${EXPDIR}/config.resources" arch export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} @@ -15,15 +15,10 @@ export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} #--online archive of nemsio files for fit2obs verification export FITSARC="YES" export FHMAX_FITS=132 -[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} #--starting and ending hours of previous cycles to be removed from rotating directory export RMOLDSTD=144 export RMOLDEND=24 -#--keep forcing data for running gldas step -if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then - [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 -fi - echo "END: config.arch" diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index 863df89170..8d085d0deb 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -230,7 +230,7 @@ fi # Output frequency of the forecast model (for cycling) export FHMIN=0 export FHMAX=9 -export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO 
and l4densvar set to false) # Cycle to run EnKF (set to BOTH for both gfs and gdas) export EUPD_CYC="gdas" @@ -278,15 +278,6 @@ export IAU_DELTHRS_ENKF=6 # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." -# run GLDAS to spin up land ICs -export DO_GLDAS="NO" -export gldas_cyc=00 - -# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 -if [[ ${DO_GLDAS} = "YES" ]]; then - export FHOUT=1 -fi - # if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA # export DO_WAVE="NO" # echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" diff --git a/parm/config/gfs/config.base.nco.static b/parm/config/gfs/config.base.nco.static index a2620e5ab1..7d726cc62e 100644 --- a/parm/config/gfs/config.base.nco.static +++ b/parm/config/gfs/config.base.nco.static @@ -183,10 +183,6 @@ export IAU_DELTHRS_ENKF=6 # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." -# run GLDAS to spin up land ICs -export DO_GLDAS=YES -export gldas_cyc=00 - # run wave component export DO_WAVE=YES export WAVE_CDUMP="both" diff --git a/parm/config/gfs/config.gldas b/parm/config/gfs/config.gldas deleted file mode 100644 index c51829d9fc..0000000000 --- a/parm/config/gfs/config.gldas +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env bash - -########## config.gldas ########## -# GDAS gldas step specific - -echo "BEGIN: config.gldas" - -# Get task specific resources -. 
$EXPDIR/config.resources gldas - -export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh -export gldas_spinup_hours=72 -export CPCGAUGE=$DMPDIR -export FINDDATE=$USHgfs/finddate.sh - -echo "END: config.gldas" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 3eb9c8535e..5a6c734b98 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -13,7 +13,7 @@ if [[ $# -ne 1 ]]; then echo "atmensanlinit atmensanlrun atmensanlfinal" echo "landanlprep landanlinit landanlrun landanlfinal" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" @@ -496,19 +496,6 @@ elif [[ ${step} = "sfcanl" ]]; then export npe_node_sfcanl export is_exclusive=True -elif [[ ${step} = "gldas" ]]; then - - export wtime_gldas="00:10:00" - export npe_gldas=112 - export nth_gldas=1 - npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) - export npe_node_gldas - export npe_gaussian=96 - export nth_gaussian=1 - npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) - export npe_node_gaussian - export is_exclusive=True - elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then export is_exclusive=True diff --git a/parm/config/gfs/config.resources.nco.static b/parm/config/gfs/config.resources.nco.static index e6cd2ef73e..d98e985b95 100644 --- a/parm/config/gfs/config.resources.nco.static +++ b/parm/config/gfs/config.resources.nco.static @@ -8,7 +8,7 @@ if [ $# -ne 1 ]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" - echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" @@ -144,16 +144,6 @@ elif [ $step = "analdiag" ]; then export npe_node_analdiag=$npe_analdiag export memory_analdiag="48GB" -elif [ $step = "gldas" ]; then - - export wtime_gldas="00:10:00" - export npe_gldas=112 - export nth_gldas=1 - export npe_node_gldas=$npe_gldas - export npe_gaussian=96 - export nth_gaussian=1 - export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) - elif [ $step = "fcst" ]; then export wtime_fcst="01:30:00" diff --git a/scripts/exgdas_atmos_gldas.sh b/scripts/exgdas_atmos_gldas.sh deleted file mode 100755 index ba56e323aa..0000000000 --- a/scripts/exgdas_atmos_gldas.sh +++ /dev/null @@ -1,332 +0,0 @@ -#! /usr/bin/env bash - -################################################################################ -#### UNIX Script Documentation Block -# . . 
-# Script name: exgdas_atmos_gldas.sh -# Script description: Runs the global land analysis -# -################################################################################ - -source "${HOMEgfs:?}/ush/preamble.sh" - -################################# -# Set up UTILITIES -################################# -export FINDDATE=${FINDDATE:-/apps/ops/prod/nco/core/prod_util.v2.0.13/ush/finddate.sh} -export utilexec=${utilexec:-/apps/ops/prod/libs/intel/19.1.3.304/grib_util/1.2.3/bin} -export CNVGRIB=${CNVGRIB:-${utilexec}/cnvgrib} -export WGRIB=${WGRIB:-${utilexec}/wgrib} -export WGRIB2=${WGRIB2:-/apps/ops/prod/libs/intel/19.1.3.304/wgrib2/2.0.7/bin/wgrib2} -export COPYGB=${COPYGB:-${utilexec}/copygb} -export NDATE=${NDATE:-/apps/ops/prod/nco/core/prod_util.v2.0.13/exec/ndate} -export DCOMIN=${DCOMIN:-${DCOMROOT:-"/lfs/h1/ops/prod/dcom"}} -export CPCGAUGE=${CPCGAUGE:-/lfs/h2/emc/global/noscrub/emc.global/dump} -export COMINgdas=${COMINgdas:-${ROTDIR}} -export OFFLINE_GLDAS=${OFFLINE_GLDAS:-"NO"} -export ERRSCRIPT=${ERRSCRIPT:-"eval [[ ${err} = 0 ]]"} - - -################################# -# Set up the running environment -################################# -export USE_CFP=${USE_CFP:-"NO"} -export assim_freq=${assim_freq:-6} -export gldas_spinup_hours=${gldas_spinup_hours:-72} - -# Local date variables -gldas_cdate=${CDATE:?} -gldas_eymd=$(echo "${gldas_cdate}" |cut -c 1-8) -gldas_ecyc=$(echo "${gldas_cdate}" |cut -c 9-10) -gldas_sdate=$(${NDATE} -"${gldas_spinup_hours}" "${CDATE}") -gldas_symd=$(echo "${gldas_sdate}" |cut -c 1-8) -gldas_scyc=$(echo "${gldas_sdate}" |cut -c 9-10) - -iau_cdate=${CDATE} -if [[ "${DOIAU:?}" = "YES" ]]; then - IAU_OFFSET=${IAU_OFFSET:-0} - IAUHALH=$((IAU_OFFSET/2)) - iau_cdate=$(${NDATE} -"${IAUHALH}" "${CDATE}") -fi -iau_eymd=$(echo "${iau_cdate}" |cut -c 1-8) -iau_ecyc=$(echo "${iau_cdate}" |cut -c 9-10) -echo "GLDAS runs from ${gldas_sdate} to ${iau_cdate}" - -CASE=${CASE:-C768} -res=$(echo "${CASE}" |cut -c2-5) -JCAP=$((2*res-2)) 
-nlat=$((2*res)) -nlon=$((4*res)) - -export USHgldas=${USHgldas:?} -export FIXgldas=${FIXgldas:-${HOMEgfs}/fix/gldas} -export topodir=${topodir:-${HOMEgfs}/fix/orog/${CASE}} - -DATA=${DATA:-${pwd}/gldastmp$$} -mkdata=NO -if [[ ! -d "${DATA}" ]]; then - mkdata=YES - mkdir -p "${DATA}" -fi -cd "${DATA}" || exit 1 -export RUNDIR=${DATA} - - -################################# -GDAS=${RUNDIR}/force -mkdir -p "${GDAS}" - -input1=${COMINgdas}/gdas.${gldas_symd}/${gldas_scyc}/atmos/RESTART -input2=${COMINgdas}/gdas.${gldas_eymd}/${gldas_ecyc}/atmos/RESTART -[[ -d ${RUNDIR} ]] && rm -fr "${RUNDIR}/FIX" -[[ -f ${RUNDIR}/LIS ]] && rm -fr "${RUNDIR}/LIS" -[[ -d ${RUNDIR}/input ]] && rm -fr "${RUNDIR}/input" -mkdir -p "${RUNDIR}/input" -ln -fs "${GDAS}" "${RUNDIR}/input/GDAS" -ln -fs "${EXECgldas:?}/gldas_model" "${RUNDIR}/LIS" - -# Set FIXgldas subfolder -ln -fs "${FIXgldas}/frac_grid/FIX_T${JCAP}" "${RUNDIR}/FIX" - -#--------------------------------------------------------------- -### 1) Get gdas 6-tile netcdf restart file and gdas forcing data -#--------------------------------------------------------------- - -"${USHgldas}/gldas_get_data.sh" "${gldas_sdate}" "${gldas_cdate}" -export err=$? -${ERRSCRIPT} || exit 2 - -#--------------------------------------------------------------- -### 2) Get CPC daily precip and temporally disaggreated -#--------------------------------------------------------------- - -"${USHgldas}/gldas_forcing.sh" "${gldas_symd}" "${gldas_eymd}" -export err=$? 
-${ERRSCRIPT} || exit 3 - -# spatially disaggregated - -if [[ "${JCAP}" -eq 1534 ]]; then - gds='255 4 3072 1536 89909 0 128 -89909 -117 117 768 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 766 ]]; then - gds='255 4 1536 768 89821 0 128 -89821 -234 234 384 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 382 ]]; then - gds='255 4 768 384 89641 0 128 -89641 -469 469 192 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -elif [[ "${JCAP}" -eq 190 ]]; then - gds='255 4 384 192 89284 0 128 -89284 -938 938 96 0 0 0 0 0 0 0 0 0 255 0 0 0 0 0' -else - echo "JCAP=${JCAP} not supported, exit" - export err=4 - ${ERRSCRIPT} || exit 4 -fi - -echo "${JCAP}" -echo "${gds}" -ymdpre=$(sh "${FINDDATE}" "${gldas_symd}" d-1) -ymdend=$(sh "${FINDDATE}" "${gldas_eymd}" d-2) -ymd=${ymdpre} - -if [[ "${USE_CFP}" = "YES" ]] ; then - rm -f ./cfile - touch ./cfile -fi - -while [[ "${ymd}" -le "${ymdend}" ]]; do - if [[ "${ymd}" -ne "${ymdpre}" ]]; then - if [[ "${USE_CFP}" = "YES" ]] ; then - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}00 ${RUNDIR}/cmap.gdas.${ymd}00" >> ./cfile - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}06 ${RUNDIR}/cmap.gdas.${ymd}06" >> ./cfile - else - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}00" "${RUNDIR}/cmap.gdas.${ymd}00" - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}06" "${RUNDIR}/cmap.gdas.${ymd}06" - fi - fi - if [[ "${ymd}" -ne "${ymdend}" ]]; then - if [[ "${USE_CFP}" = "YES" ]] ; then - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}12 ${RUNDIR}/cmap.gdas.${ymd}12" >> ./cfile - echo "${COPYGB} -i3 '-g${gds}' -x ${GDAS}/cpc.${ymd}/precip.gldas.${ymd}18 ${RUNDIR}/cmap.gdas.${ymd}18" >> ./cfile - else - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}12" "${RUNDIR}/cmap.gdas.${ymd}12" - ${COPYGB} -i3 -g"${gds}" -x "${GDAS}/cpc.${ymd}/precip.gldas.${ymd}18" "${RUNDIR}/cmap.gdas.${ymd}18" - fi - fi - ymd=$(sh 
"${FINDDATE}" "${ymd}" d+1) -done - -if [[ "${USE_CFP}" = "YES" ]] ; then - ${APRUN_GLDAS_DATA_PROC:?} ./cfile -fi - -# create configure file -"${USHgldas}/gldas_liscrd.sh" "${gldas_sdate}" "${iau_cdate}" "${JCAP}" -export err=$? -${ERRSCRIPT} || exit 4 - - -#--------------------------------------------------------------- -### 3) Produce initials noah.rst from 6-tile gdas restart files -#--------------------------------------------------------------- -rm -f fort.41 fort.141 fort.11 fort.12 - -# 3a) create gdas2gldas input file - -cat >> fort.141 << EOF - &config - data_dir_input_grid="${input1}" - sfc_files_input_grid="${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile1.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile2.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile3.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile4.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile5.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile6.nc" - mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}/" - orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" - i_target=${nlon} - j_target=${nlat} - model="${model:?}" - / -EOF -cp fort.141 fort.41 - - -# 3b) Use gdas2gldas to generate nemsio file - -export OMP_NUM_THREADS=1 -export pgm=gdas2gldas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN:?} "${EXECgldas}/gdas2gldas" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 5 - - -# 3c)gldas_rst to generate noah.rst - -sfcanl=sfc.gaussian.nemsio -ln -fs "FIX/lmask_gfs_T${JCAP}.bfsa" fort.11 -ln -fs "${sfcanl}" fort.12 -export pgm=gldas_rst -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -"${EXECgldas}/gldas_rst" 1>&1 2>&2 -export err=$? 
-${ERRSCRIPT} || exit 6 - -mv "${sfcanl}" "${sfcanl}.${gldas_symd}" - - -#--------------------------------------------------------------- -### 4) run noah/noahmp model -#--------------------------------------------------------------- -export pgm=LIS -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GLDAS:?} ./LIS 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 7 - - -#--------------------------------------------------------------- -### 5) using gdas2gldas to generate nemsio file for gldas_eymd -### use gldas_post to replace soil moisture and temperature -### use gldas2gdas to produce 6-tile restart file -#--------------------------------------------------------------- -rm -f fort.41 fort.241 fort.42 - -# 5a) create input file for gdas2gldas - -cat >> fort.241 << EOF - &config - data_dir_input_grid="${input2}" - sfc_files_input_grid="${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile1.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile2.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile3.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile4.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile5.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile6.nc" - mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}/" - orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" - i_target=${nlon} - j_target=${nlat} - model="${model:?}" - / -EOF -cp fort.241 fort.41 - -# 5b) use gdas2gldas to produce nemsio file - -export OMP_NUM_THREADS=1 -export pgm=gdas2gldas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN} "${EXECgldas}/gdas2gldas" 1>&1 2>&2 -export err=$? 
-${ERRSCRIPT} || exit 8 - - -# 5c) use gldas_post to replace soil moisture and temperature - -yyyy=$(echo "${iau_eymd}" | cut -c1-4) -gbin=${RUNDIR}/EXP901/NOAH/${yyyy}/${iau_eymd}/LIS.E901.${iau_eymd}${iau_ecyc}.NOAHgbin -sfcanl=sfc.gaussian.nemsio -rm -rf fort.11 fort.12 -ln -fs "${gbin}" fort.11 -ln -fs "${sfcanl}" fort.12 - -export pgm=gldas_post -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -"${EXECgldas}/gldas_post" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 9 - -cp fort.22 ./gldas.nemsio -mv fort.22 "${sfcanl}.gldas" - - -# 5d) use gldas2gdas to create 6-tile restart tiles - -cat >> fort.42 << EOF - &config - orog_dir_gdas_grid="${topodir}/" - mosaic_file_gdas_grid="${CASE}_mosaic.nc" - / -EOF - -# copy/link gdas netcdf tiles -k=1; while [[ "${k}" -le 6 ]]; do - cp "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" "./sfc_data.tile${k}.nc" - k=$((k+1)) -done - -# copy soil type -ln -fs "FIX/stype_gfs_T${JCAP}.bfsa" "stype_gfs_T${JCAP}.bfsa" - -export OMP_NUM_THREADS=1 -export pgm=gldas2gdas -# shellcheck disable=SC1091 -. prep_step -# shellcheck disable= -${APRUN_GAUSSIAN} "${EXECgldas}/gldas2gdas" 1>&1 2>&2 -export err=$? -${ERRSCRIPT} || exit 10 - - -# 5e) archive gldas results - -if [[ "${OFFLINE_GLDAS}" = "YES" ]]; then - "${USHgldas}/gldas_archive.sh" "${gldas_symd}" "${gldas_eymd}" - export err=$? 
- ${ERRSCRIPT} || exit 11 -else - k=1; while [[ "${k}" -le 6 ]]; do - mv "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc_bfgldas" - cp "sfc_data.tile${k}.nc" "${input2}/${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile${k}.nc" - k=$((k+1)) - done -fi - - -#------------------------------------------------------------------ -# Clean up before leaving -if [[ "${mkdata}" = "YES" ]]; then rm -rf "${DATA}"; fi - -exit "${err}" - diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 2204799067..730563e256 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -306,15 +306,12 @@ if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then fi # Step back every assim_freq hours and remove old rotating directories -# for successful cycles (defaults from 24h to 120h). If GLDAS is -# active, retain files needed by GLDAS update. Independent of GLDAS, -# retain files needed by Fit2Obs +# for successful cycles (defaults from 24h to 120h). +# Retain files needed by Fit2Obs # TODO: This whole section needs to be revamped to remove marine component # directories and not look at the rocoto log. 
-DO_GLDAS=${DO_GLDAS:-"NO"} GDATEEND=$(${NDATE} -"${RMOLDEND:-24}" "${PDY}${cyc}") GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") -GLDAS_DATE=$(${NDATE} -96 "${PDY}${cyc}") RTOFS_DATE=$(${NDATE} -48 "${PDY}${cyc}") function remove_files() { # TODO: move this to a new location @@ -372,9 +369,6 @@ while [ "${GDATE}" -le "${GDATEEND}" ]; do # Atmos exclude_list="cnvstat atmanl.nc" - if [[ ${DO_GLDAS} == "YES" ]] && [[ ${RUN} =~ "gdas" ]] && [[ "${GDATE}" -ge "${GLDAS_DATE}" ]]; then - exclude_list="${exclude_list} sflux sfcanl" - fi templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') for template in ${templates}; do YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" diff --git a/sorc/build_all.sh b/sorc/build_all.sh index af15be7b1f..eaba2485d3 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -90,11 +90,6 @@ fi source ./partial_build.sh ${_verbose_opt} ${_partial_opt} # shellcheck disable= -# Disable gldas on Jet -if [[ ${MACHINE_ID} =~ jet.* ]]; then - Build_gldas="false" -fi - #------------------------------------ # Exception Handling Init #------------------------------------ @@ -264,26 +259,6 @@ if [[ ${Build_ufs_utils} == 'true' ]]; then err=$((err + rc)) fi -#------------------------------------ -# build gldas -#------------------------------------ -if [[ -d gldas.fd ]]; then - if [[ ${Build_gldas} == 'true' ]]; then - echo " .... Building gldas .... " - # shellcheck disable=SC2086,SC2248 - ./build_gldas.sh ${_verbose_opt} > "${logs_dir}/build_gldas.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gldas." - echo "The log file is in ${logs_dir}/build_gldas.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gldas .... 
" -fi - #------------------------------------ # build gfs_wafs - optional checkout #------------------------------------ diff --git a/sorc/build_gldas.sh b/sorc/build_gldas.sh deleted file mode 100755 index 05963b9348..0000000000 --- a/sorc/build_gldas.sh +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env bash -set -eux - -script_dir=$(dirname "${BASH_SOURCE[0]}") -cd "${script_dir}" || exit 1 - -# Check final exec folder exists -if [[ ! -d "../exec" ]]; then - mkdir ../exec -fi - -cd gldas.fd/sorc -./build_all_gldas.sh - -exit - diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 2dd4929694..f7476ac49a 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -171,7 +171,6 @@ fi if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) - checkout "gldas.fd" "https://github.com/NOAA-EMC/GLDAS.git" "fd8ba62"; errs=$((errs + $?)) fi if [[ ${checkout_wafs} == "YES" ]]; then diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg index c56db1f71f..d789d5ec51 100644 --- a/sorc/gfs_build.cfg +++ b/sorc/gfs_build.cfg @@ -7,7 +7,6 @@ Building gsi_utils (gsi_utils) ........................ yes Building gsi_monitor (gsi_monitor) .................... yes Building gdas (gdas) .................................. yes - Building gldas (gldas) ................................ yes Building UPP (upp) .................................... yes Building ufs_utils (ufs_utils) ........................ yes Building gfs_wafs (gfs_wafs) .......................... 
yes diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index f783726dec..0ca0ba3415 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -90,7 +90,6 @@ for dir in aer \ cice \ cpl \ datm \ - gldas \ gsi \ lut \ mom6 \ @@ -119,11 +118,6 @@ fi #--------------------------------------- #--add files from external repositories #--------------------------------------- -cd "${top_dir}/parm" || exit 1 - if [[ -d "${script_dir}/gldas.fd" ]]; then - [[ -d gldas ]] && rm -rf gldas - ${LINK} "${script_dir}/gldas.fd/parm" gldas - fi cd "${top_dir}/parm/post" || exit 1 for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ @@ -282,13 +276,6 @@ if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then done fi -if [[ -d "${script_dir}/gldas.fd" ]]; then - for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do - [[ -s "${gldasexe}" ]] && rm -f "${gldasexe}" - ${LINK} "${script_dir}/gldas.fd/exec/${gldasexe}" . 
- done -fi - # GDASApp if [[ -d "${script_dir}/gdas.cd" ]]; then declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ @@ -429,13 +416,6 @@ cd "${script_dir}" || exit 8 ${SLINK} gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd fi - if [[ -d gldas.fd ]]; then - for prog in gdas2gldas.fd gldas2gdas.fd gldas_forcing.fd gldas_model.fd gldas_post.fd gldas_rst.fd ;do - [[ -d "${prog}" ]] && rm -rf "${prog}" - ${SLINK} "gldas.fd/sorc/${prog}" "${prog}" - done - fi - #------------------------------ # copy $HOMEgfs/parm/config/config.base.nco.static as config.base for operations # config.base in the $HOMEgfs/parm/config has no use in development diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index 0d4657136d..f212ae4cb4 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -9,7 +9,6 @@ declare -a Build_prg=("Build_ufs_model" \ "Build_gsi_monitor" \ "Build_ww3_prepost" \ "Build_gdas" \ - "Build_gldas" \ "Build_upp" \ "Build_ufs_utils" \ "Build_gfs_wafs" \ @@ -99,7 +98,7 @@ parse_cfg() { sel_prg=${sel_prg//${del}/ } } done - if [[ ${del} == "" ]]; then + if [[ ${del} == "" ]]; then { short_prg=${sel_prg} found=false diff --git a/ush/gldas_forcing.sh b/ush/gldas_forcing.sh deleted file mode 100755 index ca5562f459..0000000000 --- a/ush/gldas_forcing.sh +++ /dev/null @@ -1,118 +0,0 @@ -#! 
/usr/bin/env bash -########################################################################### -# this script gets cpc daily precipitation and using gdas hourly precipitation -# to disaggregate daily value into hourly value -########################################################################### - -source "${HOMEgfs:?}/ush/preamble.sh" - -bdate=$1 -edate=$2 - -# HOMEgldas - gldas directory -# EXECgldas - gldas exec directory -# PARMgldas - gldas param directory -# FIXgldas - gldas fix field directory -export LISDIR=${HOMEgldas:?} -export fpath=${RUNDIR:?}/force -export xpath=${RUNDIR:?}/force -export WGRIB=${WGRIB:?} -export COPYGB=${COPYGB:?} -export ERRSCRIPT=${ERRSCRIPT:-"eval [[ ${err} = 0 ]]"} - -#------------------------------- -#--- extract variables of each timestep and create forcing files -sdate=${bdate} -edate=$(sh "${FINDDATE:?}" "${edate}" d-1) -while [[ "${sdate}" -lt "${edate}" ]] ; do - - sdat0=$(sh "${FINDDATE:?}" "${sdate}" d-1) - [[ ! -d ${xpath}/cpc.${sdate} ]] && mkdir -p "${xpath}/cpc.${sdate}" - [[ ! -d ${xpath}/cpc.${sdat0} ]] && mkdir -p "${xpath}/cpc.${sdat0}" - - cd "${xpath}" || exit - rm -f fort.* grib.* - - COMPONENT=${COMPONENT:-"atmos"} - pathp1=${CPCGAUGE:?}/gdas.${sdate}/00/${COMPONENT} - pathp2=${DCOMIN:?}/${sdate}/wgrbbul/cpc_rcdas - cpc_precip="PRCP_CU_GAUGE_V1.0GLB_0.125deg.lnx.${sdate}.RT" - if [[ "${RUN_ENVIR:?}" = "emc" ]] && [[ "${sdate}" -gt "${bdate}" ]]; then - cpc_precip="PRCP_CU_GAUGE_V1.0GLB_0.125deg.lnx.${sdate}.RT_early" - fi - cpc=${pathp1}/${cpc_precip} - if [[ ! -s "${cpc}" ]]; then cpc=${pathp2}/${cpc_precip} ; fi - if [[ "${RUN_ENVIR:?}" = "nco" ]]; then cpc=${pathp2}/${cpc_precip} ; fi - if [[ ! -s "${cpc}" ]]; then - echo "WARNING: GLDAS MISSING ${cpc}, WILL NOT RUN." - exit 3 - fi - cp "${cpc}" "${xpath}/cpc.${sdate}/." 
- - sflux=${fpath}/gdas.${sdat0}/gdas1.t12z.sfluxgrbf06 - prate=gdas.${sdat0}12 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdat0}/gdas1.t18z.sfluxgrbf06 - prate=gdas.${sdat0}18 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdate}/gdas1.t00z.sfluxgrbf06 - prate=gdas.${sdate}00 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - sflux=${fpath}/gdas.${sdate}/gdas1.t06z.sfluxgrbf06 - prate=gdas.${sdate}06 - ${WGRIB} -s "${sflux}" | grep "PRATE:sfc" | ${WGRIB} -i "${sflux}" -grib -o "${prate}" - - if [[ "${USE_CFP:?}" = "YES" ]] ; then - rm -f ./cfile - touch ./cfile - { - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdat0}12 grib.12" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdat0}18 grib.18" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdate}00 grib.00" - echo "${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas.${sdate}06 grib.06" - } >> ./cfile - ${APRUN_GLDAS_DATA_PROC:?} ./cfile - else - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdat0}"12 grib.12 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdat0}"18 grib.18 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdate}"00 grib.00 - ${COPYGB} -i3 '-g255 0 2881 1441 90000 0 128 -90000 360000 125 125' -x gdas."${sdate}"06 grib.06 - fi - - rm -f fort.10 - touch fort.10 - echo "${sdat0}" >> fort.10 - echo "${sdate}" >> fort.10 - - export pgm=gldas_forcing - # shellcheck disable=SC1091 - . 
prep_step - # shellcheck disable= - - ${WGRIB} -d -bin grib.12 -o fort.11 - ${WGRIB} -d -bin grib.18 -o fort.12 - ${WGRIB} -d -bin grib.00 -o fort.13 - ${WGRIB} -d -bin grib.06 -o fort.14 - - ln -fs "${xpath}/cpc.${sdate}/${cpc_precip}" fort.15 - - "${EXECgldas:?}/gldas_forcing" 1>&1 2>&2 - - export err=$? - ${ERRSCRIPT} || exit 3 - - cp fort.21 "${xpath}/cpc.${sdat0}/precip.gldas.${sdat0}12" - cp fort.22 "${xpath}/cpc.${sdat0}/precip.gldas.${sdat0}18" - cp fort.23 "${xpath}/cpc.${sdate}/precip.gldas.${sdate}00" - cp fort.24 "${xpath}/cpc.${sdate}/precip.gldas.${sdate}06" - - rm -f fort.* grib.* - - sdate=$(sh "${FINDDATE}" "${sdate}" d+1) -done -#------------------------------- - -exit "${err}" diff --git a/ush/gldas_get_data.sh b/ush/gldas_get_data.sh deleted file mode 100755 index 3416309119..0000000000 --- a/ush/gldas_get_data.sh +++ /dev/null @@ -1,76 +0,0 @@ -#! /usr/bin/env bash -######################################################### -# This script generate gldas forcing from gdas prod sflux -######################################################### - -source "${HOMEgfs:?}/ush/preamble.sh" - -bdate=$1 -edate=$2 - -if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - touch ./cfile -fi - -### COMINgdas = prod gdas sflux grib2 -### RUNDIR = gldas forcing in grib2 format -### RUNDIR/force = gldas forcing in grib1 format -export COMPONENT=${COMPONENT:-atmos} -fpath=${RUNDIR:?} -gpath=${RUNDIR}/force -cycint=${assim_freq:-6} - -# get gdas flux files to force gldas. -# CPC precipitation is from 12z to 12z. One more day of gdas data is -# needed to disaggregate daily CPC precipitation values to hourly values -cdate=$(${NDATE:?} -12 "${bdate}") - -iter=0 - -#------------------------------- -while [[ "${cdate}" -lt "${edate}" ]]; do - - ymd=$(echo "${cdate}" |cut -c 1-8) - cyc=$(echo "${cdate}" |cut -c 9-10) - [[ ! -d ${fpath}/gdas.${ymd} ]] && mkdir -p "${fpath}/gdas.${ymd}" - [[ ! 
-d ${gpath}/gdas.${ymd} ]] && mkdir -p "${gpath}/gdas.${ymd}" - - f=1 - while [[ "${f}" -le "${cycint}" ]]; do - rflux=${COMINgdas:?}/gdas.${ymd}/${cyc}/${COMPONENT}/gdas.t${cyc}z.sfluxgrbf00${f}.grib2 - fflux=${fpath}/gdas.${ymd}/gdas.t${cyc}z.sfluxgrbf0${f}.grib2 - gflux=${gpath}/gdas.${ymd}/gdas1.t${cyc}z.sfluxgrbf0${f} - if [[ ! -s "${rflux}" ]];then - echo "WARNING: GLDAS MISSING ${rflux}, WILL NOT RUN." - exit 2 - fi - rm -f "${fflux}" "${gflux}" - touch "${fflux}" "${gflux}" - - fcsty=anl - if [[ "${f}" -ge 1 ]]; then fcsty=fcst; fi - - if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - if [[ "${CFP_MP:-"NO"}" = "YES" ]]; then - echo "${iter} ${USHgldas:?}/gldas_process_data.sh ${iter} ${rflux} ${fcsty} ${fflux} ${gflux} ${f}" >> ./cfile - else - echo "${USHgldas:?}/gldas_process_data.sh ${iter} ${rflux} ${fcsty} ${fflux} ${gflux} ${f}" >> ./cfile - fi - else - "${USHgldas:?}/gldas_process_data.sh" "${iter}" "${rflux}" "${fcsty}" "${fflux}" "${gflux}" "${f}" - fi - - iter=$((iter+1)) - f=$((f+1)) - done - -#------------------------------- - cdate=$(${NDATE} +"${cycint}" "${cdate}") -done -#------------------------------- - -if [[ "${USE_CFP:-"NO"}" = "YES" ]] ; then - ${APRUN_GLDAS_DATA_PROC:?} ./cfile -fi - -exit $? diff --git a/ush/gldas_liscrd.sh b/ush/gldas_liscrd.sh deleted file mode 100755 index 7c0f446035..0000000000 --- a/ush/gldas_liscrd.sh +++ /dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" - -if [[ $# -lt 3 ]]; then - echo usage "$0" yyyymmddhh1 yyyymmddhh2 126/382/574/1534 - exit $? 
-fi - -date1=$1 -date2=$2 -grid=$3 - -yyyy1=$(echo "${date1}" | cut -c 1-4) -mm1=$(echo "${date1}" | cut -c 5-6) -dd1=$(echo "${date1}" | cut -c 7-8) -hh1=$(echo "${date1}" | cut -c 9-10) -yyyy2=$(echo "${date2}" | cut -c 1-4) -mm2=$(echo "${date2}" | cut -c 5-6) -dd2=$(echo "${date2}" | cut -c 7-8) -hh2=$(echo "${date2}" | cut -c 9-10) - -PARM_LM=${PARMgldas:?} -LISCARD=lis.crd - -rm -f "${LISCARD}" -touch "${LISCARD}" -{ - cat "${PARM_LM}/lis.crd.T${grid}.tmp.1" - echo "LIS%t%SSS = 0 " - echo "LIS%t%SMN = 00 " - echo "LIS%t%SHR = ${hh1} " - echo "LIS%t%SDA = ${dd1} " - echo "LIS%t%SMO = ${mm1} " - echo "LIS%t%SYR = ${yyyy1}" - echo "LIS%t%ENDCODE = 1 " - echo "LIS%t%ESS = 0 " - echo "LIS%t%EMN = 00 " - echo "LIS%t%EHR = ${hh2} " - echo "LIS%t%EDA = ${dd2} " - echo "LIS%t%EMO = ${mm2} " - echo "LIS%t%EYR = ${yyyy2}" - cat "${PARM_LM}/lis.crd.T${grid}.tmp.2" -} >> "${LISCARD}" - -exit 0 diff --git a/ush/gldas_process_data.sh b/ush/gldas_process_data.sh deleted file mode 100755 index 4770170a97..0000000000 --- a/ush/gldas_process_data.sh +++ /dev/null @@ -1,34 +0,0 @@ -#! 
/usr/bin/env bash - -source "${HOMEgfs:?}/ush/preamble.sh" "$1" - -rflux=$2 -fcsty=$3 -fflux=$4 -gflux=$5 -f=$6 - -WGRIB2=${WGRIB2:?} -CNVGRIB=${CNVGRIB:?} - -${WGRIB2} "${rflux}" | grep "TMP:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SPFH:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "UGRD:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "VGRD:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "HGT:1 hybrid" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "PRES:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "PRATE:surface" | grep ave | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "VEG:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SFCR:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SFEXC:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "TMP:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "WEASD:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "SNOD:surface" | grep "${fcsty}" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" - -${WGRIB2} "${rflux}" | grep "DSWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "DLWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" -${WGRIB2} "${rflux}" | grep "USWRF:surface:${f} hour fcst" | ${WGRIB2} -i "${rflux}" -append -grib "${fflux}" - -${CNVGRIB} -g21 "${fflux}" "${gflux}" 
- -exit $? diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 9b55fe04bd..f9a15e2307 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -241,7 +241,7 @@ if [[ ${type} = "gfs" ]]; then head="gfs.t${cyc}z." rm -f gfs_flux_1p00.txt - rm -f ocn_ice_grib2_0p5.txt + rm -f ocn_ice_grib2_0p5.txt rm -f ocn_ice_grib2_0p25.txt rm -f ocn_2D.txt rm -f ocn_3D.txt @@ -335,7 +335,7 @@ if [[ ${type} == "gdas" ]]; then if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" fi - for fstep in prep anal gldas fcst vrfy radmon minmon oznmon; do + for fstep in prep anal fcst vrfy radmon minmon oznmon; do if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then echo "./logs/${PDY}${cyc}/gdas${fstep}.log" fi @@ -391,7 +391,7 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" } >> gdas_restarta.txt #.................. @@ -410,7 +410,7 @@ if [[ ${type} == "gdas" ]]; then #........................... { echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" - echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" } >> gdaswave.txt echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> gdaswave_restart.txt @@ -452,7 +452,7 @@ if [[ ${type} == "gdas" ]]; then #........................... 
{ echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" - echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" } >> gdasice.txt echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> gdasice_restart.txt @@ -498,8 +498,8 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}radstat.ensmean" ]]; then echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}radstat.ensmean" fi - for FHR in $nfhrs; do # loop over analysis times in window - if [ $FHR -eq 6 ]; then + for FHR in ${nfhrs}; do # loop over analysis times in window + if [[ ${FHR} -eq 6 ]]; then if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmanl.ensmean.nc" ]]; then echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmanl.ensmean.nc" fi @@ -513,7 +513,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmi00${FHR}.ensmean.nc" ]]; then echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmi00${FHR}.ensmean.nc" fi - fi + fi done # loop over FHR for fstep in eobs ecen esfc eupd efcs epos ; do echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" @@ -571,7 +571,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmanl.nc" fi if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" fi fi } >> "${RUN}_grp${n}.txt" diff --git a/versions/fix.ver b/versions/fix.ver index 02b0953a94..7bcc4f75af 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -10,7 +10,6 @@ export datm_ver=20220805 export gdas_crtm_ver=20220805 export gdas_fv3jedi_ver=20220805 export gdas_gsibec_ver=20221031 -export gldas_ver=20220920 export glwu_ver=20220805 export gsi_ver=20230112 export lut_ver=20220805 diff --git a/workflow/applications.py b/workflow/applications.py index 0cc6831c0c..dfec3a6a18 
100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -99,7 +99,6 @@ def __init__(self, conf: Configuration) -> None: self.do_ocean = _base.get('DO_OCN', False) self.do_ice = _base.get('DO_ICE', False) self.do_aero = _base.get('DO_AERO', False) - self.do_gldas = _base.get('DO_GLDAS', False) self.do_bufrsnd = _base.get('DO_BUFRSND', False) self.do_gempak = _base.get('DO_GEMPAK', False) self.do_awips = _base.get('DO_AWIPS', False) @@ -192,9 +191,6 @@ def _cycled_configs(self): configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch'] - if self.do_gldas: - configs += ['gldas'] - if self.do_hybvar: if self.do_jediatmens: configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] @@ -368,7 +364,6 @@ def _get_cycled_task_names(self): if self.do_jedilandda: gdas_gfs_common_tasks_before_fcst += ['landanlinit', 'landanlprep', 'landanlrun', 'landanlfinal'] - gldas_tasks = ['gldas'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] wave_post_tasks = ['wavepostsbs', 'wavepostpnt'] @@ -388,9 +383,6 @@ def _get_cycled_task_names(self): if not self.do_jediatmvar: gdas_tasks += ['analdiag'] - if self.do_gldas: - gdas_tasks += gldas_tasks - if self.do_wave and 'gdas' in self.wave_cdumps: gdas_tasks += wave_prep_tasks diff --git a/workflow/prod.yml b/workflow/prod.yml index 04ddca61bb..98755d2f86 100644 --- a/workflow/prod.yml +++ b/workflow/prod.yml @@ -238,7 +238,6 @@ suites: - task: jgdas_atmos_analysis event: release_fcst - task: jgdas_wave_prep - - task: jgdas_atmos_gldas atmos: obsproc: dump: @@ -250,11 +249,6 @@ suites: triggers: - task: jobsproc_gdas_atmos_dump event: release_sfcprep - init: - tasks: - jgdas_atmos_gldas: - triggers: - - task: jgdas_atmos_analysis analysis: tasks: jgdas_atmos_analysis: diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 3520c717db..12166689cc 100644 --- a/workflow/rocoto/workflow_tasks.py +++ 
b/workflow/rocoto/workflow_tasks.py @@ -12,7 +12,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc'] VALID_TASKS = ['aerosol_init', 'coupled_ic', - 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'gldas', 'arch', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', 'atmanlinit', 'atmanlrun', 'atmanlfinal', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', @@ -669,18 +669,6 @@ def ocnanalvrfy(self): return task - def gldas(self): - - deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - - resources = self.get_resource('gldas') - task = create_wf_task('gldas', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) - - return task - def fcst(self): fcst_map = {'forecast-only': self._fcst_forecast_only, @@ -738,10 +726,6 @@ def _fcst_cycled(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_gldas and self.cdump in ['gdas']: - dep_dict = {'type': 'task', 'name': f'{self.cdump}gldas'} - dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: dep_dict = {'type': 'task', 'name': f'{self.cdump}waveprep'} dependencies.append(rocoto.add_dependency(dep_dict))