diff --git a/HOWtoRUN b/HOWtoRUN
new file mode 100644
index 0000000000..712dc9257c
--- /dev/null
+++ b/HOWtoRUN
@@ -0,0 +1,8 @@
+cd ./ecf/hurricane/stats
+(1) Change your job account (#PBS -A) in each ecf file
+(2) qsub jevs_hurricane_global_det_tcgen_stats.ecf
+ qsub jevs_hurricane_global_det_tropcyc_stats.ecf
+ qsub jevs_hurricane_global_ens_tropcyc_stats.ecf
+ qsub jevs_hurricane_regional_tropcyc_stats.ecf
+(3) See the stats file and plots under:
+ /lfs/h2/emc/ptmp/$USER/com/evs
diff --git a/ecf/hurricane/stats/jevs_hurricane_global_det_tcgen_stats.ecf b/ecf/hurricane/stats/jevs_hurricane_global_det_tcgen_stats.ecf
new file mode 100755
index 0000000000..20e3f10561
--- /dev/null
+++ b/ecf/hurricane/stats/jevs_hurricane_global_det_tcgen_stats.ecf
@@ -0,0 +1,73 @@
+#PBS -S /bin/bash
+#PBS -N jevs_hurricane_global_det_tcgen_stats
+#PBS -j oe
+#PBS -A ENSTRACK-DEV
+#PBS -q dev
+#PBS -l select=1:ncpus=2:mem=4GB
+##PBS -l place=vscatter:exclhost,select=1:ncpus=128:ompthreads=1
+#PBS -l walltime=00:30:00
+#PBS -l debug=true
+#PBS -V
+
+#%include
+#%include
+
+############################################################
+# Load modules
+############################################################
+module purge
+export HPC_OPT=/apps/ops/prod/libs
+module use /apps/ops/prod/libs/modulefiles/compiler/intel/19.1.3.304/
+export HOMEevs=/lfs/h2/emc/vpppg/save/$USER/EVS
+source ${HOMEevs}/versions/run.ver
+module load intel/${intel_ver}
+module load gsl/${gsl_ver}
+module load python/${python_ver}
+module load netcdf/${netcdf_ver}
+module load met/${met_ver}
+module load metplus/${metplus_ver}
+
+module load libjpeg/$libjpeg_ver
+module load grib_util/$grib_util_ver
+module load prod_util/${produtil_ver}
+module load prod_envir/${prodenvir_ver}
+
+module load geos/${geos_ver}
+module load proj/${proj_ver}
+module load libfabric/${libfabric_ver}
+module load imagemagick/${imagemagick_ver}
+
+module list
+
+export NET=evs
+export COMPONENT=hurricane_global_det
+export RUN=tcgen
+export STEP=stats
+
+export envir=dev
+export cyc=00
+export job=jevs_hurricane_global_det_tcgen_stats_${cyc}
+
+#Define the directories of your TC genesis data and A/Bdeck files
+export COMINgenesis=/lfs/h2/emc/vpppg/save/jiayi.peng/TCgen/genesisDATA
+export COMINadeckNHC=/lfs/h2/emc/vpppg/save/jiayi.peng/TCgen/ABdeck
+export COMINbdeckNHC=/lfs/h2/emc/vpppg/save/jiayi.peng/TCgen/ABdeck
+export COMINadeckJTWC=/lfs/h2/emc/vpppg/save/jiayi.peng/TCgen/ABdeck
+export COMINbdeckJTWC=/lfs/h2/emc/vpppg/save/jiayi.peng/TCgen/ABdeck
+
+export DATAROOT=/lfs/h2/emc/ptmp/$USER
+export COMROOT=${DATAROOT}/com
+export KEEPDATA=YES
+
+
+# CALL executable job script here
+$HOMEevs/jobs/hurricane/stats/JEVS_HURRICANE_STATS
+
+%include
+%manual
+######################################################################
+# Purpose: This job will generate the tropical cyclone genesis (tcgen)
+# verification statistics for global deterministic models.
+######################################################################
+%end
+
diff --git a/ecf/hurricane/stats/jevs_hurricane_global_det_tropcyc_stats.ecf b/ecf/hurricane/stats/jevs_hurricane_global_det_tropcyc_stats.ecf
new file mode 100755
index 0000000000..fbe761b6a4
--- /dev/null
+++ b/ecf/hurricane/stats/jevs_hurricane_global_det_tropcyc_stats.ecf
@@ -0,0 +1,68 @@
+#PBS -S /bin/bash
+#PBS -N jevs_hurricane_global_det_tropcyc_stats
+#PBS -j oe
+#PBS -A ENSTRACK-DEV
+#PBS -q dev
+#PBS -l select=1:ncpus=2:mem=4GB
+##PBS -l place=vscatter:exclhost,select=1:ncpus=128:ompthreads=1
+#PBS -l walltime=00:30:00
+#PBS -l debug=true
+#PBS -V
+
+#%include
+#%include
+
+############################################################
+# Load modules
+############################################################
+module purge
+export HPC_OPT=/apps/ops/prod/libs
+module use /apps/ops/prod/libs/modulefiles/compiler/intel/19.1.3.304/
+export HOMEevs=/lfs/h2/emc/vpppg/save/$USER/EVS
+source ${HOMEevs}/versions/run.ver
+module load intel/${intel_ver}
+module load gsl/${gsl_ver}
+module load python/${python_ver}
+module load netcdf/${netcdf_ver}
+module load met/${met_ver}
+module load metplus/${metplus_ver}
+
+module load libjpeg/$libjpeg_ver
+module load grib_util/$grib_util_ver
+module load prod_util/${produtil_ver}
+module load prod_envir/${prodenvir_ver}
+module load imagemagick/${imagemagick_ver}
+
+module list
+
+export NET=evs
+export COMPONENT=hurricane_global_det
+export RUN=tropcyc
+export STEP=stats
+
+export envir=dev
+export cyc=00
+export job=jevs_hurricane_global_det_tropcyc_stats_${cyc}
+
+#Define TC-vital file, TC track file and the directory for Bdeck files
+export COMINvit=/lfs/h1/ops/prod/com/gfs/v16.3/syndat/syndat_tcvitals.2022
+export COMINtrack=/lfs/h1/ops/prod/com/ens_tracker/v1.3/global/tracks.atcfunix.22
+export COMINbdeckNHC=/lfs/h1/ops/prod/dcom/nhc/atcf-noaa/btk
+export COMINbdeckJTWC=/lfs/h1/ops/prod/dcom/nhc/atcf-navy/btk
+
+export DATAROOT=/lfs/h2/emc/ptmp/$USER
+export COMROOT=${DATAROOT}/com
+export KEEPDATA=YES
+
+
+# CALL executable job script here
+$HOMEevs/jobs/hurricane/stats/JEVS_HURRICANE_STATS
+
+%include
+%manual
+######################################################################
+# Purpose: This job will generate the tropical cyclone track and intensity
+# verification statistics for global deterministic models.
+######################################################################
+%end
+
diff --git a/ecf/hurricane/stats/jevs_hurricane_global_ens_tropcyc_stats.ecf b/ecf/hurricane/stats/jevs_hurricane_global_ens_tropcyc_stats.ecf
new file mode 100755
index 0000000000..21422991b8
--- /dev/null
+++ b/ecf/hurricane/stats/jevs_hurricane_global_ens_tropcyc_stats.ecf
@@ -0,0 +1,68 @@
+#PBS -S /bin/bash
+#PBS -N jevs_hurricane_global_ens_tropcyc_stats
+#PBS -j oe
+#PBS -A ENSTRACK-DEV
+#PBS -q dev
+#PBS -l select=1:ncpus=2:mem=4GB
+##PBS -l place=vscatter:exclhost,select=1:ncpus=128:ompthreads=1
+#PBS -l walltime=00:30:00
+#PBS -l debug=true
+#PBS -V
+
+#%include
+#%include
+
+############################################################
+# Load modules
+############################################################
+module purge
+export HPC_OPT=/apps/ops/prod/libs
+module use /apps/ops/prod/libs/modulefiles/compiler/intel/19.1.3.304/
+export HOMEevs=/lfs/h2/emc/vpppg/save/$USER/EVS
+source ${HOMEevs}/versions/run.ver
+module load intel/${intel_ver}
+module load gsl/${gsl_ver}
+module load python/${python_ver}
+module load netcdf/${netcdf_ver}
+module load met/${met_ver}
+module load metplus/${metplus_ver}
+
+module load libjpeg/$libjpeg_ver
+module load grib_util/$grib_util_ver
+module load prod_util/${produtil_ver}
+module load prod_envir/${prodenvir_ver}
+module load imagemagick/${imagemagick_ver}
+
+module list
+
+export NET=evs
+export COMPONENT=hurricane_global_ens
+export RUN=tropcyc
+export STEP=stats
+
+export envir=dev
+export cyc=00
+export job=jevs_hurricane_global_ens_tropcyc_stats_${cyc}
+
+#Define TC-vital file, TC track file and the directory for Bdeck files
+export COMINvit=/lfs/h1/ops/prod/com/gfs/v16.3/syndat/syndat_tcvitals.2022
+export COMINtrack=/lfs/h1/ops/prod/com/ens_tracker/v1.3/global/tracks.atcfunix.22
+export COMINbdeckNHC=/lfs/h1/ops/prod/dcom/nhc/atcf-noaa/btk
+export COMINbdeckJTWC=/lfs/h1/ops/prod/dcom/nhc/atcf-navy/btk
+
+export DATAROOT=/lfs/h2/emc/ptmp/$USER
+export COMROOT=${DATAROOT}/com
+export KEEPDATA=YES
+
+
+# CALL executable job script here
+$HOMEevs/jobs/hurricane/stats/JEVS_HURRICANE_STATS
+
+%include
+%manual
+######################################################################
+# Purpose: This job will generate the tropical cyclone track and intensity
+# verification statistics for global ensemble models.
+######################################################################
+%end
+
diff --git a/ecf/hurricane/stats/jevs_hurricane_regional_tropcyc_stats.ecf b/ecf/hurricane/stats/jevs_hurricane_regional_tropcyc_stats.ecf
new file mode 100755
index 0000000000..4842bbc186
--- /dev/null
+++ b/ecf/hurricane/stats/jevs_hurricane_regional_tropcyc_stats.ecf
@@ -0,0 +1,72 @@
+#PBS -S /bin/bash
+#PBS -N jevs_hurricane_regional_tropcyc_stats
+#PBS -j oe
+#PBS -A ENSTRACK-DEV
+#PBS -q dev
+#PBS -l select=1:ncpus=2:mem=4GB
+##PBS -l place=vscatter:exclhost,select=1:ncpus=128:ompthreads=1
+#PBS -l walltime=00:30:00
+#PBS -l debug=true
+#PBS -V
+
+#%include
+#%include
+
+############################################################
+# Load modules
+############################################################
+module purge
+export HPC_OPT=/apps/ops/prod/libs
+module use /apps/ops/prod/libs/modulefiles/compiler/intel/19.1.3.304/
+export HOMEevs=/lfs/h2/emc/vpppg/save/$USER/EVS
+source ${HOMEevs}/versions/run.ver
+module load intel/${intel_ver}
+module load gsl/${gsl_ver}
+module load python/${python_ver}
+module load netcdf/${netcdf_ver}
+module load met/${met_ver}
+module load metplus/${metplus_ver}
+
+module load libjpeg/$libjpeg_ver
+module load grib_util/$grib_util_ver
+module load prod_util/${produtil_ver}
+module load prod_envir/${prodenvir_ver}
+module load imagemagick/${imagemagick_ver}
+
+module list
+
+export NET=evs
+export COMPONENT=hurricane_regional
+export RUN=tropcyc
+export STEP=stats
+
+export envir=dev
+export cyc=00
+export job=jevs_hurricane_regional_tropcyc_stats_${cyc}
+
+#Define TC-vital file, TC track file and the directory for Bdeck files
+#export COMINvit=/lfs/h1/ops/prod/com/gfs/v16.3/syndat/syndat_tcvitals.2022
+#export COMINbdeckNHC=/lfs/h1/ops/prod/dcom/nhc/atcf-noaa/btk
+#export COMINbdeckJTWC=/lfs/h1/ops/prod/dcom/nhc/atcf-navy/btk
+
+export COMINvit=/lfs/h2/emc/vpppg/noscrub/jiayi.peng/MetTCData/TCvital/syndat_tcvitals.2022
+export COMINtrack=/lfs/h2/emc/vpppg/noscrub/jiayi.peng/MetTCData/regionalTrack/tracks.atcfunix.22
+export COMINbdeckNHC=/lfs/h2/emc/vpppg/noscrub/jiayi.peng/MetTCData/bdeck
+export COMINbdeckJTWC=/lfs/h2/emc/vpppg/noscrub/jiayi.peng/MetTCData/bdeck
+
+export DATAROOT=/lfs/h2/emc/ptmp/$USER
+export COMROOT=${DATAROOT}/com
+export KEEPDATA=YES
+
+
+# CALL executable job script here
+$HOMEevs/jobs/hurricane/stats/JEVS_HURRICANE_STATS
+
+%include
+%manual
+######################################################################
+# Purpose: This job will generate the tropical cyclone track and intensity
+# verification statistics for regional hurricane models.
+######################################################################
+%end
+
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.dbf b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.dbf
new file mode 100644
index 0000000000..39ebad52d2
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.dbf differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shp b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shp
new file mode 100644
index 0000000000..053742259a
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shp differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shx b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shx
new file mode 100644
index 0000000000..763719eeb4
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_110m_admin_0_boundary_lines_land.shx differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.dbf b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.dbf
new file mode 100644
index 0000000000..fb5f801c85
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.dbf differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shp b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shp
new file mode 100644
index 0000000000..03820b6168
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shp differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shx b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shx
new file mode 100644
index 0000000000..2ce054b307
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_0_boundary_lines_land.shx differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.dbf b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.dbf
new file mode 100644
index 0000000000..195f734315
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.dbf differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shp b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shp
new file mode 100644
index 0000000000..856fc1eeb6
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shp differ
diff --git a/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shx b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shx
new file mode 100644
index 0000000000..3bd6183709
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/cultural/ne_50m_admin_1_states_provinces_lakes.shx differ
diff --git a/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.dbf b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.dbf
new file mode 100644
index 0000000000..243773cb3f
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.dbf differ
diff --git a/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shp b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shp
new file mode 100644
index 0000000000..726df04123
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shp differ
diff --git a/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shx b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shx
new file mode 100644
index 0000000000..649305d08a
Binary files /dev/null and b/fix/cartopy/shapefiles/natural_earth/physical/ne_110m_lakes.shx differ
diff --git a/fix/noaa.png b/fix/noaa.png
new file mode 100644
index 0000000000..1264f47120
Binary files /dev/null and b/fix/noaa.png differ
diff --git a/fix/nws.png b/fix/nws.png
new file mode 100644
index 0000000000..2d8b9fa753
Binary files /dev/null and b/fix/nws.png differ
diff --git a/jobs/hurricane/stats/JEVS_HURRICANE_STATS b/jobs/hurricane/stats/JEVS_HURRICANE_STATS
old mode 100644
new mode 100755
index d64a3d962e..c48cace61d
--- a/jobs/hurricane/stats/JEVS_HURRICANE_STATS
+++ b/jobs/hurricane/stats/JEVS_HURRICANE_STATS
@@ -1 +1,138 @@
-sample
+#!/bin/bash
+date
+export PS4=' $SECONDS + '
+set -x
+
+####################################
+# obtain unique process id (pid) and make temp directory
+####################################
+export jobid=${jobid:-$job.o$$}
+export DATA=${DATA:-${DATAROOT:?}/${jobid}}
+mkdir -p $DATA
+cd $DATA
+export cycle=t${cyc}z
+
+####################################
+# File To Log Msgs
+####################################
+export jlogfile=${jlogfile:-${DATA}/jlogfile.${jobid}}
+
+####################################
+# Specify NET and RUN Name and model
+####################################
+export NET=${NET:-evs}
+export COMPONENT=${COMPONENT:-define_in_ecf}
+export RUN=${RUN:-define_in_ecf}
+export STEP=${STEP:-stats}
+
+####################################
+# Determine Job Output Name on System
+####################################
+export pgmout="OUTPUT.$$"
+export pgmerr=errfile
+
+####################################
+# SENDECF - Flag Events on ecFLOW
+# SENDCOM - Copy Files From TMPDIR to $COMOUT
+# SENDDBN - Issue DBNet Client Calls
+####################################
+export SENDECF=${SENDECF:-NO}
+export SENDCOM=${SENDCOM:-YES}
+export SENDDBN=${SENDDBN:-NO}
+
+####################################
+# Specify Execution Areas
+####################################
+export HOMEevs=${HOMEevs:-${PACKAGEHOME}}
+export SCRIPTSevs=${SCRIPTSevs:-$HOMEevs/scripts/hurricane/stats}
+export USHevs=${USHevs:-$HOMEevs/ush/hurricane/stats}
+export PARMevs=${PARMevs:-$HOMEevs/parm/metplus_config/hurricane/stats}
+export FIXevs=${FIXevs:-$HOMEevs/fix}
+
+# Run setpdy and initialize PDY variables
+##############################
+#setpdy.sh
+#. ./PDY
+
+##############################################
+# Define COM directories
+##############################################
+if [ ${COMPONENT} = "hurricane_global_det" -a ${RUN} = "tropcyc" ]; then
+setpdy.sh
+. ./PDY
+export YYYY=`echo ${PDY} | cut -c1-4`
+export YY22=`echo ${PDY} | cut -c3-4`
+
+export COMINvit=${COMINvit:-/your/TC/vitals/file}
+export COMINtrack=${COMINtrack:-/your/TC/track/file}
+export COMINbdeckNHC=${COMINbdeckNHC:-/your/NHC/bdeck/data/dir}
+export COMINbdeckJTWC=${COMINbdeckJTWC:-/your/JTWC/bdeck/data/dir}
+
+elif [ ${COMPONENT} = "hurricane_global_ens" -a ${RUN} = "tropcyc" ]; then
+setpdy.sh
+. ./PDY
+export YYYY=`echo ${PDY} | cut -c1-4`
+export YY22=`echo ${PDY} | cut -c3-4`
+
+export COMINvit=${COMINvit:-/your/TC/vitals/file}
+export COMINtrack=${COMINtrack:-/your/TC/track/file}
+export COMINbdeckNHC=${COMINbdeckNHC:-/your/NHC/bdeck/data/dir}
+export COMINbdeckJTWC=${COMINbdeckJTWC:-/your/JTWC/bdeck/data/dir}
+
+elif [ ${COMPONENT} = "hurricane_global_det" -a ${RUN} = "tcgen" ]; then
+#setpdy.sh
+#. ./PDY
+export PDY=20211115
+
+export YYYY=`echo ${PDY} | cut -c1-4`
+#export YY22=`echo ${PDY} | cut -c3-4`
+export COMINgenesis=${COMINgenesis:-/your/TC/genesis/data/dir}
+export COMINadeckNHC=${COMINadeckNHC:-/your/NHC/adeck/data/dir}
+export COMINbdeckNHC=${COMINbdeckNHC:-/your/NHC/bdeck/data/dir}
+export COMINadeckJTWC=${COMINadeckJTWC:-/your/JTWC/adeck/data/dir}
+export COMINbdeckJTWC=${COMINbdeckJTWC:-/your/JTWC/bdeck/data/dir}
+
+elif [ ${COMPONENT} = "hurricane_regional" -a ${RUN} = "tropcyc" ]; then
+setpdy.sh
+. ./PDY
+export YYYY=`echo ${PDY} | cut -c1-4`
+export YY22=`echo ${PDY} | cut -c3-4`
+
+export COMINvit=${COMINvit:-/your/TC/vitals/file}
+export COMINtrack=${COMINtrack:-/your/TC/track/file}
+export COMINbdeckNHC=${COMINbdeckNHC:-/your/NHC/bdeck/data/dir}
+export COMINbdeckJTWC=${COMINbdeckJTWC:-/your/JTWC/bdeck/data/dir}
+
+else
+echo "Job failed: unknown ${COMPONENT} and ${RUN}"
+err_exit "FAILED ${jobid} - ERROR IN unknown ${COMPONENT} and ${RUN} - ABNORMAL EXIT"
+
+fi
+
+#export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${evs_ver})/${COMPONENT}/${RUN}/${STEP}}
+export COMOUT=${COMOUT:-${COMROOT}/${NET}/${evs_ver}/${COMPONENT}/${RUN}/${STEP}}
+mkdir -m 775 -p $COMOUT
+
+msg="HAS BEGUN on `hostname`"
+postmsg "$jlogfile" "$msg"
+
+#env
+
+#############################################################
+# Execute the script
+${SCRIPTSevs}/exevs_${COMPONENT}_${RUN}_stats.sh
+export err=$?; err_chk
+
+#############################################################
+
+msg="JOB COMPLETED NORMALLY"
+postmsg "$jlogfile" "$msg"
+
+##############################
+# Remove the Temporary working directory
+##############################
+if [[ $KEEPDATA != "YES" ]]; then
+ rm -rf $DATA
+fi
+
+date
diff --git a/parm/metplus_config/hurricane/stats/TCGen_template.conf b/parm/metplus_config/hurricane/stats/TCGen_template.conf
new file mode 100644
index 0000000000..f36c40658f
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCGen_template.conf
@@ -0,0 +1,175 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+#
+# CONFIGURATION
+#
+
+[config]
+
+# Looping by times: steps through each 'task' in the PROCESS_LIST for each
+# defined time, and repeats until all times have been evaluated.
+LOOP_ORDER = times
+
+# 'Tasks' to be run
+PROCESS_LIST = TCGen
+
+LOOP_BY = INIT
+
+# The init time
+INIT_TIME_FMT = %Y
+INIT_BEG = YEAR_template
+
+
+LOG_TC_GEN_VERBOSITY = 2
+
+# optional list of strings to loop over and call the tool multiple times
+# value of each item can be referenced in filename templates with {custom?fmt=%s}
+TC_GEN_CUSTOM_LOOP_LIST =
+
+
+# I/O Configurations
+
+# Location of input data directory for track data
+TC_GEN_TRACK_INPUT_DIR = {INPUT_BASE}
+TC_GEN_TRACK_INPUT_TEMPLATE = *.dat
+
+# Location of input data directory for genesis data
+TC_GEN_GENESIS_INPUT_DIR = {INPUT_BASE}
+TC_GEN_GENESIS_INPUT_TEMPLATE = genesis*{init?fmt=%Y}*
+
+#TC_GEN_EDECK_INPUT_DIR =
+#TC_GEN_EDECK_INPUT_TEMPLATE =
+
+#TC_GEN_SHAPE_INPUT_DIR =
+#TC_GEN_SHAPE_INPUT_TEMPLATE =
+
+
+# directory to write output files generated by tc_gen
+TC_GEN_OUTPUT_DIR = {OUTPUT_BASE}
+TC_GEN_OUTPUT_TEMPLATE = tc_gen_{init?fmt=%Y}
+
+
+# MET Configurations
+
+TC_GEN_CONFIG_FILE = {PARM_BASE}/met_config/TCGenConfig_wrapped
+
+# The following variables set values in the MET configuration file used by this example
+# Leaving these values commented will use the value found in the default MET configuration file
+# See the MET documentation for this tool for more information on the settings
+
+TC_GEN_INIT_FREQ = INIT_FREQ_template
+
+TC_GEN_VALID_FREQ = VALID_FREQ_template
+
+#TC_GEN_FCST_HR_WINDOW_BEGIN = 6
+TC_GEN_FCST_HR_WINDOW_BEGIN = 0
+TC_GEN_FCST_HR_WINDOW_END = 120
+
+#TC_GEN_MIN_DURATION = 12
+TC_GEN_MIN_DURATION = 24
+
+TC_GEN_FCST_GENESIS_VMAX_THRESH = NA
+TC_GEN_FCST_GENESIS_MSLP_THRESH = NA
+
+TC_GEN_BEST_GENESIS_TECHNIQUE = BEST
+TC_GEN_BEST_GENESIS_CATEGORY = TD, TS, SD, SS
+TC_GEN_BEST_GENESIS_VMAX_THRESH = NA
+TC_GEN_BEST_GENESIS_MSLP_THRESH = NA
+
+TC_GEN_OPER_TECHNIQUE = CARQ
+
+# TC_GEN_FILTER_ sets filter items in the MET configuration file
+# quotation marks within quotation marks must be preceded with \
+#TC_GEN_FILTER_1 = desc = "AL_BASIN"; vx_mask = "MET_BASE/tc_data/basin_global_tenth_degree.nc { name=\"basin\"; level=\"(*,*)\"; } ==1";
+#TC_GEN_FILTER_2 = desc = "AL_DLAND_300"; vx_mask = "MET_BASE/tc_data/basin_global_tenth_degree.nc { name=\"basin\"; level=\"(*,*)\"; } ==1"; dland_thresh = >0&&<300;
+#TC_GEN_FILTER_3 = desc = "EP_CP_BASIN"; vx_mask = "MET_BASE/tc_data/basin_global_tenth_degree.nc { name=\"basin\"; level=\"(*,*)\";} ==2||==3";
+#TC_GEN_FILTER_4 = desc = "EP_BASIN"; genesis_window = { beg = -3*24; end = 3*24; }; genesis_radius = 300;
+#TC_GEN_FILTER_5 = desc = "3DAY_300KM"; genesis_window = { beg = -3*24; end = 3*24; }; genesis_radius = 300;
+#TC_GEN_FILTER_6 = desc = "3DAY_600KM"; genesis_window = { beg = -3*24; end = 3*24; }; genesis_radius = 600;
+#TC_GEN_FILTER_7 = desc = "5DAY_300KM"; genesis_window = { beg = -5*24; end = 5*24; }; genesis_radius = 300;
+#TC_GEN_FILTER_8 = desc = "5DAY_600KM"; genesis_window = { beg = -5*24; end = 5*24; }; genesis_radius = 600;
+
+TC_GEN_DESC = ALL
+
+MODEL =
+
+TC_GEN_STORM_ID =
+
+TC_GEN_STORM_NAME =
+
+TC_GEN_INIT_BEG =
+TC_GEN_INIT_END =
+TC_GEN_INIT_INC =
+TC_GEN_INIT_EXC =
+
+TC_GEN_VALID_BEG =
+TC_GEN_VALID_END =
+
+TC_GEN_INIT_HOUR =
+
+# sets METPLUS_LEAD in the wrapped MET config file
+LEAD_SEQ =
+
+TC_GEN_VX_MASK =
+
+TC_GEN_BASIN_MASK = BASIN_MASK_template
+
+TC_GEN_DLAND_THRESH = >0
+
+TC_GEN_GENESIS_MATCH_RADIUS = 500
+
+#TC_GEN_GENESIS_MATCH_POINT_TO_TRACK = True
+TC_GEN_GENESIS_MATCH_POINT_TO_TRACK = false
+
+TC_GEN_GENESIS_MATCH_WINDOW_BEG = 0
+TC_GEN_GENESIS_MATCH_WINDOW_END = 0
+
+TC_GEN_OPS_HIT_WINDOW_BEG = 0
+TC_GEN_OPS_HIT_WINDOW_END = 48
+
+TC_GEN_DEV_HIT_RADIUS = 500
+
+TC_GEN_DEV_HIT_WINDOW_BEGIN = -24
+TC_GEN_DEV_HIT_WINDOW_END = 24
+
+TC_GEN_DISCARD_INIT_POST_GENESIS_FLAG = True
+
+TC_GEN_DEV_METHOD_FLAG = True
+
+TC_GEN_OPS_METHOD_FLAG = True
+
+TC_GEN_CI_ALPHA = 0.05
+
+TC_GEN_OUTPUT_FLAG_FHO = NONE
+TC_GEN_OUTPUT_FLAG_CTC = BOTH
+TC_GEN_OUTPUT_FLAG_CTS = BOTH
+TC_GEN_OUTPUT_FLAG_PCT = NONE
+TC_GEN_OUTPUT_FLAG_PSTD = NONE
+TC_GEN_OUTPUT_FLAG_PJC = NONE
+TC_GEN_OUTPUT_FLAG_PRC = NONE
+TC_GEN_OUTPUT_FLAG_GENMPR = BOTH
+
+
+TC_GEN_NC_PAIRS_FLAG_LATLON = TRUE
+TC_GEN_NC_PAIRS_FLAG_FCST_GENESIS = TRUE
+TC_GEN_NC_PAIRS_FLAG_FCST_TRACKS = TRUE
+TC_GEN_NC_PAIRS_FLAG_FCST_FY_OY = TRUE
+TC_GEN_NC_PAIRS_FLAG_FCST_FY_ON = TRUE
+TC_GEN_NC_PAIRS_FLAG_BEST_GENESIS = TRUE
+TC_GEN_NC_PAIRS_FLAG_BEST_TRACKS = TRUE
+TC_GEN_NC_PAIRS_FLAG_BEST_FY_OY = TRUE
+TC_GEN_NC_PAIRS_FLAG_BEST_FN_OY = TRUE
+
+TC_GEN_VALID_MINUS_GENESIS_DIFF_THRESH = NA
+
+TC_GEN_BEST_UNIQUE_FLAG = TRUE
+
+TC_GEN_DLAND_FILE = MET_BASE/tc_data/dland_global_tenth_degree.nc
+
+TC_GEN_BASIN_FILE = MET_BASE/tc_data/basin_global_tenth_degree.nc
+
+TC_GEN_NC_PAIRS_GRID = G003
diff --git a/parm/metplus_config/hurricane/stats/TCPairs_template.conf b/parm/metplus_config/hurricane/stats/TCPairs_template.conf
new file mode 100644
index 0000000000..9db95cd7a4
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCPairs_template.conf
@@ -0,0 +1,85 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+#
+# CONFIGURATION
+#
+[config]
+
+# Looping by times: steps through each 'task' in the PROCESS_LIST for each
+# defined time, and repeats until all times have been evaluated.
+LOOP_ORDER = processes
+
+# 'Tasks' to be run
+PROCESS_LIST = TCPairs
+
+LOOP_BY = INIT
+
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step-size. Increment in seconds from the begin time to the end time
+# set to 6 hours = 21600 seconds
+INIT_INCREMENT = 21600
+
+#
+# Run MET tc_pairs by indicating the top-level directories for the A-deck and B-deck files. Set to 'yes' to
+# run using top-level directories, 'no' if you want to run tc_pairs on files paired by the wrapper.
+TC_PAIRS_READ_ALL_FILES = no
+
+
+#
+# MET TC-Pairs
+#
+# List of models to be used (white space or comma separated) eg: DSHP, LGEM, HWRF
+# If no models are listed, then process all models in the input file(s).
+MODEL = MD01, MD02, MD03
+
+# List of storm ids of interest (space or comma separated) e.g.: AL112012, AL122012
+# If no storm ids are listed, then process all storm ids in the input file(s).
+#TC_PAIRS_STORM_ID = AL092021, AL102021
+
+# Basins (of origin/region). Indicate with space or comma-separated list of regions, eg. AL: for North Atlantic,
+# WP: Western North Pacific, CP: Central North Pacific, SH: Southern Hemisphere, IO: North Indian Ocean, LS: Southern
+# Hemisphere
+TC_PAIRS_BASIN = TC_PAIRS_BASIN_template
+
+# Cyclone, a space or comma-separated list of cyclone numbers. If left empty, all cyclones will be used.
+TC_PAIRS_CYCLONE = TC_PAIRS_CYCLONE_template
+
+# DLAND file, the full path of the file that contains the gridded representation of the
+# minimum distance from land.
+TC_PAIRS_DLAND_FILE = MET_BASE/tc_data/dland_global_tenth_degree.nc
+
+# setting this so that when verifying against analysis track, the union of points are written
+TC_PAIRS_MET_CONFIG_OVERRIDES = match_points = TRUE;
+
+#// Specify if the code should check for duplicate ATCF lines
+TC_PAIRS_CHECK_DUP = true
+
+#// Specify special processing to be performed for interpolated models.
+#// Set to NONE, FILL, or REPLACE.
+TC_PAIRS_INTERP12 = none
+
+#
+# DIRECTORIES
+#
+# Location of input track data directory
+# for ADECK and BDECK data
+
+TC_PAIRS_ADECK_INPUT_DIR = {INPUT_BASE}
+TC_PAIRS_BDECK_INPUT_DIR = {INPUT_BASE}
+TC_PAIRS_ADECK_TEMPLATE = a{basin}{cyclone}{init?fmt=%Y}.dat
+TC_PAIRS_BDECK_TEMPLATE = b{basin}{cyclone}{init?fmt=%Y}.dat
+#TC_PAIRS_ADECK_TEMPLATE = track.{init?fmt=%Y%m%d%H}.dat
+#TC_PAIRS_BDECK_TEMPLATE = b{basin?fmt=%s}{cyclone?fmt=%s}{date?fmt=%Y}.dat
+
+TC_PAIRS_OUTPUT_DIR = {OUTPUT_BASE}/tc_pairs
+TC_PAIRS_OUTPUT_TEMPLATE = tc_pairs.{basin}{cyclone}{init?fmt=%Y}
+
diff --git a/parm/metplus_config/hurricane/stats/TCPairs_template_regional.conf b/parm/metplus_config/hurricane/stats/TCPairs_template_regional.conf
new file mode 100644
index 0000000000..7ad6690e51
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCPairs_template_regional.conf
@@ -0,0 +1,85 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+#
+# CONFIGURATION
+#
+[config]
+
+# Looping by times: steps through each 'task' in the PROCESS_LIST for each
+# defined time, and repeats until all times have been evaluated.
+LOOP_ORDER = processes
+
+# 'Tasks' to be run
+PROCESS_LIST = TCPairs
+
+LOOP_BY = INIT
+
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step-size. Increment in seconds from the begin time to the end time
+# set to 6 hours = 21600 seconds
+INIT_INCREMENT = 21600
+
+#
+# Run MET tc_pairs by indicating the top-level directories for the A-deck and B-deck files. Set to 'yes' to
+# run using top-level directories, 'no' if you want to run tc_pairs on files paired by the wrapper.
+TC_PAIRS_READ_ALL_FILES = no
+
+
+#
+# MET TC-Pairs
+#
+# List of models to be used (white space or comma separated) eg: DSHP, LGEM, HWRF
+# If no models are listed, then process all models in the input file(s).
+MODEL = MD01, MD02, MD03, MD04
+
+# List of storm ids of interest (space or comma separated) e.g.: AL112012, AL122012
+# If no storm ids are listed, then process all storm ids in the input file(s).
+#TC_PAIRS_STORM_ID = AL092021, AL102021
+
+# Basins (of origin/region). Indicate with space or comma-separated list of regions, eg. AL: for North Atlantic,
+# WP: Western North Pacific, CP: Central North Pacific, SH: Southern Hemisphere, IO: North Indian Ocean, LS: Southern
+# Hemisphere
+TC_PAIRS_BASIN = TC_PAIRS_BASIN_template
+
+# Cyclone, a space or comma-separated list of cyclone numbers. If left empty, all cyclones will be used.
+TC_PAIRS_CYCLONE = TC_PAIRS_CYCLONE_template
+
+# DLAND file, the full path of the file that contains the gridded representation of the
+# minimum distance from land.
+TC_PAIRS_DLAND_FILE = MET_BASE/tc_data/dland_global_tenth_degree.nc
+
+# setting this so that when verifying against analysis track, the union of points are written
+TC_PAIRS_MET_CONFIG_OVERRIDES = match_points = TRUE;
+
+#// Specify if the code should check for duplicate ATCF lines
+TC_PAIRS_CHECK_DUP = true
+
+#// Specify special processing to be performed for interpolated models.
+#// Set to NONE, FILL, or REPLACE.
+TC_PAIRS_INTERP12 = none
+
+#
+# DIRECTORIES
+#
+# Location of input track data directory
+# for ADECK and BDECK data
+
+TC_PAIRS_ADECK_INPUT_DIR = {INPUT_BASE}
+TC_PAIRS_BDECK_INPUT_DIR = {INPUT_BASE}
+TC_PAIRS_ADECK_TEMPLATE = a{basin}{cyclone}{init?fmt=%Y}.dat
+TC_PAIRS_BDECK_TEMPLATE = b{basin}{cyclone}{init?fmt=%Y}.dat
+#TC_PAIRS_ADECK_TEMPLATE = track.{init?fmt=%Y%m%d%H}.dat
+#TC_PAIRS_BDECK_TEMPLATE = b{basin?fmt=%s}{cyclone?fmt=%s}{date?fmt=%Y}.dat
+
+TC_PAIRS_OUTPUT_DIR = {OUTPUT_BASE}/tc_pairs
+TC_PAIRS_OUTPUT_TEMPLATE = tc_pairs.{basin}{cyclone}{init?fmt=%Y}
+
diff --git a/parm/metplus_config/hurricane/stats/TCStat_template.conf b/parm/metplus_config/hurricane/stats/TCStat_template.conf
new file mode 100644
index 0000000000..2e0b90914d
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCStat_template.conf
@@ -0,0 +1,140 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+# CONFIGURATION
+#
+[config]
+#set looping method to processes-each 'task' in the process list runs to
+# completion (for all init times) before the next 'task' is run
+# List of 'tasks' to run
+LOOP_ORDER = processes
+PROCESS_LIST = TCStat
+
+LOOP_BY = INIT
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step size: the increment from the begin time to the end time,
+# set to 6 hours (6H)
+INIT_INCREMENT = 6H
+
+# DIRECTORIES
+#
+# TC-Stat input data (-lookin argument)
+# uses output from tc-pairs
+TC_STAT_LOOKIN_DIR = {OUTPUT_BASE}/tc_pairs
+
+# TC-Stat output data (creates .tcst ASCII files which can be read or used as
+# input to TCMPR_Plotter_wrapper (the Python wrapper to plot_tcmpr.R) to create plots.
+TC_STAT_OUTPUT_DIR = {OUTPUT_BASE}/tc_stat
+TC_STAT_OUTPUT_TEMPLATE = tc_stat.out
+
+# Leave blank or remove to use wrapped config file in parm/met_config
+TC_STAT_CONFIG_FILE = {PARM_BASE}/met_config/TCStatConfig_wrapped
+
+# 5-days' statistics
+#TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+# 7-days' statistics
+TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -lead 1320000 -lead 1440000 -lead 1560000 -lead 1680000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+
+#
+#The line_type field stratifies by the line_type column.
+TC_STAT_LINE_TYPE = TCMPR
+#
+# Stratify by these columns:
+TC_STAT_AMODEL = MD01, MD02, MD03
+TC_STAT_BMODEL = BEST
+TC_STAT_DESC =
+TC_STAT_STORM_ID =
+TC_STAT_BASIN =
+TC_STAT_CYCLONE =
+TC_STAT_STORM_NAME =
+#
+# # Stratify by init times via a comma-separated list of init times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_INIT_BEG = TC_STAT_INIT_BEG_temp
+TC_STAT_INIT_END = TC_STAT_INIT_END_temp
+TC_STAT_INIT_INCLUDE =
+TC_STAT_INIT_EXCLUDE =
+TC_STAT_INIT_HOUR =
+#
+# # Stratify by valid times via a comma-separated list of valid times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_VALID_BEG =
+TC_STAT_VALID_END =
+TC_STAT_VALID_INCLUDE =
+TC_STAT_VALID_EXCLUDE =
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD_REQ =
+TC_STAT_INIT_MASK =
+TC_STAT_VALID_MASK =
+
+# Stratify by the valid time and lead time via comma-separated list of
+# # times in format HH[MMSS]
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD =
+#
+# # Stratify over the watch_warn column in the tcst file. Setting this to
+# # 'ALL' will match HUWARN, HUWATCH, TSWARN, TSWATCH
+TC_STAT_TRACK_WATCH_WARN =
+#
+# # Stratify by applying thresholds to numeric data columns. Specify with
+# # comma-separated list of column names and thresholds to be applied.
+# # The length of TC_STAT_COLUMN_THRESH_NAME should be the same as
+# # TC_STAT_COLUMN_THRESH_VAL.
+TC_STAT_COLUMN_THRESH_NAME =
+TC_STAT_COLUMN_THRESH_VAL =
+#
+# # Stratify by a list of comma-separated columns names and values corresponding
+# # to non-numeric data columns of the values of interest.
+TC_STAT_COLUMN_STR_NAME = LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL
+TC_STAT_COLUMN_STR_VAL = TY,TC,HU,SD,SS,TS,TD
+
+# # Stratify by applying thresholds to numeric data columns only when lead=0.
+# # If lead=0 and the value does not meet the threshold, discard the entire
+# # track. The length of TC_STAT_INIT_THRESH_NAME must equal the length of
+# # TC_STAT_INIT_THRESH_VAL.
+TC_STAT_INIT_THRESH_NAME =
+TC_STAT_INIT_THRESH_VAL =
+
+# Stratify by matching strings in non-numeric data columns only when lead = 0.
+# # If lead = 0 but the value doesn't match, discard the entire
+# # track.
+TC_STAT_INIT_STR_NAME =
+TC_STAT_INIT_STR_VAL =
+#
+# # Excludes any points where distance to land is <=0. When set to TRUE, once land
+# # is encountered, the remainder of the forecast track is NOT used for the
+# # verification, even if the track moves back over water.
+TC_STAT_WATER_ONLY = FALSE
+#
+# # TRUE or FALSE. To specify whether only those track points occurring near
+# # landfall should be retained. Landfall is the last bmodel track point before
+# # the distance to land switches from water to land.
+TC_STAT_LANDFALL =
+#
+# # Define the landfall retention window, which is defined as the hours offset
+# # from the time of landfall. Format is in HH[MMSS]. Default TC_STAT_LANDFALL_BEG
+# # is set to -24, and TC_STAT_LANDFALL_END is set to 00
+TC_STAT_LANDFALL_BEG = -24
+TC_STAT_LANDFALL_END = 00
+#
+# # Specify whether only those track points common to both the ADECK and BDECK
+# # tracks should be written out
+TC_STAT_MATCH_POINTS = true
+
+#TC_STAT_COLUMN_STR_EXC_NAME =
+##TC_STAT_COLUMN_STR_EXC_VAL =
+#
+##TC_STAT_INIT_STR_EXC_NAME =
+##TC_STAT_INIT_STR_EXC_VAL =
+#
+## IMPORTANT Refer to the README_TC for details on setting up analysis
+## jobs (located in {MET_INSTALL_DIR}/share/met/config)
+
diff --git a/parm/metplus_config/hurricane/stats/TCStat_template_basin.conf b/parm/metplus_config/hurricane/stats/TCStat_template_basin.conf
new file mode 100644
index 0000000000..8756c9d42c
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCStat_template_basin.conf
@@ -0,0 +1,140 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+# CONFIGURATION
+#
+[config]
+#set looping method to processes-each 'task' in the process list runs to
+# completion (for all init times) before the next 'task' is run
+# List of 'tasks' to run
+LOOP_ORDER = processes
+PROCESS_LIST = TCStat
+
+LOOP_BY = INIT
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step size: the increment from the begin time to the end time,
+# set to 6 hours (6H)
+INIT_INCREMENT = 6H
+
+# DIRECTORIES
+#
+# TC-Stat input data (-lookin argument)
+# uses output from tc-pairs
+TC_STAT_LOOKIN_DIR = {INPUT_BASE}
+
+# TC-Stat output data (creates .tcst ASCII files which can be read or used as
+# input to TCMPR_Plotter_wrapper (the Python wrapper to plot_tcmpr.R) to create plots.
+TC_STAT_OUTPUT_DIR = {OUTPUT_BASE}/tc_stat
+TC_STAT_OUTPUT_TEMPLATE = tc_stat.out
+
+# Leave blank or remove to use wrapped config file in parm/met_config
+TC_STAT_CONFIG_FILE = {PARM_BASE}/met_config/TCStatConfig_wrapped
+
+# 5-days' statistics
+#TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+# 7-days' statistics
+TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -lead 1320000 -lead 1440000 -lead 1560000 -lead 1680000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+
+#
+#The line_type field stratifies by the line_type column.
+TC_STAT_LINE_TYPE = TCMPR
+#
+# Stratify by these columns:
+TC_STAT_AMODEL = MD01, MD02, MD03
+TC_STAT_BMODEL = BEST
+TC_STAT_DESC =
+TC_STAT_STORM_ID =
+TC_STAT_BASIN =
+TC_STAT_CYCLONE =
+TC_STAT_STORM_NAME =
+#
+# # Stratify by init times via a comma-separated list of init times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_INIT_BEG = TC_STAT_INIT_BEG_temp
+TC_STAT_INIT_END = TC_STAT_INIT_END_temp
+TC_STAT_INIT_INCLUDE =
+TC_STAT_INIT_EXCLUDE =
+TC_STAT_INIT_HOUR =
+#
+# # Stratify by valid times via a comma-separated list of valid times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_VALID_BEG =
+TC_STAT_VALID_END =
+TC_STAT_VALID_INCLUDE =
+TC_STAT_VALID_EXCLUDE =
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD_REQ =
+TC_STAT_INIT_MASK =
+TC_STAT_VALID_MASK =
+
+# Stratify by the valid time and lead time via comma-separated list of
+# # times in format HH[MMSS]
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD =
+#
+# # Stratify over the watch_warn column in the tcst file. Setting this to
+# # 'ALL' will match HUWARN, HUWATCH, TSWARN, TSWATCH
+TC_STAT_TRACK_WATCH_WARN =
+#
+# # Stratify by applying thresholds to numeric data columns. Specify with
+# # comma-separated list of column names and thresholds to be applied.
+# # The length of TC_STAT_COLUMN_THRESH_NAME should be the same as
+# # TC_STAT_COLUMN_THRESH_VAL.
+TC_STAT_COLUMN_THRESH_NAME =
+TC_STAT_COLUMN_THRESH_VAL =
+#
+# # Stratify by a list of comma-separated columns names and values corresponding
+# # to non-numeric data columns of the values of interest.
+TC_STAT_COLUMN_STR_NAME = LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL
+TC_STAT_COLUMN_STR_VAL = TY,TC,HU,SD,SS,TS,TD
+
+# # Stratify by applying thresholds to numeric data columns only when lead=0.
+# # If lead=0 and the value does not meet the threshold, discard the entire
+# # track. The length of TC_STAT_INIT_THRESH_NAME must equal the length of
+# # TC_STAT_INIT_THRESH_VAL.
+TC_STAT_INIT_THRESH_NAME =
+TC_STAT_INIT_THRESH_VAL =
+
+# Stratify by matching strings in non-numeric data columns only when lead = 0.
+# # If lead = 0 but the value doesn't match, discard the entire
+# # track.
+TC_STAT_INIT_STR_NAME =
+TC_STAT_INIT_STR_VAL =
+#
+# # Excludes any points where distance to land is <=0. When set to TRUE, once land
+# # is encountered, the remainder of the forecast track is NOT used for the
+# # verification, even if the track moves back over water.
+TC_STAT_WATER_ONLY = FALSE
+#
+# # TRUE or FALSE. To specify whether only those track points occurring near
+# # landfall should be retained. Landfall is the last bmodel track point before
+# # the distance to land switches from water to land.
+TC_STAT_LANDFALL =
+#
+# # Define the landfall retention window, which is defined as the hours offset
+# # from the time of landfall. Format is in HH[MMSS]. Default TC_STAT_LANDFALL_BEG
+# # is set to -24, and TC_STAT_LANDFALL_END is set to 00
+TC_STAT_LANDFALL_BEG = -24
+TC_STAT_LANDFALL_END = 00
+#
+# # Specify whether only those track points common to both the ADECK and BDECK
+# # tracks should be written out
+TC_STAT_MATCH_POINTS = true
+
+#TC_STAT_COLUMN_STR_EXC_NAME =
+##TC_STAT_COLUMN_STR_EXC_VAL =
+#
+##TC_STAT_INIT_STR_EXC_NAME =
+##TC_STAT_INIT_STR_EXC_VAL =
+#
+## IMPORTANT Refer to the README_TC for details on setting up analysis
+## jobs (located in {MET_INSTALL_DIR}/share/met/config)
+
diff --git a/parm/metplus_config/hurricane/stats/TCStat_template_basin_regional.conf b/parm/metplus_config/hurricane/stats/TCStat_template_basin_regional.conf
new file mode 100644
index 0000000000..5c748a7305
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCStat_template_basin_regional.conf
@@ -0,0 +1,140 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+# CONFIGURATION
+#
+[config]
+#set looping method to processes-each 'task' in the process list runs to
+# completion (for all init times) before the next 'task' is run
+# List of 'tasks' to run
+LOOP_ORDER = processes
+PROCESS_LIST = TCStat
+
+LOOP_BY = INIT
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step size: the increment from the begin time to the end time,
+# set to 6 hours (6H)
+INIT_INCREMENT = 6H
+
+# DIRECTORIES
+#
+# TC-Stat input data (-lookin argument)
+# uses output from tc-pairs
+TC_STAT_LOOKIN_DIR = {INPUT_BASE}
+
+# TC-Stat output data (creates .tcst ASCII files which can be read or used as
+# input to TCMPR_Plotter_wrapper (the Python wrapper to plot_tcmpr.R) to create plots.
+TC_STAT_OUTPUT_DIR = {OUTPUT_BASE}/tc_stat
+TC_STAT_OUTPUT_TEMPLATE = tc_stat.out
+
+# Leave blank or remove to use wrapped config file in parm/met_config
+TC_STAT_CONFIG_FILE = {PARM_BASE}/met_config/TCStatConfig_wrapped
+
+# 5-days' statistics
+TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+# 7-days' statistics
+#TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -lead 1320000 -lead 1440000 -lead 1560000 -lead 1680000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+
+#
+#The line_type field stratifies by the line_type column.
+TC_STAT_LINE_TYPE = TCMPR
+#
+# Stratify by these columns:
+TC_STAT_AMODEL = MD01, MD02, MD03, MD04
+TC_STAT_BMODEL = BEST
+TC_STAT_DESC =
+TC_STAT_STORM_ID =
+TC_STAT_BASIN =
+TC_STAT_CYCLONE =
+TC_STAT_STORM_NAME =
+#
+# # Stratify by init times via a comma-separated list of init times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_INIT_BEG = TC_STAT_INIT_BEG_temp
+TC_STAT_INIT_END = TC_STAT_INIT_END_temp
+TC_STAT_INIT_INCLUDE =
+TC_STAT_INIT_EXCLUDE =
+TC_STAT_INIT_HOUR =
+#
+# # Stratify by valid times via a comma-separated list of valid times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_VALID_BEG =
+TC_STAT_VALID_END =
+TC_STAT_VALID_INCLUDE =
+TC_STAT_VALID_EXCLUDE =
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD_REQ =
+TC_STAT_INIT_MASK =
+TC_STAT_VALID_MASK =
+
+# Stratify by the valid time and lead time via comma-separated list of
+# # times in format HH[MMSS]
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD =
+#
+# # Stratify over the watch_warn column in the tcst file. Setting this to
+# # 'ALL' will match HUWARN, HUWATCH, TSWARN, TSWATCH
+TC_STAT_TRACK_WATCH_WARN =
+#
+# # Stratify by applying thresholds to numeric data columns. Specify with
+# # comma-separated list of column names and thresholds to be applied.
+# # The length of TC_STAT_COLUMN_THRESH_NAME should be the same as
+# # TC_STAT_COLUMN_THRESH_VAL.
+TC_STAT_COLUMN_THRESH_NAME =
+TC_STAT_COLUMN_THRESH_VAL =
+#
+# # Stratify by a list of comma-separated columns names and values corresponding
+# # to non-numeric data columns of the values of interest.
+TC_STAT_COLUMN_STR_NAME = LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL
+TC_STAT_COLUMN_STR_VAL = TY,TC,HU,SD,SS,TS,TD
+
+# # Stratify by applying thresholds to numeric data columns only when lead=0.
+# # If lead=0 and the value does not meet the threshold, discard the entire
+# # track. The length of TC_STAT_INIT_THRESH_NAME must equal the length of
+# # TC_STAT_INIT_THRESH_VAL.
+TC_STAT_INIT_THRESH_NAME =
+TC_STAT_INIT_THRESH_VAL =
+
+# Stratify by matching strings in non-numeric data columns only when lead = 0.
+# # If lead = 0 but the value doesn't match, discard the entire
+# # track.
+TC_STAT_INIT_STR_NAME =
+TC_STAT_INIT_STR_VAL =
+#
+# # Excludes any points where distance to land is <=0. When set to TRUE, once land
+# # is encountered, the remainder of the forecast track is NOT used for the
+# # verification, even if the track moves back over water.
+TC_STAT_WATER_ONLY = FALSE
+#
+# # TRUE or FALSE. To specify whether only those track points occurring near
+# # landfall should be retained. Landfall is the last bmodel track point before
+# # the distance to land switches from water to land.
+TC_STAT_LANDFALL =
+#
+# # Define the landfall retention window, which is defined as the hours offset
+# # from the time of landfall. Format is in HH[MMSS]. Default TC_STAT_LANDFALL_BEG
+# # is set to -24, and TC_STAT_LANDFALL_END is set to 00
+TC_STAT_LANDFALL_BEG = -24
+TC_STAT_LANDFALL_END = 00
+#
+# # Specify whether only those track points common to both the ADECK and BDECK
+# # tracks should be written out
+TC_STAT_MATCH_POINTS = true
+
+#TC_STAT_COLUMN_STR_EXC_NAME =
+##TC_STAT_COLUMN_STR_EXC_VAL =
+#
+##TC_STAT_INIT_STR_EXC_NAME =
+##TC_STAT_INIT_STR_EXC_VAL =
+#
+## IMPORTANT Refer to the README_TC for details on setting up analysis
+## jobs (located in {MET_INSTALL_DIR}/share/met/config)
+
diff --git a/parm/metplus_config/hurricane/stats/TCStat_template_regional.conf b/parm/metplus_config/hurricane/stats/TCStat_template_regional.conf
new file mode 100644
index 0000000000..d87ed242d4
--- /dev/null
+++ b/parm/metplus_config/hurricane/stats/TCStat_template_regional.conf
@@ -0,0 +1,140 @@
+#
+[dir]
+INPUT_BASE = INPUT_BASE_template
+OUTPUT_BASE = OUTPUT_BASE_template
+MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.2
+#MET_INSTALL_DIR = /apps/ops/para/libs/intel/19.1.3.304/met/10.1.1
+# CONFIGURATION
+#
+[config]
+#set looping method to processes-each 'task' in the process list runs to
+# completion (for all init times) before the next 'task' is run
+# List of 'tasks' to run
+LOOP_ORDER = processes
+PROCESS_LIST = TCStat
+
+LOOP_BY = INIT
+
+# The init time begin and end times, increment, and last init hour.
+INIT_TIME_FMT = %Y%m%d%H
+INIT_BEG = INIT_BEG_template
+INIT_END = INIT_END_template
+
+# This is the step size: the increment from the begin time to the end time,
+# set to 6 hours (6H)
+INIT_INCREMENT = 6H
+
+# DIRECTORIES
+#
+# TC-Stat input data (-lookin argument)
+# uses output from tc-pairs
+TC_STAT_LOOKIN_DIR = {OUTPUT_BASE}/tc_pairs
+
+# TC-Stat output data (creates .tcst ASCII files which can be read or used as
+# input to TCMPR_Plotter_wrapper (the Python wrapper to plot_tcmpr.R) to create plots.
+TC_STAT_OUTPUT_DIR = {OUTPUT_BASE}/tc_stat
+TC_STAT_OUTPUT_TEMPLATE = tc_stat.out
+
+# Leave blank or remove to use wrapped config file in parm/met_config
+TC_STAT_CONFIG_FILE = {PARM_BASE}/met_config/TCStatConfig_wrapped
+
+# 5-days' statistics
+TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+# 7-days' statistics
+#TC_STAT_JOB_ARGS = -job summary -lead 000000 -lead 120000 -lead 240000 -lead 360000 -lead 480000 -lead 600000 -lead 720000 -lead 840000 -lead 960000 -lead 1080000 -lead 1200000 -lead 1320000 -lead 1440000 -lead 1560000 -lead 1680000 -line_type TCMPR -match_points true -event_equal true -column AMAX_WIND-BMAX_WIND -column ABS(AMAX_WIND-BMAX_WIND) -column ALTK_ERR -column CRTK_ERR -column ABS(TK_ERR) -by LEAD -by AMODEL -out_alpha 0.05000 -dump_row {TC_STAT_OUTPUT_DIR}/tc_stat_summary.tcst
+
+#
+#The line_type field stratifies by the line_type column.
+TC_STAT_LINE_TYPE = TCMPR
+#
+# Stratify by these columns:
+TC_STAT_AMODEL = MD01, MD02, MD03, MD04
+TC_STAT_BMODEL = BEST
+TC_STAT_DESC =
+TC_STAT_STORM_ID =
+TC_STAT_BASIN =
+TC_STAT_CYCLONE =
+TC_STAT_STORM_NAME =
+#
+# # Stratify by init times via a comma-separated list of init times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_INIT_BEG = TC_STAT_INIT_BEG_temp
+TC_STAT_INIT_END = TC_STAT_INIT_END_temp
+TC_STAT_INIT_INCLUDE =
+TC_STAT_INIT_EXCLUDE =
+TC_STAT_INIT_HOUR =
+#
+# # Stratify by valid times via a comma-separated list of valid times to
+# # include or exclude. Time format defined as YYYYMMDD_HH or YYYYMMDD_HHmmss
+TC_STAT_VALID_BEG =
+TC_STAT_VALID_END =
+TC_STAT_VALID_INCLUDE =
+TC_STAT_VALID_EXCLUDE =
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD_REQ =
+TC_STAT_INIT_MASK =
+TC_STAT_VALID_MASK =
+
+# Stratify by the valid time and lead time via comma-separated list of
+# # times in format HH[MMSS]
+TC_STAT_VALID_HOUR =
+TC_STAT_LEAD =
+#
+# # Stratify over the watch_warn column in the tcst file. Setting this to
+# # 'ALL' will match HUWARN, HUWATCH, TSWARN, TSWATCH
+TC_STAT_TRACK_WATCH_WARN =
+#
+# # Stratify by applying thresholds to numeric data columns. Specify with
+# # comma-separated list of column names and thresholds to be applied.
+# # The length of TC_STAT_COLUMN_THRESH_NAME should be the same as
+# # TC_STAT_COLUMN_THRESH_VAL.
+TC_STAT_COLUMN_THRESH_NAME =
+TC_STAT_COLUMN_THRESH_VAL =
+#
+# # Stratify by a list of comma-separated columns names and values corresponding
+# # to non-numeric data columns of the values of interest.
+TC_STAT_COLUMN_STR_NAME = LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL,LEVEL
+TC_STAT_COLUMN_STR_VAL = TY,TC,HU,SD,SS,TS,TD
+
+# # Stratify by applying thresholds to numeric data columns only when lead=0.
+# # If lead=0 and the value does not meet the threshold, discard the entire
+# # track. The length of TC_STAT_INIT_THRESH_NAME must equal the length of
+# # TC_STAT_INIT_THRESH_VAL.
+TC_STAT_INIT_THRESH_NAME =
+TC_STAT_INIT_THRESH_VAL =
+
+# Stratify by matching strings in non-numeric data columns only when lead = 0.
+# # If lead = 0 but the value doesn't match, discard the entire
+# # track.
+TC_STAT_INIT_STR_NAME =
+TC_STAT_INIT_STR_VAL =
+#
+# # Excludes any points where distance to land is <=0. When set to TRUE, once land
+# # is encountered, the remainder of the forecast track is NOT used for the
+# # verification, even if the track moves back over water.
+TC_STAT_WATER_ONLY = FALSE
+#
+# # TRUE or FALSE. To specify whether only those track points occurring near
+# # landfall should be retained. Landfall is the last bmodel track point before
+# # the distance to land switches from water to land.
+TC_STAT_LANDFALL =
+#
+# # Define the landfall retention window, which is defined as the hours offset
+# # from the time of landfall. Format is in HH[MMSS]. Default TC_STAT_LANDFALL_BEG
+# # is set to -24, and TC_STAT_LANDFALL_END is set to 00
+TC_STAT_LANDFALL_BEG = -24
+TC_STAT_LANDFALL_END = 00
+#
+# # Specify whether only those track points common to both the ADECK and BDECK
+# # tracks should be written out
+TC_STAT_MATCH_POINTS = true
+
+#TC_STAT_COLUMN_STR_EXC_NAME =
+##TC_STAT_COLUMN_STR_EXC_VAL =
+#
+##TC_STAT_INIT_STR_EXC_NAME =
+##TC_STAT_INIT_STR_EXC_VAL =
+#
+## IMPORTANT Refer to the README_TC for details on setting up analysis
+## jobs (located in {MET_INSTALL_DIR}/share/met/config
+
diff --git a/scripts/hurricane/stats/exevs_hurricane_global_det_tcgen_stats.sh b/scripts/hurricane/stats/exevs_hurricane_global_det_tcgen_stats.sh
new file mode 100755
index 0000000000..2f68e1a523
--- /dev/null
+++ b/scripts/hurricane/stats/exevs_hurricane_global_det_tcgen_stats.sh
@@ -0,0 +1,209 @@
+#!/bin/bash
+export PS4=' + exevs_hurricane_global_det_tcgen_stats.sh line $LINENO: '
+
+export cartopyDataDir=${cartopyDataDir:-${FIXevs}/cartopy}
+export savePlots=${savePlots:-YES}
+export YEAR=${YYYY}
+export TCGENdays="TC Genesis(05/01/${YEAR}-11/30/${YEAR})"
+export basinlist="al ep wp"
+export modellist="gfs ecmwf cmc"
+
+noaa_logo() {
+ TargetImageName=$1
+ WaterMarkLogoFileName=${FIXevs}/noaa.png
+ echo "Start NOAA Logo marking... "
+ SCALE=50
+ composite -gravity northwest -quality 2 \( $WaterMarkLogoFileName -resize $SCALE% \) "$TargetImageName" "$TargetImageName"
+ error=$?
+ echo "NOAA Logo is generated. "
+ return $error
+}
+nws_logo() {
+ TargetImageName=$1
+ WaterMarkLogoFileName=${FIXevs}/nws.png
+ echo "Start NWS Logo marking... "
+ SCALE=50
+ composite -gravity northeast -quality 2 \( $WaterMarkLogoFileName -resize $SCALE% \) "$TargetImageName" "$TargetImageName"
+ error=$?
+ echo "NWS Logo is generated. "
+ return $error
+}
+
+for basin in $basinlist; do
+### basin do loop start
+for model in $modellist; do
+### model do loop start
+
+export DATAroot=${DATA}/tcgen
+if [ ! -d ${DATAroot} ]; then mkdir -p ${DATAroot}; fi
+export INPUT=${DATAroot}/input/${basin}_${model}
+if [ ! -d ${INPUT} ]; then mkdir -p ${INPUT}; fi
+export OUTPUT=${DATAroot}/output/${basin}_${model}
+if [ ! -d ${OUTPUT} ]; then mkdir -p ${OUTPUT}; fi
+
+if [ ${model} = "gfs" ]; then
+ cp ${COMINgenesis}/${model}_genesis_${YEAR} ${INPUT}/ALLgenesis_${YEAR}
+ export INIT_FREQ=6
+elif [ ${model} = "ecmwf" ]; then
+ cp ${COMINgenesis}/${model}_genesis_${YEAR} ${INPUT}/ALLgenesis_${YEAR}
+ export INIT_FREQ=12
+elif [ ${model} = "cmc" ]; then
+ cp ${COMINgenesis}/${model}_genesis_${YEAR} ${INPUT}/ALLgenesis_${YEAR}
+ export INIT_FREQ=12
+fi
+
+if [ ${basin} = "al" ]; then
+ cp ${COMINadeckNHC}/aal*.dat ${INPUT}/.
+ cp ${COMINbdeckNHC}/bal*.dat ${INPUT}/.
+ export BASIN_MASK="AL"
+ grep "AL, 9" ${INPUT}/ALLgenesis_${YEAR} > ${INPUT}/genesis_${YEAR}
+ grep "HC," ${INPUT}/ALLgenesis_${YEAR} >> ${INPUT}/genesis_${YEAR}
+elif [ ${basin} = "ep" ]; then
+ cp ${COMINadeckNHC}/aep*.dat ${INPUT}/.
+ cp ${COMINbdeckNHC}/bep*.dat ${INPUT}/.
+ export BASIN_MASK="EP"
+ grep "EP, 9" ${INPUT}/ALLgenesis_${YEAR} > ${INPUT}/genesis_${YEAR}
+ grep "HC," ${INPUT}/ALLgenesis_${YEAR} >> ${INPUT}/genesis_${YEAR}
+elif [ ${basin} = "wp" ]; then
+ cp ${COMINadeckJTWC}/awp*.dat ${INPUT}/.
+ cp ${COMINbdeckJTWC}/bwp*.dat ${INPUT}/.
+ export BASIN_MASK="WP"
+ grep "WP, 9" ${INPUT}/ALLgenesis_${YEAR} > ${INPUT}/genesis_${YEAR}
+ grep "HC," ${INPUT}/ALLgenesis_${YEAR} >> ${INPUT}/genesis_${YEAR}
+fi
+
+#--- run for TC_gen
+cd ${OUTPUT}
+cp ${PARMevs}/TCGen_template.conf .
+export VALID_FREQ=6
+
+export SEARCH1="INPUT_BASE_template"
+export SEARCH2="OUTPUT_BASE_template"
+export SEARCH3="YEAR_template"
+export SEARCH4="INIT_FREQ_template"
+export SEARCH5="VALID_FREQ_template"
+export SEARCH6="BASIN_MASK_template"
+
+sed -i "s|$SEARCH1|$INPUT|g" TCGen_template.conf
+sed -i "s|$SEARCH2|$OUTPUT|g" TCGen_template.conf
+sed -i "s|$SEARCH3|$YEAR|g" TCGen_template.conf
+sed -i "s|$SEARCH4|$INIT_FREQ|g" TCGen_template.conf
+sed -i "s|$SEARCH5|$VALID_FREQ|g" TCGen_template.conf
+sed -i "s|$SEARCH6|$BASIN_MASK|g" TCGen_template.conf
+
+run_metplus.py -c ${OUTPUT}/TCGen_template.conf
+
+#--- plot the Hits/False Alarms Distribution
+#export OUTPUT=${DATAroot}/output/${basin}_${model}
+cd ${OUTPUT}
+cp ${USHevs}/hits_${basin}.py .
+grep "00 FYOY" tc_gen_${YEAR}_genmpr.txt > tc_gen_hits.txt
+export hitfile="tc_gen_hits.txt"
+python hits_${basin}.py
+convert TC_genesis.png tcgen_hits_${basin}_${model}.gif
+
+# Attach NOAA logo
+export gif_name=tcgen_hits_${basin}_${model}.gif
+TargetImageName=$gif_name
+noaa_logo $TargetImageName
+error=$?
+
+# Attach NWS logo
+export gif_name=tcgen_hits_${basin}_${model}.gif
+TargetImageName=$gif_name
+nws_logo $TargetImageName
+error=$?
+
+rm -f TC_genesis.png
+
+cp ${USHevs}/false_${basin}.py .
+grep "00 FYON" tc_gen_${YEAR}_genmpr.txt > tc_gen_false.txt
+grep "NA FYON" tc_gen_${YEAR}_genmpr.txt >> tc_gen_false.txt
+export falsefile="tc_gen_false.txt"
+python false_${basin}.py
+convert TC_genesis.png tcgen_falseAlarm_${basin}_${model}.gif
+rm -f TC_genesis.png
+
+# Attach NOAA logo
+export gif_name1=tcgen_falseAlarm_${basin}_${model}.gif
+TargetImageName=$gif_name1
+noaa_logo $TargetImageName
+error=$?
+
+# Attach NWS logo
+export gif_name1=tcgen_falseAlarm_${basin}_${model}.gif
+TargetImageName=$gif_name1
+nws_logo $TargetImageName
+error=$?
+
+cp ${USHevs}/tcgen_space_${basin}.py .
+python tcgen_space_${basin}.py
+convert TC_genesis.png tcgen_HitFalse_${basin}_${model}.gif
+rm -f TC_genesis.png
+
+# Attach NOAA logo
+export gif_name2=tcgen_HitFalse_${basin}_${model}.gif
+TargetImageName=$gif_name2
+noaa_logo $TargetImageName
+error=$?
+
+# Attach NWS logo
+export gif_name2=tcgen_HitFalse_${basin}_${model}.gif
+TargetImageName=$gif_name2
+nws_logo $TargetImageName
+error=$?
+
+#export COMOUTroot=${COMOUT}/${basin}_${model}
+export COMOUTroot=${COMOUT}
+if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot} ]; then mkdir -p ${COMOUTroot}; fi
+ cp ${OUTPUT}/tc_gen_${YEAR}_ctc.txt ${COMOUTroot}/tc_gen_${YEAR}_ctc_${basin}_${model}.txt
+ cp ${OUTPUT}/tc_gen_${YEAR}_cts.txt ${COMOUTroot}/tc_gen_${YEAR}_cts_${basin}_${model}.txt
+ cp ${OUTPUT}/tc_gen_${YEAR}_genmpr.txt ${COMOUTroot}/tc_gen_${YEAR}_genmpr_${basin}_${model}.txt
+ cp ${OUTPUT}/tc_gen_${YEAR}.stat ${COMOUTroot}/tc_gen_${YEAR}_${basin}_${model}.stat
+ cp ${OUTPUT}/tc_gen_${YEAR}_pairs.nc ${COMOUTroot}/tc_gen_${YEAR}_pairs_${basin}_${model}.nc
+ if [ "$savePlots" = 'YES' ]; then
+# cp ${OUTPUT}/*.gif ${COMOUTroot}/.
+ cp ${OUTPUT}/tcgen_hits_${basin}_${model}.gif ${COMOUTroot}/evs.hurricane_global_det.hits.${basin}.${YEAR}.${model}.season.tcgen.png
+ cp ${OUTPUT}/tcgen_falseAlarm_${basin}_${model}.gif ${COMOUTroot}/evs.hurricane_global_det.fals.${basin}.${YEAR}.${model}.season.tcgen.png
+ cp ${OUTPUT}/tcgen_HitFalse_${basin}_${model}.gif ${COMOUTroot}/evs.hurricane_global_det.hitfals.${basin}.${YEAR}.${model}.season.tcgen.png
+ fi
+fi
+
+### model do loop end
+done
+#--- plot the Performance Diagram
+export DATAplot=${DATAroot}/${basin}
+if [ ! -d ${DATAplot} ]; then mkdir -p ${DATAplot}; fi
+cd ${DATAplot}
+cp ${USHevs}/tcgen_performance_diagram.py .
+grep "GENESIS_DEV" ${COMOUTroot}/tc_gen_${YEAR}_ctc_${basin}_gfs.txt > dev_tc_gen_${YEAR}_ctc_${basin}_gfs.txt
+grep "GENESIS_DEV" ${COMOUTroot}/tc_gen_${YEAR}_ctc_${basin}_ecmwf.txt > dev_tc_gen_${YEAR}_ctc_${basin}_ecmwf.txt
+grep "GENESIS_DEV" ${COMOUTroot}/tc_gen_${YEAR}_ctc_${basin}_cmc.txt > dev_tc_gen_${YEAR}_ctc_${basin}_cmc.txt
+export CTCfile01="dev_tc_gen_${YEAR}_ctc_${basin}_gfs.txt"
+export CTCfile02="dev_tc_gen_${YEAR}_ctc_${basin}_ecmwf.txt"
+export CTCfile03="dev_tc_gen_${YEAR}_ctc_${basin}_cmc.txt"
+python tcgen_performance_diagram.py
+convert tcgen_performance_diagram.png tcgen_performance_diagram_${basin}.gif
+
+# Attach NOAA logo
+export gif_name2=tcgen_performance_diagram_${basin}.gif
+TargetImageName=$gif_name2
+noaa_logo $TargetImageName
+error=$?
+
+# Attach NWS logo
+export gif_name2=tcgen_performance_diagram_${basin}.gif
+TargetImageName=$gif_name2
+nws_logo $TargetImageName
+error=$?
+
+if [ "$SENDCOM" = 'YES' ]; then
+if [ "$savePlots" = 'YES' ]; then
+# cp ${DATAplot}/*.gif ${COMOUTroot}/.
+ cp ${DATAplot}/tcgen_performance_diagram_${basin}.gif ${COMOUTroot}/evs.hurricane_global_det.performancediagram.${basin}.${YEAR}.season.tcgen.png
+fi
+fi
+### basin do loop end
+done
+
diff --git a/scripts/hurricane/stats/exevs_hurricane_global_det_tropcyc_stats.sh b/scripts/hurricane/stats/exevs_hurricane_global_det_tropcyc_stats.sh
new file mode 100755
index 0000000000..a740511418
--- /dev/null
+++ b/scripts/hurricane/stats/exevs_hurricane_global_det_tropcyc_stats.sh
@@ -0,0 +1,298 @@
+#!/bin/bash
+export PS4=' + exevs_hurricane_global_det_tropcyc_stats.sh line $LINENO: '
+#--- Purpose: TC track/intensity verification stats and plots for global deterministic models (GFS/ECMWF/CMC)
+export savePlots=${savePlots:-YES}
+export stormYear=${YYYY}
+export basinlist="al ep wp"
+export numlist="01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 \
+ 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40"
+# Loop over every basin/storm-number pair; storms without a non-empty b-deck file are skipped below.
+for bas in $basinlist; do
+### bas do loop start
+for num in $numlist; do
+### num do loop start
+
+export stormBasin=${bas}
+export stbasin=`echo ${stormBasin} | tr "[a-z]" "[A-Z]"`
+echo "${stbasin} upper case: AL/EP/WP"
+export stormNumber=${num}
+
+if [ ${stormBasin} = "al" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTatl=${COMOUT}/Atlantic
+ if [ ! -d ${COMOUTatl} ]; then mkdir -p ${COMOUTatl}; fi
+elif [ ${stormBasin} = "ep" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTepa=${COMOUT}/EastPacific
+ if [ ! -d ${COMOUTepa} ]; then mkdir -p ${COMOUTepa}; fi
+elif [ ${stormBasin} = "wp" ]; then
+ COMINbdeck=${COMINbdeckJTWC}
+ export COMOUTwpa=${COMOUT}/WestPacific
+ if [ ! -d ${COMOUTwpa} ]; then mkdir -p ${COMOUTwpa}; fi
+fi
+
+export bdeckfile=${COMINbdeck}/b${stormBasin}${stormNumber}${stormYear}.dat
+if [ -f ${bdeckfile} ]; then
+numrecs=`cat ${bdeckfile} | wc -l`
+if [ ${numrecs} -gt 0 ]; then
+### start: b-deck file exists and is non-empty
+
+export STORMroot=${DATA}/metTC/${bas}${num}
+if [ ! -d ${STORMroot} ]; then mkdir -p ${STORMroot}; fi
+export STORMdata=${STORMroot}/data
+if [ ! -d ${STORMdata} ]; then mkdir -p ${STORMdata}; fi
+export COMOUTroot=${COMOUT}/${bas}${num}
+if [ ! -d ${COMOUTroot} ]; then mkdir -p ${COMOUTroot}; fi
+cd ${STORMdata}
+
+#---get the storm name from TC-vital file "syndat_tcvitals.${YYYY}"
+#---copy bdeck files to ${STORMdata}
+if [ ${stormBasin} = "al" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}L" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}E" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMINbdeckJTWC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "JTWC ${stormNumber}W" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+fi
+cat TCvit_tail.txt|cut -c10-18 > TCname.txt
+VARIABLE1=$( head -n 1 TCname.txt )
+echo "$VARIABLE1"
+VARIABLE2=$(printf '%s' "$VARIABLE1" | sed 's/[0-9]//g')
+echo "$VARIABLE2"
+stormName=$(sed "s/ //g" <<< $VARIABLE2)
+echo "Name_${stormName}_Name"
+echo "${stormBasin}, ${stormNumber}, ${stormYear}, ${stormName}"
+
+#---get the model forecast tracks "AVNO/EMX/CMC" from archive file "tracks.atcfunix.${YY22}"
+grep "${stbasin}, ${stormNumber}" ${COMINtrack} > tracks.atcfunix.${YY22}_${stormBasin}${stormNumber}
+grep "03, AVNO" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} > a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, EMX" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, CMC" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, AVNO/03, MD01/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, EMX/03, MD02/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, CMC/03, MD03/' a${stormBasin}${stormNumber}${stormYear}.dat
+
+#---get the $startdate, $enddate[YYMMDDHH] from the best track file
+echo $(head -n 1 ${bdeckfile}) > head.txt
+echo $(tail -n 1 ${bdeckfile}) > tail.txt
+cat head.txt|cut -c9-18 > startymdh.txt
+cat tail.txt|cut -c9-18 > endymdh.txt
+firstcyc=$( head -n 1 startymdh.txt )
+lastcyc=$( head -n 1 endymdh.txt )
+
+export YY01=`echo $firstcyc | cut -c1-4`
+export MM01=`echo $firstcyc | cut -c5-6`
+export DD01=`echo $firstcyc | cut -c7-8`
+export HH01=`echo $firstcyc | cut -c9-10`
+
+export YY02=`echo $lastcyc | cut -c1-4`
+export MM02=`echo $lastcyc | cut -c5-6`
+export DD02=`echo $lastcyc | cut -c7-8`
+export HH02=`echo $lastcyc | cut -c9-10`
+
+export startdate="$YY01$MM01$DD01$HH01"
+export enddate="$YY02$MM02$DD02$HH02"
+echo "$startdate, $enddate"
+
+#--- run for TC_pairs
+cp ${PARMevs}/TCPairs_template.conf .
+export SEARCH1="INPUT_BASE_template"
+export SEARCH2="OUTPUT_BASE_template"
+export SEARCH3="INIT_BEG_template"
+export SEARCH4="INIT_END_template"
+export SEARCH5="TC_PAIRS_CYCLONE_template"
+export SEARCH6="TC_PAIRS_BASIN_template"
+
+sed -i "s|$SEARCH1|$STORMdata|g" TCPairs_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCPairs_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCPairs_template.conf
+sed -i "s|$SEARCH4|$enddate|g" TCPairs_template.conf
+sed -i "s|$SEARCH5|$stormNumber|g" TCPairs_template.conf
+sed -i "s|$SEARCH6|$stbasin|g" TCPairs_template.conf
+
+run_metplus.py -c $STORMdata/TCPairs_template.conf
+
+#--- run for TC_stat
+cd $STORMdata
+cp ${PARMevs}/TCStat_template.conf .
+sed -i "s|$SEARCH1|$STORMdata|g" TCStat_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCStat_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCStat_template.conf
+sed -i "s|$SEARCH4|$startdate|g" TCStat_template.conf
+# NOTE(review): INIT_BEG and INIT_END are both set to $startdate, presumably so METplus runs TCStat once; the actual filter window is TC_STAT_INIT_BEG/END below -- confirm
+export SEARCH7="TC_STAT_INIT_BEG_temp"
+export SEARCH8="TC_STAT_INIT_END_temp"
+export under="_"
+export symdh=${YY01}${MM01}${DD01}${under}${HH01}
+export eymdh=${YY02}${MM02}${DD02}${under}${HH02}
+echo "$symdh, $eymdh"
+
+sed -i "s|$SEARCH7|$symdh|g" TCStat_template.conf
+sed -i "s|$SEARCH8|$eymdh|g" TCStat_template.conf
+
+run_metplus.py -c $STORMdata/TCStat_template.conf
+
+#---Storm Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${STORMroot}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120,132,144,156,168"
+export model_tmp_atcf_name_list="MD01,MD02,MD03"
+export model_plot_name_list="GFS,ECMWF,CMC"
+export plot_CI_bars="NO"
+export tc_name=${stbasin}${under}${stormYear}${under}${stormName}
+export basin=${stbasin}
+export tc_num=${stormNumber}
+export tropcyc_model_type="global"
+python ${USHevs}/plot_tropcyc_lead_average.py
+
+# Images land under ${STORMroot}/plot/<BASIN>_<YYYY>_<STORMNAME>/images (e.g. WP_2022_MALAKAS)
+nimgs=$(ls ${STORMroot}/plot/${tc_name}/images/* |wc -l)
+if [ $nimgs -ne 0 ]; then
+ cd ${STORMroot}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_global.png ABSTK_ERR_fhrmean_${tc_name}_global.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_global.png ALTK_ERR_fhrmean_${tc_name}_global.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_global.png CRTK_ERR_fhrmean_${tc_name}_global.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/tc_pairs ]; then mkdir -p ${COMOUTroot}/tc_pairs; fi
+ if [ ! -d ${COMOUTroot}/tc_stat ]; then mkdir -p ${COMOUTroot}/tc_stat; fi
+ cp -r ${STORMroot}/tc_pairs/* ${COMOUTroot}/tc_pairs/.
+ cp -r ${STORMroot}/tc_stat/* ${COMOUTroot}/tc_stat/.
+ if [ ${stormBasin} = "al" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTatl}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTepa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTwpa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ fi
+ if [ "$savePlots" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/plot ]; then mkdir -p ${COMOUTroot}/plot; fi
+# cp -r ${STORMroot}/plot/${tc_name}/images/* ${COMOUTroot}/plot/.
+ cp ${STORMroot}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_det.abswind_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_det.wind_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_det.abstk_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_det.altk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_det.crtk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ fi
+ fi
+fi
+
+### end: b-deck file exists and is non-empty
+fi
+fi
+### num do loop end
+done
+
+#--- Atlantic/EastPacific/WestPacific Basin TC_Stat
+if [ ${stormBasin} = "al" ]; then
+ export COMOUTbas=${COMOUTatl}
+elif [ ${stormBasin} = "ep" ]; then
+ export COMOUTbas=${COMOUTepa}
+elif [ ${stormBasin} = "wp" ]; then
+ export COMOUTbas=${COMOUTwpa}
+fi
+
+nfile=$(ls ${COMOUTbas}/*.tcst |wc -l)
+if [ $nfile -ne 0 ]; then
+
+export mdh=010100
+export startdateB=${YYYY}${mdh}
+export metTCcomin=${COMOUTbas}
+
+if [ ${stormBasin} = "al" ]; then
+ export metTCcomout=${DATA}/metTC/atlantic
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "ep" ]; then
+ export metTCcomout=${DATA}/metTC/eastpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "wp" ]; then
+ export metTCcomout=${DATA}/metTC/westpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+fi
+
+cd $metTCcomout
+#export SEARCH1=INPUT_BASE_template
+#export SEARCH2=OUTPUT_BASE_template
+#export SEARCH3=INIT_BEG_template
+#export SEARCH4=INIT_END_template
+
+cp ${PARMevs}/TCStat_template_basin.conf .
+sed -i "s|$SEARCH1|$metTCcomin|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH2|$metTCcomout|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH3|$startdateB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH4|$startdateB|g" TCStat_template_basin.conf
+
+#export SEARCH7="TC_STAT_INIT_BEG_temp"
+#export SEARCH8="TC_STAT_INIT_END_temp"
+export firstday="0101_00"
+export lastday="1231_18"
+export symdhB=${YYYY}${firstday}
+export eymdhB=${YYYY}${lastday}
+echo "$symdhB, $eymdhB"
+
+sed -i "s|$SEARCH7|$symdhB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH8|$eymdhB|g" TCStat_template_basin.conf
+
+run_metplus.py -c ${metTCcomout}/TCStat_template_basin.conf
+if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/tc_stat ]; then mkdir -p ${COMOUTbas}/tc_stat; fi
+ cp ${metTCcomout}/tc_stat/tc_stat.out ${COMOUTbas}/tc_stat/tc_stat_basin.out
+ cp ${metTCcomout}/tc_stat/tc_stat_summary.tcst ${COMOUTbas}/tc_stat/tc_stat_summary_basin.tcst
+fi
+fi
+# NOTE(review): if no .tcst files were found above, metTCcomout/PLOTDATA below may be unset or left over from a previous basin iteration -- confirm
+#--- Basin-Storms Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${metTCcomout}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120,132,144,156,168"
+export model_tmp_atcf_name_list="MD01,MD02,MD03"
+export model_plot_name_list="GFS,ECMWF,CMC"
+export plot_CI_bars="NO"
+export stormNameB=Basin
+export tc_name=${stbasin}${under}${stormYear}${under}${stormNameB}
+export basin=${stbasin}
+export tc_num=
+export tropcyc_model_type="global"
+python ${USHevs}/plot_tropcyc_lead_average.py
+
+bimgs=$(ls ${metTCcomout}/plot/${tc_name}/images/* |wc -l)
+if [ $bimgs -ne 0 ]; then
+ cd ${metTCcomout}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_global.png ABSTK_ERR_fhrmean_${tc_name}_global.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_global.png ALTK_ERR_fhrmean_${tc_name}_global.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_global.png CRTK_ERR_fhrmean_${tc_name}_global.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/plot ]; then mkdir -p ${COMOUTbas}/plot; fi
+ if [ "$savePlots" = 'YES' ]; then
+# cp -r ${metTCcomout}/plot/${tc_name}/images/* ${COMOUTbas}/plot/.
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_det.abswind_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_det.wind_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_det.abstk_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_det.altk_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_det.crtk_bias.${stormBasin}.${stormYear}.season.png
+ fi
+ fi
+fi
+### bas do loop end
+done
diff --git a/scripts/hurricane/stats/exevs_hurricane_global_ens_tropcyc_stats.sh b/scripts/hurricane/stats/exevs_hurricane_global_ens_tropcyc_stats.sh
new file mode 100755
index 0000000000..2b76b8dca3
--- /dev/null
+++ b/scripts/hurricane/stats/exevs_hurricane_global_ens_tropcyc_stats.sh
@@ -0,0 +1,298 @@
+#!/bin/bash
+export PS4=' + exevs_hurricane_global_ens_tropcyc_stats.sh line $LINENO: '
+#--- Purpose: TC track/intensity verification stats and plots for global ensemble means (GEFS/ECMWF-EPS/CMC-ENS)
+export savePlots=${savePlots:-YES}
+export stormYear=${YYYY}
+export basinlist="al ep wp"
+export numlist="01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 \
+ 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40"
+# Loop over every basin/storm-number pair; storms without a non-empty b-deck file are skipped below.
+for bas in $basinlist; do
+### bas do loop start
+for num in $numlist; do
+### num do loop start
+
+export stormBasin=${bas}
+export stbasin=`echo ${stormBasin} | tr "[a-z]" "[A-Z]"`
+echo "${stbasin} upper case: AL/EP/WP"
+export stormNumber=${num}
+
+if [ ${stormBasin} = "al" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTatl=${COMOUT}/Atlantic
+ if [ ! -d ${COMOUTatl} ]; then mkdir -p ${COMOUTatl}; fi
+elif [ ${stormBasin} = "ep" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTepa=${COMOUT}/EastPacific
+ if [ ! -d ${COMOUTepa} ]; then mkdir -p ${COMOUTepa}; fi
+elif [ ${stormBasin} = "wp" ]; then
+ COMINbdeck=${COMINbdeckJTWC}
+ export COMOUTwpa=${COMOUT}/WestPacific
+ if [ ! -d ${COMOUTwpa} ]; then mkdir -p ${COMOUTwpa}; fi
+fi
+
+export bdeckfile=${COMINbdeck}/b${stormBasin}${stormNumber}${stormYear}.dat
+if [ -f ${bdeckfile} ]; then
+numrecs=`cat ${bdeckfile} | wc -l`
+if [ ${numrecs} -gt 0 ]; then
+### start: b-deck file exists and is non-empty
+
+export STORMroot=${DATA}/metTC/${bas}${num}
+if [ ! -d ${STORMroot} ]; then mkdir -p ${STORMroot}; fi
+export STORMdata=${STORMroot}/data
+if [ ! -d ${STORMdata} ]; then mkdir -p ${STORMdata}; fi
+export COMOUTroot=${COMOUT}/${bas}${num}
+if [ ! -d ${COMOUTroot} ]; then mkdir -p ${COMOUTroot}; fi
+cd ${STORMdata}
+
+#---get the storm name from TC-vital file "syndat_tcvitals.${YYYY}"
+#---copy bdeck files to ${STORMdata}
+if [ ${stormBasin} = "al" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}L" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}E" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMINbdeckJTWC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "JTWC ${stormNumber}W" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+fi
+cat TCvit_tail.txt|cut -c10-18 > TCname.txt
+VARIABLE1=$( head -n 1 TCname.txt )
+echo "$VARIABLE1"
+VARIABLE2=$(printf '%s' "$VARIABLE1" | sed 's/[0-9]//g')
+echo "$VARIABLE2"
+stormName=$(sed "s/ //g" <<< $VARIABLE2)
+echo "Name_${stormName}_Name"
+echo "${stormBasin}, ${stormNumber}, ${stormYear}, ${stormName}"
+
+#---get the ensemble-mean forecast tracks "AEMN/EEMN/CEMN" from archive file "tracks.atcfunix.${YY22}"
+grep "${stbasin}, ${stormNumber}" ${COMINtrack} > tracks.atcfunix.${YY22}_${stormBasin}${stormNumber}
+grep "03, AEMN" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} > a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, EEMN" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, CEMN" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, AEMN/03, MD01/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, EEMN/03, MD02/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, CEMN/03, MD03/' a${stormBasin}${stormNumber}${stormYear}.dat
+
+#---get the $startdate, $enddate[YYMMDDHH] from the best track file
+echo $(head -n 1 ${bdeckfile}) > head.txt
+echo $(tail -n 1 ${bdeckfile}) > tail.txt
+cat head.txt|cut -c9-18 > startymdh.txt
+cat tail.txt|cut -c9-18 > endymdh.txt
+firstcyc=$( head -n 1 startymdh.txt )
+lastcyc=$( head -n 1 endymdh.txt )
+
+export YY01=`echo $firstcyc | cut -c1-4`
+export MM01=`echo $firstcyc | cut -c5-6`
+export DD01=`echo $firstcyc | cut -c7-8`
+export HH01=`echo $firstcyc | cut -c9-10`
+
+export YY02=`echo $lastcyc | cut -c1-4`
+export MM02=`echo $lastcyc | cut -c5-6`
+export DD02=`echo $lastcyc | cut -c7-8`
+export HH02=`echo $lastcyc | cut -c9-10`
+
+export startdate="$YY01$MM01$DD01$HH01"
+export enddate="$YY02$MM02$DD02$HH02"
+echo "$startdate, $enddate"
+
+#--- run for TC_pairs
+cp ${PARMevs}/TCPairs_template.conf .
+export SEARCH1="INPUT_BASE_template"
+export SEARCH2="OUTPUT_BASE_template"
+export SEARCH3="INIT_BEG_template"
+export SEARCH4="INIT_END_template"
+export SEARCH5="TC_PAIRS_CYCLONE_template"
+export SEARCH6="TC_PAIRS_BASIN_template"
+
+sed -i "s|$SEARCH1|$STORMdata|g" TCPairs_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCPairs_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCPairs_template.conf
+sed -i "s|$SEARCH4|$enddate|g" TCPairs_template.conf
+sed -i "s|$SEARCH5|$stormNumber|g" TCPairs_template.conf
+sed -i "s|$SEARCH6|$stbasin|g" TCPairs_template.conf
+
+run_metplus.py -c $STORMdata/TCPairs_template.conf
+
+#--- run for TC_stat
+cd $STORMdata
+cp ${PARMevs}/TCStat_template.conf .
+sed -i "s|$SEARCH1|$STORMdata|g" TCStat_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCStat_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCStat_template.conf
+sed -i "s|$SEARCH4|$startdate|g" TCStat_template.conf
+# NOTE(review): INIT_BEG and INIT_END are both set to $startdate, presumably so METplus runs TCStat once; the actual filter window is TC_STAT_INIT_BEG/END below -- confirm
+export SEARCH7="TC_STAT_INIT_BEG_temp"
+export SEARCH8="TC_STAT_INIT_END_temp"
+export under="_"
+export symdh=${YY01}${MM01}${DD01}${under}${HH01}
+export eymdh=${YY02}${MM02}${DD02}${under}${HH02}
+echo "$symdh, $eymdh"
+
+sed -i "s|$SEARCH7|$symdh|g" TCStat_template.conf
+sed -i "s|$SEARCH8|$eymdh|g" TCStat_template.conf
+
+run_metplus.py -c $STORMdata/TCStat_template.conf
+
+#---Storm Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${STORMroot}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120,132,144,156,168"
+export model_tmp_atcf_name_list="MD01,MD02,MD03"
+export model_plot_name_list="GEFS,EENS,CENS"
+export plot_CI_bars="NO"
+export tc_name=${stbasin}${under}${stormYear}${under}${stormName}
+export basin=${stbasin}
+export tc_num=${stormNumber}
+export tropcyc_model_type="global"
+python ${USHevs}/plot_tropcyc_lead_average.py
+
+# Images land under ${STORMroot}/plot/<BASIN>_<YYYY>_<STORMNAME>/images (e.g. WP_2022_MALAKAS)
+nimgs=$(ls ${STORMroot}/plot/${tc_name}/images/* |wc -l)
+if [ $nimgs -ne 0 ]; then
+ cd ${STORMroot}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_global.png ABSTK_ERR_fhrmean_${tc_name}_global.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_global.png ALTK_ERR_fhrmean_${tc_name}_global.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_global.png CRTK_ERR_fhrmean_${tc_name}_global.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/tc_pairs ]; then mkdir -p ${COMOUTroot}/tc_pairs; fi
+ if [ ! -d ${COMOUTroot}/tc_stat ]; then mkdir -p ${COMOUTroot}/tc_stat; fi
+ cp -r ${STORMroot}/tc_pairs/* ${COMOUTroot}/tc_pairs/.
+ cp -r ${STORMroot}/tc_stat/* ${COMOUTroot}/tc_stat/.
+ if [ ${stormBasin} = "al" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTatl}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTepa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTwpa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ fi
+ if [ "$savePlots" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/plot ]; then mkdir -p ${COMOUTroot}/plot; fi
+# cp -r ${STORMroot}/plot/${tc_name}/images/* ${COMOUTroot}/plot/.
+ cp ${STORMroot}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_ens.abswind_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_ens.wind_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_ens.abstk_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_ens.altk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTroot}/plot/evs.hurricane_global_ens.crtk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ fi
+ fi
+fi
+
+### end: b-deck file exists and is non-empty
+fi
+fi
+### num do loop end
+done
+
+#--- Atlantic/EastPacific/WestPacific Basin TC_Stat
+if [ ${stormBasin} = "al" ]; then
+ export COMOUTbas=${COMOUTatl}
+elif [ ${stormBasin} = "ep" ]; then
+ export COMOUTbas=${COMOUTepa}
+elif [ ${stormBasin} = "wp" ]; then
+ export COMOUTbas=${COMOUTwpa}
+fi
+
+nfile=$(ls ${COMOUTbas}/*.tcst |wc -l)
+if [ $nfile -ne 0 ]; then
+
+export mdh=010100
+export startdateB=${YYYY}${mdh}
+export metTCcomin=${COMOUTbas}
+
+if [ ${stormBasin} = "al" ]; then
+ export metTCcomout=${DATA}/metTC/atlantic
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "ep" ]; then
+ export metTCcomout=${DATA}/metTC/eastpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "wp" ]; then
+ export metTCcomout=${DATA}/metTC/westpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+fi
+
+cd $metTCcomout
+#export SEARCH1=INPUT_BASE_template
+#export SEARCH2=OUTPUT_BASE_template
+#export SEARCH3=INIT_BEG_template
+#export SEARCH4=INIT_END_template
+
+cp ${PARMevs}/TCStat_template_basin.conf .
+sed -i "s|$SEARCH1|$metTCcomin|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH2|$metTCcomout|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH3|$startdateB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH4|$startdateB|g" TCStat_template_basin.conf
+
+#export SEARCH7="TC_STAT_INIT_BEG_temp"
+#export SEARCH8="TC_STAT_INIT_END_temp"
+export firstday="0101_00"
+export lastday="1231_18"
+export symdhB=${YYYY}${firstday}
+export eymdhB=${YYYY}${lastday}
+echo "$symdhB, $eymdhB"
+
+sed -i "s|$SEARCH7|$symdhB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH8|$eymdhB|g" TCStat_template_basin.conf
+
+run_metplus.py -c ${metTCcomout}/TCStat_template_basin.conf
+if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/tc_stat ]; then mkdir -p ${COMOUTbas}/tc_stat; fi
+ cp ${metTCcomout}/tc_stat/tc_stat.out ${COMOUTbas}/tc_stat/tc_stat_basin.out
+ cp ${metTCcomout}/tc_stat/tc_stat_summary.tcst ${COMOUTbas}/tc_stat/tc_stat_summary_basin.tcst
+fi
+fi
+# NOTE(review): if no .tcst files were found above, metTCcomout/PLOTDATA below may be unset or left over from a previous basin iteration -- confirm
+#--- Basin-Storms Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${metTCcomout}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120,132,144,156,168"
+export model_tmp_atcf_name_list="MD01,MD02,MD03"
+export model_plot_name_list="GEFS,EENS,CENS"
+export plot_CI_bars="NO"
+export stormNameB=Basin
+export tc_name=${stbasin}${under}${stormYear}${under}${stormNameB}
+export basin=${stbasin}
+export tc_num=
+export tropcyc_model_type="global"
+python ${USHevs}/plot_tropcyc_lead_average.py
+
+bimgs=$(ls ${metTCcomout}/plot/${tc_name}/images/* |wc -l)
+if [ $bimgs -ne 0 ]; then
+ cd ${metTCcomout}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_global.png ABSTK_ERR_fhrmean_${tc_name}_global.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_global.png ALTK_ERR_fhrmean_${tc_name}_global.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_global.png CRTK_ERR_fhrmean_${tc_name}_global.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/plot ]; then mkdir -p ${COMOUTbas}/plot; fi
+ if [ "$savePlots" = 'YES' ]; then
+# cp -r ${metTCcomout}/plot/${tc_name}/images/* ${COMOUTbas}/plot/.
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_ens.abswind_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_ens.wind_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_ens.abstk_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_ens.altk_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_global.gif ${COMOUTbas}/plot/evs.hurricane_global_ens.crtk_bias.${stormBasin}.${stormYear}.season.png
+ fi
+ fi
+fi
+### bas do loop end
+done
diff --git a/scripts/hurricane/stats/exevs_hurricane_regional_tropcyc_stats.sh b/scripts/hurricane/stats/exevs_hurricane_regional_tropcyc_stats.sh
new file mode 100755
index 0000000000..d1f91db646
--- /dev/null
+++ b/scripts/hurricane/stats/exevs_hurricane_regional_tropcyc_stats.sh
@@ -0,0 +1,304 @@
+#!/bin/bash
+export PS4=' + exevs_hurricane_regional_tropcyc_stats.sh line $LINENO: '
+#---There is no track forecast for West Pacific from HMON model
+
+export savePlots=${savePlots:-YES}
+export stormYear=${YYYY}
+export basinlist="al ep"
+export numlist="01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 \
+ 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40"
+
+for bas in $basinlist; do
+### bas do loop start
+for num in $numlist; do
+### num do loop start
+
+export stormBasin=${bas}
+export stbasin=`echo ${stormBasin} | tr "[a-z]" "[A-Z]"`
+echo "${stbasin} upper case: AL/EP/WP"
+export stormNumber=${num}
+
+if [ ${stormBasin} = "al" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTatl=${COMOUT}/Atlantic
+ if [ ! -d ${COMOUTatl} ]; then mkdir -p ${COMOUTatl}; fi
+elif [ ${stormBasin} = "ep" ]; then
+ COMINbdeck=${COMINbdeckNHC}
+ export COMOUTepa=${COMOUT}/EastPacific
+ if [ ! -d ${COMOUTepa} ]; then mkdir -p ${COMOUTepa}; fi
+elif [ ${stormBasin} = "wp" ]; then
+ COMINbdeck=${COMINbdeckJTWC}
+ export COMOUTwpa=${COMOUT}/WestPacific
+ if [ ! -d ${COMOUTwpa} ]; then mkdir -p ${COMOUTwpa}; fi
+fi
+
+export bdeckfile=${COMINbdeck}/b${stormBasin}${stormNumber}${stormYear}.dat
+if [ -f ${bdeckfile} ]; then
+numrecs=`cat ${bdeckfile} | wc -l`
+if [ ${numrecs} -gt 0 ]; then
+### two ifs start
+
+export STORMroot=${DATA}/metTC/${bas}${num}
+if [ ! -d ${STORMroot} ]; then mkdir -p ${STORMroot}; fi
+export STORMdata=${STORMroot}/data
+if [ ! -d ${STORMdata} ]; then mkdir -p ${STORMdata}; fi
+export COMOUTroot=${COMOUT}/${bas}${num}
+if [ ! -d ${COMOUTroot} ]; then mkdir -p ${COMOUTroot}; fi
+cd ${STORMdata}
+
+#---get the storm name from TC-vital file "syndat_tcvitals.${YYYY}"
+#---copy bdeck files to ${STORMdata}
+if [ ${stormBasin} = "al" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}L" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMINbdeckNHC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "NHC ${stormNumber}E" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+ sed -i 's/NHC/NHCC/' TCvit_tail.txt
+elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMINbdeckJTWC}/b${stormBasin}${stormNumber}${stormYear}.dat ${STORMdata}/.
+ grep "JTWC ${stormNumber}W" ${COMINvit} > syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}
+ echo $(tail -n 1 syndat_tcvitals.${YYYY}.${stormBasin}${stormNumber}) > TCvit_tail.txt
+fi
+cat TCvit_tail.txt|cut -c10-18 > TCname.txt
+VARIABLE1=$( head -n 1 TCname.txt )
+echo "$VARIABLE1"
+VARIABLE2=$(printf '%s' "$VARIABLE1" | sed 's/[0-9]//g')
+echo "$VARIABLE2"
+stormName=$(sed "s/ //g" <<< $VARIABLE2)
+echo "Name_${stormName}_Name"
+echo "${stormBasin}, ${stormNumber}, ${stormYear}, ${stormName}"
+
+#---get the model forecast tracks "AVNO/EMX/CMC" from archive file "tracks.atcfunix.${YY22}"
+grep "${stbasin}, ${stormNumber}" ${COMINtrack} > tracks.atcfunix.${YY22}_${stormBasin}${stormNumber}
+grep "03, AVNO" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} > a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, HWRF" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, HMON" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+grep "03, CTCX" tracks.atcfunix.${YY22}_${stormBasin}${stormNumber} >> a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, AVNO/03, MD01/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, HWRF/03, MD02/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, HMON/03, MD03/' a${stormBasin}${stormNumber}${stormYear}.dat
+sed -i 's/03, CTCX/03, MD04/' a${stormBasin}${stormNumber}${stormYear}.dat
+
+#---get the $startdate, $enddate[YYMMDDHH] from the best track file
+echo $(head -n 1 ${bdeckfile}) > head.txt
+echo $(tail -n 1 ${bdeckfile}) > tail.txt
+cat head.txt|cut -c9-18 > startymdh.txt
+cat tail.txt|cut -c9-18 > endymdh.txt
+firstcyc=$( head -n 1 startymdh.txt )
+lastcyc=$( head -n 1 endymdh.txt )
+
+export YY01=`echo $firstcyc | cut -c1-4`
+export MM01=`echo $firstcyc | cut -c5-6`
+export DD01=`echo $firstcyc | cut -c7-8`
+export HH01=`echo $firstcyc | cut -c9-10`
+
+export YY02=`echo $lastcyc | cut -c1-4`
+export MM02=`echo $lastcyc | cut -c5-6`
+export DD02=`echo $lastcyc | cut -c7-8`
+export HH02=`echo $lastcyc | cut -c9-10`
+
+export startdate="$YY01$MM01$DD01$HH01"
+export enddate="$YY02$MM02$DD02$HH02"
+echo "$startdate, $enddate"
+
+#--- run for TC_pairs
+#cp ${PARMevs}/TCPairs_template.conf .
+cp ${PARMevs}/TCPairs_template_regional.conf TCPairs_template.conf
+export SEARCH1="INPUT_BASE_template"
+export SEARCH2="OUTPUT_BASE_template"
+export SEARCH3="INIT_BEG_template"
+export SEARCH4="INIT_END_template"
+export SEARCH5="TC_PAIRS_CYCLONE_template"
+export SEARCH6="TC_PAIRS_BASIN_template"
+
+sed -i "s|$SEARCH1|$STORMdata|g" TCPairs_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCPairs_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCPairs_template.conf
+sed -i "s|$SEARCH4|$enddate|g" TCPairs_template.conf
+sed -i "s|$SEARCH5|$stormNumber|g" TCPairs_template.conf
+sed -i "s|$SEARCH6|$stbasin|g" TCPairs_template.conf
+
+run_metplus.py -c $STORMdata/TCPairs_template.conf
+
+#--- run for TC_stat
+cd $STORMdata
+#cp ${PARMevs}/TCStat_template.conf .
+cp ${PARMevs}/TCStat_template_regional.conf TCStat_template.conf
+sed -i "s|$SEARCH1|$STORMdata|g" TCStat_template.conf
+sed -i "s|$SEARCH2|$STORMroot|g" TCStat_template.conf
+sed -i "s|$SEARCH3|$startdate|g" TCStat_template.conf
+sed -i "s|$SEARCH4|$enddate|g" TCStat_template.conf
+
+export SEARCH7="TC_STAT_INIT_BEG_temp"
+export SEARCH8="TC_STAT_INIT_END_temp"
+export under="_"
+export symdh=${YY01}${MM01}${DD01}${under}${HH01}
+export eymdh=${YY02}${MM02}${DD02}${under}${HH02}
+echo "$symdh, $eymdh"
+
+sed -i "s|$SEARCH7|$symdh|g" TCStat_template.conf
+sed -i "s|$SEARCH8|$eymdh|g" TCStat_template.conf
+
+run_metplus.py -c $STORMdata/TCStat_template.conf
+
+#---Storm Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${STORMroot}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120"
+export model_tmp_atcf_name_list="MD01,MD02,MD03,MD04"
+export model_plot_name_list="GFS,HWRF,HMON,CTCX"
+export plot_CI_bars="NO"
+export tc_name=${stbasin}${under}${stormYear}${under}${stormName}
+export basin=${stbasin}
+export tc_num=${stormNumber}
+export tropcyc_model_type="regional"
+python ${USHevs}/plot_tropcyc_lead_regional.py
+
+#/lfs/h2/emc/ptmp/jiayi.peng/metTC/wp02/plot/WP_2022_MALAKAS/images
+nimgs=$(ls ${STORMroot}/plot/${tc_name}/images/* |wc -l)
+if [ $nimgs -ne 0 ]; then
+ cd ${STORMroot}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_regional.png ABSTK_ERR_fhrmean_${tc_name}_regional.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_regional.png ALTK_ERR_fhrmean_${tc_name}_regional.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_regional.png CRTK_ERR_fhrmean_${tc_name}_regional.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/tc_pairs ]; then mkdir -p ${COMOUTroot}/tc_pairs; fi
+ if [ ! -d ${COMOUTroot}/tc_stat ]; then mkdir -p ${COMOUTroot}/tc_stat; fi
+ cp -r ${STORMroot}/tc_pairs/* ${COMOUTroot}/tc_pairs/.
+ cp -r ${STORMroot}/tc_stat/* ${COMOUTroot}/tc_stat/.
+ if [ ${stormBasin} = "al" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTatl}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "ep" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTepa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ elif [ ${stormBasin} = "wp" ]; then
+ cp ${COMOUTroot}/tc_stat/tc_stat_summary.tcst ${COMOUTwpa}/${stormBasin}${stormNumber}${stormYear}_stat_summary.tcst
+ fi
+ if [ "$savePlots" = 'YES' ]; then
+ if [ ! -d ${COMOUTroot}/plot ]; then mkdir -p ${COMOUTroot}/plot; fi
+# cp -r ${STORMroot}/plot/${tc_name}/images/* ${COMOUTroot}/plot/.
+ cp ${STORMroot}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif ${COMOUTroot}/plot/evs.hurricane_regional.abswind_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif ${COMOUTroot}/plot/evs.hurricane_regional.wind_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTroot}/plot/evs.hurricane_regional.abstk_err.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTroot}/plot/evs.hurricane_regional.altk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ cp ${STORMroot}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTroot}/plot/evs.hurricane_regional.crtk_bias.${stormBasin}.${stormYear}.${stormName}${stormNumber}.png
+ fi
+ fi
+fi
+
+### two ifs end
+fi
+fi
+### num do loop end
+done
+
+#--- Atlantic/EastPacific/WestPacific Basin TC_Stat
+if [ ${stormBasin} = "al" ]; then
+ export COMOUTbas=${COMOUTatl}
+elif [ ${stormBasin} = "ep" ]; then
+ export COMOUTbas=${COMOUTepa}
+elif [ ${stormBasin} = "wp" ]; then
+ export COMOUTbas=${COMOUTwpa}
+fi
+
+nfile=$(ls ${COMOUTbas}/*.tcst |wc -l)
+if [ $nfile -ne 0 ]; then
+
+export mdh=010100
+export startdateB=${YYYY}${mdh}
+export metTCcomin=${COMOUTbas}
+
+if [ ${stormBasin} = "al" ]; then
+ export metTCcomout=${DATA}/metTC/atlantic
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "ep" ]; then
+ export metTCcomout=${DATA}/metTC/eastpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+elif [ ${stormBasin} = "wp" ]; then
+ export metTCcomout=${DATA}/metTC/westpacific
+ if [ ! -d $metTCcomout ]; then mkdir -p $metTCcomout; fi
+fi
+
+cd $metTCcomout
+#export SEARCH1=INPUT_BASE_template
+#export SEARCH2=OUTPUT_BASE_template
+#export SEARCH3=INIT_BEG_template
+#export SEARCH4=INIT_END_template
+
+#cp ${PARMevs}/TCStat_template_basin.conf .
+cp ${PARMevs}/TCStat_template_basin_regional.conf TCStat_template_basin.conf
+sed -i "s|$SEARCH1|$metTCcomin|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH2|$metTCcomout|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH3|$startdateB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH4|$startdateB|g" TCStat_template_basin.conf
+
+#export SEARCH7="TC_STAT_INIT_BEG_temp"
+#export SEARCH8="TC_STAT_INIT_END_temp"
+export firstday="0101_00"
+export lastday="1231_18"
+export symdhB=${YYYY}${firstday}
+export eymdhB=${YYYY}${lastday}
+echo "$symdhB, $eymdhB"
+
+sed -i "s|$SEARCH7|$symdhB|g" TCStat_template_basin.conf
+sed -i "s|$SEARCH8|$eymdhB|g" TCStat_template_basin.conf
+
+run_metplus.py -c ${metTCcomout}/TCStat_template_basin.conf
+if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/tc_stat ]; then mkdir -p ${COMOUTbas}/tc_stat; fi
+ cp ${metTCcomout}/tc_stat/tc_stat.out ${COMOUTbas}/tc_stat/tc_stat_basin.out
+ cp ${metTCcomout}/tc_stat/tc_stat_summary.tcst ${COMOUTbas}/tc_stat/tc_stat_summary_basin.tcst
+fi
+fi
+
+#--- Basin-Storms Plots
+export LOGOroot=${FIXevs}
+export PLOTDATA=${metTCcomout}
+#export RUN="tropcyc"
+export img_quality="low"
+
+export fhr_list="0,12,24,36,48,60,72,84,96,108,120"
+export model_tmp_atcf_name_list="MD01,MD02,MD03,MD04"
+export model_plot_name_list="GFS,HWRF,HMON,CTCX"
+export plot_CI_bars="NO"
+export stormNameB=Basin
+export tc_name=${stbasin}${under}${stormYear}${under}${stormNameB}
+export basin=${stbasin}
+export tc_num=
+export tropcyc_model_type="regional"
+python ${USHevs}/plot_tropcyc_lead_regional.py
+
+bimgs=$(ls ${metTCcomout}/plot/${tc_name}/images/* |wc -l)
+if [ $bimgs -ne 0 ]; then
+ cd ${metTCcomout}/plot/${tc_name}/images
+ convert ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.png ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif
+ convert AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.png AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif
+
+ convert ABSTK_ERR_fhrmean_${tc_name}_regional.png ABSTK_ERR_fhrmean_${tc_name}_regional.gif
+ convert ALTK_ERR_fhrmean_${tc_name}_regional.png ALTK_ERR_fhrmean_${tc_name}_regional.gif
+ convert CRTK_ERR_fhrmean_${tc_name}_regional.png CRTK_ERR_fhrmean_${tc_name}_regional.gif
+ rm -f *.png
+ if [ "$SENDCOM" = 'YES' ]; then
+ if [ ! -d ${COMOUTbas}/plot ]; then mkdir -p ${COMOUTbas}/plot; fi
+ if [ "$savePlots" = 'YES' ]; then
+# cp -r ${metTCcomout}/plot/${tc_name}/images/* ${COMOUTbas}/plot/.
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSAMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif ${COMOUTbas}/plot/evs.hurricane_regional.abswind_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/AMAX_WIND-BMAX_WIND_fhrmean_${tc_name}_regional.gif ${COMOUTbas}/plot/evs.hurricane_regional.wind_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ABSTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTbas}/plot/evs.hurricane_regional.abstk_err.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/ALTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTbas}/plot/evs.hurricane_regional.altk_bias.${stormBasin}.${stormYear}.season.png
+ cp -r ${metTCcomout}/plot/${tc_name}/images/CRTK_ERR_fhrmean_${tc_name}_regional.gif ${COMOUTbas}/plot/evs.hurricane_regional.crtk_bias.${stormBasin}.${stormYear}.season.png
+ fi
+ fi
+fi
+### bas do loop end
+done
diff --git a/ush/hurricane/stats/false_al.py b/ush/hurricane/stats/false_al.py
new file mode 100755
index 0000000000..bcfff033dd
--- /dev/null
+++ b/ush/hurricane/stats/false_al.py
@@ -0,0 +1,109 @@
+##Python script to plot TC genesis/HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+###FOR THE INTERESTED DOMAIN PART###############
+##e.g Alaska, CONUS etc
+## Map proj are different for different areas.
+domain = "atlantic"
+if(domain == "atlantic"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-55.0))
+# ax.set_extent([260, 370, -5, 65], crs=ccrs.PlateCarree())
+ ax.set_extent([260, 370, -5, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-130, -120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10]
+# yticks = [-5, 0, 10, 20, 30, 40, 50, 60, 65]
+ yticks = [-5, 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a workaround
+###for the known issues with
+###cartopy LCC/NorthPolarStereo projection.
+ plt.annotate("0 " , (0,0), (-6,18), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,52), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,85), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,120), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,157), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("50N " , (0,0), (-15,198), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("60N " , (0,0), (-15,245), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("90W " , (0,0), (27,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("80W " , (0,0), (60,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("70W " , (0,0), (92,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("60W " , (0,0), (125,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("50W " , (0,0), (157,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40W " , (0,0), (189,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30W " , (0,0), (220,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20W " , (0,0), (253,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("0 " , (0,0), (323,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################atlantic##################################################
+# falsefile="tc_gen_2021_genmpr_FYOY.txt"
+# hits = np.loadtxt(falsefile,dtype=str)
+ falsefile = os.environ['falsefile']
+ hits = np.loadtxt(falsefile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(346, 47,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("False alarms ("+str(numhits)+")", (0,0), (286,185), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+# plt.title(f"Atlantic TC Genesis False Alarms")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/false_ep.py b/ush/hurricane/stats/false_ep.py
new file mode 100755
index 0000000000..9b9541a3b6
--- /dev/null
+++ b/ush/hurricane/stats/false_ep.py
@@ -0,0 +1,100 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "eastpac"
+if(domain == "eastpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-60.0))
+# ax.set_extent([180, 290, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([180, 290, 0, 50], crs=ccrs.PlateCarree())
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-70, -80, -90, -100, -110, -120, -130, -140, -150, -160, -170, -180]
+# yticks = [ 0, 10, 20, 30, 40, 50, 60]
+ yticks = [ 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+########This part is a workaround
+###for the known issues with
+###cartopy LCC/NorthPolarStereo projection.
+# plt.annotate("50N " , (0,0), (-15,180), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,140), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,104), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,69), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,35), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("80W " , (0,0), (318,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("90W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("100W " , (0,0), (250,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("110W " , (0,0), (218,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120W " , (0,0), (185,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130W " , (0,0), (152,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140W " , (0,0), (119,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150W " , (0,0), (87,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160W " , (0,0), (55,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170W " , (0,0), (23,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+###############END OF AREAS##################################
+ falsefile = os.environ['falsefile']
+ hits = np.loadtxt(falsefile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(185, 47,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("False alarms ("+str(numhits)+")", (0,0), (20,168), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+# plt.title(f"East Pacific TC Genesis False Alarms")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/false_wp.py b/ush/hurricane/stats/false_wp.py
new file mode 100755
index 0000000000..fcbc257e09
--- /dev/null
+++ b/ush/hurricane/stats/false_wp.py
@@ -0,0 +1,100 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "westpac"
+if(domain == "westpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=80.0))
+# ax.set_extent([100, 180, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([100, 180, 0, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [100, 110, 120, 130, 140, 150, 160, 170, 180]
+# yticks = [0, 10, 20, 30, 40, 50, 60]
+ yticks = [0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.80, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color=cfeature.COLORS['water']
+, linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a workaround
+###for the known issues with
+###cartopy LCC/NorthPolarStereo projection.
+# plt.annotate("50N " , (0,0), (-15,215), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,191), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,141), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,94), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,47), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("110E " , (0,0), (36,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120E " , (0,0), (83,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130E " , (0,0), (128,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140E " , (0,0), (171,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150E " , (0,0), (216,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160E " , (0,0), (262,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170E " , (0,0), (305,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################asia##################################################
+ falsefile = os.environ['falsefile']
+ hits = np.loadtxt(falsefile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(160, 47,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("False alarms ("+str(numhits)+")", (0,0), (272,230), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+# plt.title(f"West Pacific TC Genesis False Alarms")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/hits_al.py b/ush/hurricane/stats/hits_al.py
new file mode 100755
index 0000000000..7eb5fc0037
--- /dev/null
+++ b/ush/hurricane/stats/hits_al.py
@@ -0,0 +1,109 @@
+##Python script to plot TC genesis/HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+###FOR THE INTERESTED DOMAIN PART###############
+##e.g Alaska, CONUS etc
+## Map proj are different for different areas.
+domain = "atlantic"
+if(domain == "atlantic"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-55.0))
+# ax.set_extent([260, 370, -5, 65], crs=ccrs.PlateCarree())
+ ax.set_extent([260, 370, -5, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-130, -120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10]
+# yticks = [-5, 0, 10, 20, 30, 40, 50, 60, 65]
+ yticks = [-5, 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a workaround
+###for the known issues with
+###cartopy LCC/NorthPolarStereo projection.
+ plt.annotate("0 " , (0,0), (-6,18), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,52), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,85), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,120), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,157), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("50N " , (0,0), (-15,198), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("60N " , (0,0), (-15,245), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("90W " , (0,0), (27,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("80W " , (0,0), (60,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("70W " , (0,0), (92,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("60W " , (0,0), (125,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("50W " , (0,0), (157,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40W " , (0,0), (189,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30W " , (0,0), (220,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20W " , (0,0), (253,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("0 " , (0,0), (323,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################atlantic##################################################
+# hitfile="tc_gen_2021_genmpr_FYOY.txt"
+# hits = np.loadtxt(hitfile,dtype=str)
+ hitfile = os.environ['hitfile']
+ hits = np.loadtxt(hitfile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ plt.scatter(346, 47,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (286,185), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+
+# plt.title(f"Atlantic TC Genesis Hits")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/hits_ep.py b/ush/hurricane/stats/hits_ep.py
new file mode 100755
index 0000000000..a1359b5d6c
--- /dev/null
+++ b/ush/hurricane/stats/hits_ep.py
@@ -0,0 +1,100 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "eastpac"
+if(domain == "eastpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-60.0))
+# ax.set_extent([180, 290, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([180, 290, 0, 50], crs=ccrs.PlateCarree())
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-70, -80, -90, -100, -110, -120, -130, -140, -150, -160, -170, -180]
+# yticks = [ 0, 10, 20, 30, 40, 50, 60]
+ yticks = [ 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+########This part is a roundabout
+###for the known issues with
+###cartopy LCC/Northpolarstero projection.
+# plt.annotate("50N " , (0,0), (-15,180), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,140), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,104), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,69), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,35), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("80W " , (0,0), (318,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("90W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("100W " , (0,0), (250,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("110W " , (0,0), (218,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120W " , (0,0), (185,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130W " , (0,0), (152,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140W " , (0,0), (119,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150W " , (0,0), (87,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160W " , (0,0), (55,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170W " , (0,0), (23,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+###############END OF AREAS##################################
+ hitfile = os.environ['hitfile']
+ hits = np.loadtxt(hitfile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ plt.scatter(185, 47,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (20,168), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+
+# plt.title(f"East Pacific TC Genesis Hits")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/hits_wp.py b/ush/hurricane/stats/hits_wp.py
new file mode 100755
index 0000000000..0f796d3155
--- /dev/null
+++ b/ush/hurricane/stats/hits_wp.py
@@ -0,0 +1,100 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "westpac"
+if(domain == "westpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=80.0))
+# ax.set_extent([100, 180, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([100, 180, 0, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [100, 110, 120, 130, 140, 150, 160, 170, 180]
+# yticks = [0, 10, 20, 30, 40, 50, 60]
+ yticks = [0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.80, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color=cfeature.COLORS['water']
+, linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a roundabout
+###for the known issues with
+###cartopy LCC/Northpolarstero projection.
+# plt.annotate("50N " , (0,0), (-15,215), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,191), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,141), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,94), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,47), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("110E " , (0,0), (36,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120E " , (0,0), (83,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130E " , (0,0), (128,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140E " , (0,0), (171,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150E " , (0,0), (216,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160E " , (0,0), (262,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170E " , (0,0), (305,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################asia##################################################
+ hitfile = os.environ['hitfile']
+ hits = np.loadtxt(hitfile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ plt.scatter(160, 47,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (272,230), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+
+# plt.title(f"West Pacific TC Genesis Hits")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+exit
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/plot_tropcyc_lead_average.py b/ush/hurricane/stats/plot_tropcyc_lead_average.py
new file mode 100755
index 0000000000..00cb74d671
--- /dev/null
+++ b/ush/hurricane/stats/plot_tropcyc_lead_average.py
@@ -0,0 +1,535 @@
+from __future__ import (print_function, division)
+import os
+import sys
+import numpy as np
+#import plot_util as plot_util
+import plot_tropcyc_util as plot_util
+import pandas as pd
+import warnings
+import matplotlib
+import math
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+
+warnings.filterwarnings('ignore')
+
+# Plot Settings: bold, large fonts and fixed subplot margins for the 14x7in figure
+plt.rcParams['font.weight'] = 'bold'
+plt.rcParams['axes.titleweight'] = 'bold'
+plt.rcParams['axes.titlesize'] = 16
+plt.rcParams['axes.titlepad'] = 15
+plt.rcParams['axes.labelweight'] = 'bold'
+plt.rcParams['axes.labelsize'] = 16
+plt.rcParams['axes.labelpad'] = 10
+plt.rcParams['axes.formatter.useoffset'] = False
+plt.rcParams['xtick.labelsize'] = 16
+plt.rcParams['xtick.major.pad'] = 10
+plt.rcParams['ytick.labelsize'] = 16
+plt.rcParams['ytick.major.pad'] = 10
+plt.rcParams['figure.subplot.left'] = 0.1
+plt.rcParams['figure.subplot.right'] = 0.95
+plt.rcParams['figure.subplot.top'] = 0.85
+plt.rcParams['figure.subplot.bottom'] = 0.15
+plt.rcParams['legend.handletextpad'] = 0.25
+plt.rcParams['legend.handlelength'] = 1.25
+plt.rcParams['legend.borderaxespad'] = 0
+plt.rcParams['legend.columnspacing'] = 1.0
+plt.rcParams['legend.frameon'] = False
+x_figsize, y_figsize = 14, 7  # figure size in inches
+legend_bbox_x, legend_bbox_y = 0, 1
+legend_fontsize = 13
+legend_loc = 'upper left'
+legend_ncol = 1
+title_loc = 'center'
+model_obs_plot_settings_dict = {  # line/marker style per model, keyed 'model1'..'model10' in plotting order
+ 'model1': {'color': '#000000',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 3},
+ 'model2': {'color': '#FB2020',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model3': {'color': '#1E3CFF',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model4': {'color': '#E69F00',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model5': {'color': '#00DC00',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model6': {'color': '#56B4E9',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model7': {'color': '#696969',
+ 'marker': 's', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model8': {'color': '#8400C8',
+ 'marker': 'D', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model9': {'color': '#D269C1',
+ 'marker': 's', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model10': {'color': '#F0E492',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'obs': {'color': '#AAAAAA',
+ 'marker': 'None', 'markersize': 0,
+ 'linestyle': 'solid', 'linewidth': 2}
+}
+# modified by Yan Jin -- earlier color assignment kept for reference
+#model_obs_plot_settings_dict = {
+# 'model1': {'color': '#000000',
+# 'marker': 'o', 'markersize': 0,
+# 'linestyle': 'solid', 'linewidth': 3},
+# 'model2': {'color': '#8400C8',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model3': {'color': '#00DC00',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model4': {'color': '#56B4E9',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model5': {'color': '#E69F00',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model6': {'color': '#1E3CFF',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model7': {'color': '#696969',
+# 'marker': 's', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model8': {'color': '#FB2020',
+# 'marker': 'D', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model9': {'color': '#D269C1',
+# 'marker': 's', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model10': {'color': '#F0E492',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'obs': {'color': '#AAAAAA',
+# 'marker': 'None', 'markersize': 0,
+# 'linestyle': 'solid', 'linewidth': 2}
+#}
+noaa_logo_img_array = matplotlib.image.imread(  # NOAA/NWS logos stamped on every figure
+ os.path.join(os.environ['LOGOroot'], 'noaa.png')
+)
+noaa_logo_xpixel_loc = x_figsize*plt.rcParams['figure.dpi']*0.1  # pixel anchors = fraction of figure width/height
+noaa_logo_ypixel_loc = y_figsize*plt.rcParams['figure.dpi']*0.865
+noaa_logo_alpha = 0.5
+nws_logo_img_array = matplotlib.image.imread(
+ os.path.join(os.environ['LOGOroot'], 'nws.png')
+)
+nws_logo_xpixel_loc = x_figsize*plt.rcParams['figure.dpi']*0.9
+nws_logo_ypixel_loc = y_figsize*plt.rcParams['figure.dpi']*0.865
+nws_logo_alpha = 0.5
+# modified by Yan Jin
+case_num_label_x_loc, case_num_label_y_loc = -0.05, -0.15  # axes-fraction anchor of the "# of Cases" row below the x axis
+case_num_tick_y_loc = case_num_label_y_loc + 0.015
+
+# Read in environment variables supplied by the driving ex-script
+PLOTDATA = os.environ['PLOTDATA']  # working dir holding tc_stat output and plot subdirs
+RUN = os.environ['RUN']
+fhr_list = os.environ['fhr_list'].split(',')  # comma-separated forecast hours, e.g. "0,6,12,..."
+fhrs = np.asarray(fhr_list, dtype=int)
+#init_hour_list = os.environ['init_hour_list'].split(',')
+#valid_hour_list = os.environ['valid_hour_list'].split(',')
+#model_atcf_name_list = os.environ['model_atcf_name_list'].split(',')
+model_tmp_atcf_name_list = os.environ['model_tmp_atcf_name_list'].split(',')  # AMODEL keys expected in tc_stat.out
+model_plot_name_list = os.environ['model_plot_name_list'].split(',')  # legend labels, parallel to the list above
+basin = os.environ['basin']
+plot_CI_bars = os.environ['plot_CI_bars']  # 'YES' draws confidence-interval bars
+model_type = os.environ['tropcyc_model_type']
+print(model_type)
+if 'tc_name' in list(os.environ.keys()):
+ plot_info = os.environ['tc_name']  # assumes basin_year_name, e.g. AL_2021_IDA — TODO confirm with caller
+ year = plot_info.split('_')[1]
+ name = plot_info.split('_')[2]
+ tc_num = os.environ['tc_num']
+else:
+ plot_info = basin  # basin-mean mode: two-letter basin code stands in for the storm id
+tc_stat_file_dir = os.path.join(PLOTDATA,'tc_stat')
+#tc_stat_file_dir = os.path.join(PLOTDATA, RUN, 'metplus_output', 'gather',
+# 'tc_stat', plot_info)
+plotting_out_dir_imgs = os.path.join(PLOTDATA, 'plot', plot_info, 'images')
+#plotting_out_dir_imgs = os.path.join(PLOTDATA, RUN, 'metplus_output', 'plot',
+# plot_info, 'images')
+if not os.path.exists(plotting_out_dir_imgs):
+ os.makedirs(plotting_out_dir_imgs)
+img_quality = os.environ['img_quality']  # read but currently unused (dpi override is commented out below)
+
+# Set image quality
+#if img_quality == 'low':
+# plt.rcParams['savefig.dpi'] = 50
+#elif img_quality == 'medium':
+# plt.rcParams['savefig.dpi'] = 75
+
+# Read and plot stats
+print("Working on track and intensity error plots for "+plot_info)
+print("Reading in data")
+summary_tcst_filename = os.path.join(tc_stat_file_dir, 'tc_stat.out')
+if os.path.exists(summary_tcst_filename):
+ nrow = sum(1 for line in open(summary_tcst_filename))
+ if nrow == 3:
+ print("ERROR: "+summary_tcst_filename+" empty")
+ sys.exit(1)
+ else:
+ print(summary_tcst_filename+" exists")
+ summary_tcst_file = open(summary_tcst_filename, 'r')
+ tc_stat_job = summary_tcst_file.readline()
+ summary_tcst_read_columns = summary_tcst_file.readline().split(' ')
+ summary_tcst_file.close()
+ tc_stat_summary_job_columns = []
+ for col in summary_tcst_read_columns:
+ if col != '':
+ tc_stat_summary_job_columns.append(col.rstrip())
+ summary_tcst_data = pd.read_csv(summary_tcst_filename,
+ sep=" ", skiprows=2,
+ skipinitialspace=True,
+ header=None, dtype=str,
+ names=tc_stat_summary_job_columns)
+ summary_tcst_data_groupby_COLUMN = (
+ summary_tcst_data.groupby(['COLUMN'])
+ )
+ for COLUMN_group in summary_tcst_data_groupby_COLUMN.groups.keys():  # one figure per error metric (tc_stat COLUMN)
+ print("Creating plot for "+COLUMN_group)
+ if COLUMN_group == 'AMAX_WIND-BMAX_WIND':
+ formal_stat_name = 'Intensity Bias (knots)'
+ elif COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':
+ formal_stat_name = 'Absolute Intensity Error (knots)'
+ elif COLUMN_group == 'ABS(TK_ERR)':
+ formal_stat_name = 'Absolute Track Error (nm)'
+ elif COLUMN_group == 'ALTK_ERR':
+ formal_stat_name = 'Along Track Bias (nm)'
+ elif COLUMN_group == 'CRTK_ERR':
+ formal_stat_name = 'Cross Track Bias (nm)'
+ else:
+ formal_stat_name = COLUMN_group
+ summary_tcst_data_COLUMN = (
+ summary_tcst_data_groupby_COLUMN.get_group(COLUMN_group)
+ )
+ summary_tcst_data_COLUMN_groupby_AMODEL = (
+ summary_tcst_data_COLUMN.groupby(['AMODEL'])
+ )
+ nmodels = len(
+ summary_tcst_data_COLUMN_groupby_AMODEL.groups.keys()
+ )
+ if nmodels != len(model_tmp_atcf_name_list):  # require every requested model; otherwise skip this metric
+ print("ERROR: Model(s) missing in "+summary_tcst_filename)
+ continue
+ stat_max = np.ma.masked_invalid(np.nan)  # running max of plotted values; starts fully masked
+ fig, ax = plt.subplots(1,1,figsize=(x_figsize, y_figsize))
+ ax.grid(True, color = 'grey', linestyle = '--')
+ ax.axhline(0, color = 'black')
+ ax.set_xlabel('Forecast Hour')
+ ax.xaxis.set_label_coords(0.5, -0.15)
+ if len(fhrs) > 15:  # label every other hour when the axis would get crowded
+ ax.set_xticks(fhrs[::2])
+ ax.set_xticks(fhrs, minor=True)
+ else:
+ ax.set_xticks(fhrs)
+ ax.set_xlim([fhrs[0], fhrs[-1]])
+ ax.set_ylabel(formal_stat_name)
+ model_num = 0
+ CI_bar_max_widths = np.append(np.diff(fhrs),  # nested CI bar widths so overlapping models stay visible
+ fhrs[-1]-fhrs[-2])/1.5
+ CI_bar_min_widths = np.append(np.diff(fhrs),
+ fhrs[-1]-fhrs[-2])/nmodels
+ CI_bar_intvl_widths = (
+ (CI_bar_max_widths-CI_bar_min_widths)/nmodels
+ )
+ tcstat_file_AMODEL_list = (
+ summary_tcst_data_COLUMN_groupby_AMODEL.groups.keys()
+ )
+ for AMODEL in model_tmp_atcf_name_list:
+ AMODEL_idx = model_tmp_atcf_name_list.index(AMODEL)
+ #AMODEL_plot_name = (model_plot_name_list[AMODEL_idx]+' '
+ # +'('+model_atcf_name_list[AMODEL_idx]+')')
+ # modified by Yan Jin
+ AMODEL_plot_name = model_plot_name_list[AMODEL_idx]
+ print("Plotting "+AMODEL_plot_name)
+ model_num+=1
+ model_plot_settings_dict = (
+ model_obs_plot_settings_dict['model'+str(model_num)]
+ )
+ fhrs_column_amodel_mean = np.full_like(fhrs, np.nan,  # per-lead stats, NaN where a lead is missing
+ dtype=float)
+ fhrs_column_amodel_total = np.full_like(fhrs, np.nan,
+ dtype=float)
+ fhrs_column_amodel_mean_ncl = np.full_like(fhrs, np.nan,
+ dtype=float)
+ fhrs_column_amodel_mean_ncu = np.full_like(fhrs, np.nan,
+ dtype=float)
+ if AMODEL not in tcstat_file_AMODEL_list:
+ print("Data for "+AMODEL+" missing...setting to NaN")
+ else:
+ summary_tcst_data_COLUMN_AMODEL = (
+ summary_tcst_data_COLUMN_groupby_AMODEL. \
+ get_group(AMODEL)
+ )
+ summary_tcst_data_COLUMN_AMODEL_LEAD = (
+ summary_tcst_data_COLUMN_AMODEL['LEAD'].values
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_TOTAL = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['TOTAL'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCL = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN_NCL'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCU = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN_NCU'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_STDEV = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['STDEV'].values,
+ dtype=float
+ )
+ leads_list = []
+ for lead in summary_tcst_data_COLUMN_AMODEL_LEAD:  # LEAD strings look like HHMMSS; keep the hour part — TODO confirm padding
+ if lead[0] != '0':
+ leads_list.append(lead[0:3])
+ else:
+ leads_list.append(lead[1:3])
+ leads = np.asarray(leads_list, dtype=int)
+ for fhr in fhrs:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ if fhr in leads:
+ matching_lead_idx = np.where(fhr == leads)[0][0]
+ fhrs_column_amodel_mean[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_total[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_TOTAL[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean_ncl[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCL[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean_ncu[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCU[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean = np.ma.masked_invalid(  # mask missing leads so plotting skips them
+ fhrs_column_amodel_mean
+ )
+ fhrs_column_amodel_total = np.ma.masked_invalid(
+ fhrs_column_amodel_total
+ )
+ fhrs_column_amodel_mean_ncl = np.ma.masked_invalid(
+ fhrs_column_amodel_mean_ncl
+ )
+ fhrs_column_amodel_mean_ncu = np.ma.masked_invalid(
+ fhrs_column_amodel_mean_ncu
+ )
+ if model_num == 1:  # build an (nmodels, nfhrs) stack of case totals for the "# of Cases" row
+ all_amodel_total = [fhrs_column_amodel_total]
+ else:
+ all_amodel_total = np.vstack(
+ (all_amodel_total, fhrs_column_amodel_total)
+ )
+ all_amodel_total = np.ma.masked_invalid(all_amodel_total)
+ count = (
+ len(fhrs_column_amodel_mean)
+ - np.ma.count_masked(fhrs_column_amodel_mean)
+ )
+ mfhrs = np.ma.array(
+ fhrs, mask=np.ma.getmaskarray(fhrs_column_amodel_mean)
+ )
+ if count != 0:  # plot only if this model has at least one unmasked lead
+ ax.plot(mfhrs.compressed(),
+ fhrs_column_amodel_mean.compressed(),
+ color = model_plot_settings_dict['color'],
+ linestyle = model_plot_settings_dict['linestyle'],
+ linewidth = model_plot_settings_dict['linewidth'],
+ marker = model_plot_settings_dict['marker'],
+ markersize = model_plot_settings_dict['markersize'],
+ label=AMODEL_plot_name,
+ zorder=(nmodels-model_num-1)+4)
+ if fhrs_column_amodel_mean.max() > stat_max \
+ or np.ma.is_masked(stat_max):
+ stat_max = fhrs_column_amodel_mean.max()
+ if plot_CI_bars == 'YES':
+ for fhr in fhrs:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ ax.bar(fhrs[fhr_idx],
+ (fhrs_column_amodel_mean_ncu[fhr_idx]
+ - fhrs_column_amodel_mean_ncl[fhr_idx]),
+ bottom=fhrs_column_amodel_mean_ncl[fhr_idx],
+ color='None',
+ width=CI_bar_max_widths-(CI_bar_intvl_widths
+ *(model_num-1)),
+ edgecolor= model_plot_settings_dict['color'],
+ linewidth=0.5)
+ if fhrs_column_amodel_mean_ncu[fhr_idx] > stat_max \
+ or np.ma.is_masked(stat_max):
+ if not np.ma.is_masked(fhrs_column_amodel_mean_ncu[fhr_idx]):
+ stat_max = fhrs_column_amodel_mean_ncu[fhr_idx]
+ # Adjust y axis limits and ticks based on matplotlib's automatic ticks
+ preset_y_axis_tick_min = ax.get_yticks()[0]
+ preset_y_axis_tick_max = ax.get_yticks()[-1]
+ preset_y_axis_tick_inc = ax.get_yticks()[1] - ax.get_yticks()[0]
+
+ # modified by Yan Jin
+ #y_axis_min = math.floor(preset_y_axis_tick_min - 10)
+ #y_axis_max = math.ceil(preset_y_axis_tick_max + 10)
+ if plot_CI_bars == 'YES':  # symmetric range padded by 50 so CI bars fit
+ y_axis_min = math.ceil(preset_y_axis_tick_max) * (-1.0) - 50.0
+ y_axis_max = math.ceil(preset_y_axis_tick_max) + 50.0
+ else:
+ y_axis_min = math.floor(preset_y_axis_tick_min)
+ y_axis_max = math.ceil(preset_y_axis_tick_max)
+
+ #y_axis_min = -max(abs(math.floor(preset_y_axis_tick_min)), abs(math.ceil(preset_y_axis_tick_max)))
+
+ y_axis_tick_inc = preset_y_axis_tick_inc
+ if np.ma.is_masked(stat_max):
+ y_axis_max = preset_y_axis_tick_max
+ else:
+ y_axis_max = preset_y_axis_tick_max
+ while y_axis_max < stat_max:  # grow the top until every plotted value fits
+ y_axis_max = y_axis_max + y_axis_tick_inc
+
+ if COLUMN_group == 'ABS(TK_ERR)' or COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':  # absolute metrics are non-negative
+ y_axis_min = 0
+ #y_axis_max = y_axis_max + 50.0
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max+y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ # Check y axis limit
+ if stat_max >= ax.get_ylim()[1]:
+ while stat_max >= ax.get_ylim()[1]:
+ y_axis_max = y_axis_max + y_axis_tick_inc
+ y_axis_min = y_axis_max * (-1.0) - 50.0
+ if COLUMN_group == 'ABS(TK_ERR)' or COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':
+ y_axis_min = 0
+ y_axis_max = y_axis_max + 50.0
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max + y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ # Add legend, adjust if points in legend
+ if len(ax.lines) != 0:
+ legend = ax.legend(bbox_to_anchor=(legend_bbox_x,
+ legend_bbox_y),
+ loc=legend_loc, ncol=legend_ncol,
+ fontsize=legend_fontsize)
+ plt.draw()
+ legend_box = legend.get_window_extent() \
+ .inverse_transformed(ax.transData)
+ if stat_max > legend_box.y1:
+ while stat_max > legend_box.y1:
+ y_axis_max = y_axis_max + y_axis_tick_inc
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max + y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ legend = ax.legend(
+ bbox_to_anchor=(legend_bbox_x, legend_bbox_y),
+ loc=legend_loc, ncol=legend_ncol,
+ fontsize=legend_fontsize
+ )
+ plt.draw()
+ legend_box = (
+ legend.get_window_extent() \
+ .inverse_transformed(ax.transData)
+ )
+ # Add number of cases below the x axis (only where all models share the sample)
+ x_axis_ticks_fraction = np.linspace(0, 1,len(fhrs), endpoint=True)  # lead positions as axes fractions
+ ax.annotate('# of\nCases',
+ xy=(case_num_label_x_loc, case_num_label_y_loc),
+ xycoords='axes fraction')
+ if len(fhrs) > 15:
+ fhrs_ncase_to_plot = fhrs[::2]
+ else:
+ fhrs_ncase_to_plot = fhrs
+ for fhr in fhrs_ncase_to_plot:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ if not np.ma.is_masked(all_amodel_total[:,fhr_idx]):  # any masked model -> count unknown for this lead
+ if np.all(all_amodel_total[:,fhr_idx]
+ == all_amodel_total[0,fhr_idx]):
+ num_cases = all_amodel_total[0,fhr_idx]
+ num_cases_str = str(int(num_cases))
+ ax.annotate(num_cases_str,
+ xy=(x_axis_ticks_fraction[fhr_idx],
+ case_num_tick_y_loc), size=12,
+ xycoords='axes fraction', ha='center')
+ else:
+ print("Working with nonhomogeneous sample for fhr "
+ +str(fhr)+"...not printing number of cases")
+ props = {  # box style kept for the commented-out significance note below
+ 'boxstyle': 'square',
+ 'pad': 0.35,
+ 'facecolor': 'white',
+ 'linestyle': 'solid',
+ 'linewidth': 1,
+ 'edgecolor': 'black'
+ }
+ x_axis_tick_inc = fhrs[1] - fhrs[0]
+ # modified by Yan Jin
+ #if len(ax.lines) != 0:
+ # ax.text(legend_box.x1 + (x_axis_tick_inc * 0.75),
+ # ax.get_ylim()[1] - (0.15 * y_axis_tick_inc),
+ # 'Note: statistical significance at the 95% '
+ # +'confidence level where confidence intervals '
+ # +'do not intersect',
+ # ha='left', va='top', fontsize=10,
+ # bbox=props, transform=ax.transData)
+ # Build formal plot title
+ full_title = formal_stat_name+'\n'
+ #full_title = ""
+ if basin == 'AL':
+ formal_basin = 'Atlantic'
+ elif basin == 'CP':
+ formal_basin = 'Central Pacific'
+ elif basin == 'EP':
+ formal_basin = 'Eastern Pacific'
+ elif basin == 'WP':  # NOTE(review): any other basin code leaves formal_basin undefined below — confirm allowed values
+ formal_basin = 'Western Pacific'
+ if len(plot_info) == 2:  # two-char plot_info == basin-mean mode; otherwise a single-storm title
+ full_title = full_title+formal_basin+' Mean\n'
+ else:
+ full_title = full_title + name.title().upper() + ' ' + '(' + basin + str(tc_num) + ' ' + year + ')'
+ #full_title = (full_title+'Cycles: '+', '.join(init_hour_list)+', '
+ # +' Valid Hours: '+', '.join(valid_hour_list))
+ ax.set_title(full_title)
+ noaa_img = fig.figimage(noaa_logo_img_array,
+ noaa_logo_xpixel_loc, noaa_logo_ypixel_loc,
+ zorder=1, alpha=noaa_logo_alpha)
+ nws_img = fig.figimage(nws_logo_img_array,
+ nws_logo_xpixel_loc, nws_logo_ypixel_loc,
+ zorder=1, alpha=nws_logo_alpha)
+ #if img_quality in ['low', 'medium']:
+ # noaa_img.set_visible(False)
+ # nws_img.set_visible(False)
+ # Build savefig name, e.g. ABSTK_ERR_fhrmean_<plot_info>_<model_type>.png
+ savefig_name = os.path.join(
+ plotting_out_dir_imgs,
+ COLUMN_group.replace('(', '').replace(')', '')
+ +'_fhrmean_'+plot_info+'_'+model_type+'.png'
+ )
+ print("Saving image as "+savefig_name)
+ plt.savefig(savefig_name)
+ plt.close()
+else:
+ print("ERROR: "+summary_tcst_filename+" does not exist")
+ sys.exit(1)
diff --git a/ush/hurricane/stats/plot_tropcyc_lead_regional.py b/ush/hurricane/stats/plot_tropcyc_lead_regional.py
new file mode 100755
index 0000000000..21718fbf46
--- /dev/null
+++ b/ush/hurricane/stats/plot_tropcyc_lead_regional.py
@@ -0,0 +1,535 @@
+from __future__ import (print_function, division)
+import os
+import sys
+import numpy as np
+#import plot_util as plot_util
+import plot_tropcyc_util as plot_util
+import pandas as pd
+import warnings
+import matplotlib
+import math
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+
+warnings.filterwarnings('ignore')
+
+# Plot Settings
+plt.rcParams['font.weight'] = 'bold'
+plt.rcParams['axes.titleweight'] = 'bold'
+plt.rcParams['axes.titlesize'] = 16
+plt.rcParams['axes.titlepad'] = 15
+plt.rcParams['axes.labelweight'] = 'bold'
+plt.rcParams['axes.labelsize'] = 16
+plt.rcParams['axes.labelpad'] = 10
+plt.rcParams['axes.formatter.useoffset'] = False
+plt.rcParams['xtick.labelsize'] = 16
+plt.rcParams['xtick.major.pad'] = 10
+plt.rcParams['ytick.labelsize'] = 16
+plt.rcParams['ytick.major.pad'] = 10
+plt.rcParams['figure.subplot.left'] = 0.1
+plt.rcParams['figure.subplot.right'] = 0.95
+plt.rcParams['figure.subplot.top'] = 0.85
+plt.rcParams['figure.subplot.bottom'] = 0.15
+plt.rcParams['legend.handletextpad'] = 0.25
+plt.rcParams['legend.handlelength'] = 1.25
+plt.rcParams['legend.borderaxespad'] = 0
+plt.rcParams['legend.columnspacing'] = 1.0
+plt.rcParams['legend.frameon'] = False
+x_figsize, y_figsize = 14, 7
+legend_bbox_x, legend_bbox_y = 0, 1
+legend_fontsize = 13
+legend_loc = 'upper left'
+legend_ncol = 1
+title_loc = 'center'
+model_obs_plot_settings_dict = {
+ 'model1': {'color': '#000000',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 3},
+ 'model2': {'color': '#8400C8',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model3': {'color': '#00DC00',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model4': {'color': '#56B4E9',
+ 'marker': 'o', 'markersize': 7,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model5': {'color': '#00DC00',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model6': {'color': '#56B4E9',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model7': {'color': '#696969',
+ 'marker': 's', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model8': {'color': '#8400C8',
+ 'marker': 'D', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model9': {'color': '#D269C1',
+ 'marker': 's', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'model10': {'color': '#F0E492',
+ 'marker': 'o', 'markersize': 6,
+ 'linestyle': 'solid', 'linewidth': 1.5},
+ 'obs': {'color': '#AAAAAA',
+ 'marker': 'None', 'markersize': 0,
+ 'linestyle': 'solid', 'linewidth': 2}
+}
+# modified by Yan Jin
+#model_obs_plot_settings_dict = {
+# 'model1': {'color': '#000000',
+# 'marker': 'o', 'markersize': 0,
+# 'linestyle': 'solid', 'linewidth': 3},
+# 'model2': {'color': '#8400C8',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model3': {'color': '#00DC00',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model4': {'color': '#56B4E9',
+# 'marker': 'o', 'markersize': 7,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model5': {'color': '#E69F00',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model6': {'color': '#1E3CFF',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model7': {'color': '#696969',
+# 'marker': 's', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model8': {'color': '#FB2020',
+# 'marker': 'D', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model9': {'color': '#D269C1',
+# 'marker': 's', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'model10': {'color': '#F0E492',
+# 'marker': 'o', 'markersize': 6,
+# 'linestyle': 'solid', 'linewidth': 1.5},
+# 'obs': {'color': '#AAAAAA',
+# 'marker': 'None', 'markersize': 0,
+# 'linestyle': 'solid', 'linewidth': 2}
+#}
+noaa_logo_img_array = matplotlib.image.imread(
+ os.path.join(os.environ['LOGOroot'], 'noaa.png')
+)
+noaa_logo_xpixel_loc = x_figsize*plt.rcParams['figure.dpi']*0.1
+noaa_logo_ypixel_loc = y_figsize*plt.rcParams['figure.dpi']*0.865
+noaa_logo_alpha = 0.5
+nws_logo_img_array = matplotlib.image.imread(
+ os.path.join(os.environ['LOGOroot'], 'nws.png')
+)
+nws_logo_xpixel_loc = x_figsize*plt.rcParams['figure.dpi']*0.9
+nws_logo_ypixel_loc = y_figsize*plt.rcParams['figure.dpi']*0.865
+nws_logo_alpha = 0.5
+# modified by Yan Jin
+case_num_label_x_loc, case_num_label_y_loc = -0.05, -0.15
+case_num_tick_y_loc = case_num_label_y_loc + 0.015
+
+# Read in environment variables
+PLOTDATA = os.environ['PLOTDATA']
+RUN = os.environ['RUN']
+fhr_list = os.environ['fhr_list'].split(',')
+fhrs = np.asarray(fhr_list, dtype=int)
+#init_hour_list = os.environ['init_hour_list'].split(',')
+#valid_hour_list = os.environ['valid_hour_list'].split(',')
+#model_atcf_name_list = os.environ['model_atcf_name_list'].split(',')
+model_tmp_atcf_name_list = os.environ['model_tmp_atcf_name_list'].split(',')
+model_plot_name_list = os.environ['model_plot_name_list'].split(',')
+basin = os.environ['basin']
+plot_CI_bars = os.environ['plot_CI_bars']
+model_type = os.environ['tropcyc_model_type']
+print(model_type)
+if 'tc_name' in list(os.environ.keys()):
+ plot_info = os.environ['tc_name']
+ year = plot_info.split('_')[1]
+ name = plot_info.split('_')[2]
+ tc_num = os.environ['tc_num']
+else:
+ plot_info = basin
+tc_stat_file_dir = os.path.join(PLOTDATA,'tc_stat')
+#tc_stat_file_dir = os.path.join(PLOTDATA, RUN, 'metplus_output', 'gather',
+# 'tc_stat', plot_info)
+plotting_out_dir_imgs = os.path.join(PLOTDATA, 'plot', plot_info, 'images')
+#plotting_out_dir_imgs = os.path.join(PLOTDATA, RUN, 'metplus_output', 'plot',
+# plot_info, 'images')
+if not os.path.exists(plotting_out_dir_imgs):
+ os.makedirs(plotting_out_dir_imgs)
+img_quality = os.environ['img_quality']
+
+# Set image quality
+#if img_quality == 'low':
+# plt.rcParams['savefig.dpi'] = 50
+#elif img_quality == 'medium':
+# plt.rcParams['savefig.dpi'] = 75
+
+# Read and plot stats
+print("Working on track and intensity error plots for "+plot_info)
+print("Reading in data")
+summary_tcst_filename = os.path.join(tc_stat_file_dir, 'tc_stat.out')
+if os.path.exists(summary_tcst_filename):
+ nrow = sum(1 for line in open(summary_tcst_filename))
+ if nrow == 3:
+ print("ERROR: "+summary_tcst_filename+" empty")
+ sys.exit(1)
+ else:
+ print(summary_tcst_filename+" exists")
+ summary_tcst_file = open(summary_tcst_filename, 'r')
+ tc_stat_job = summary_tcst_file.readline()
+ summary_tcst_read_columns = summary_tcst_file.readline().split(' ')
+ summary_tcst_file.close()
+ tc_stat_summary_job_columns = []
+ for col in summary_tcst_read_columns:
+ if col != '':
+ tc_stat_summary_job_columns.append(col.rstrip())
+ summary_tcst_data = pd.read_csv(summary_tcst_filename,
+ sep=" ", skiprows=2,
+ skipinitialspace=True,
+ header=None, dtype=str,
+ names=tc_stat_summary_job_columns)
+ summary_tcst_data_groupby_COLUMN = (
+ summary_tcst_data.groupby(['COLUMN'])
+ )
+ for COLUMN_group in summary_tcst_data_groupby_COLUMN.groups.keys():
+ print("Creating plot for "+COLUMN_group)
+ if COLUMN_group == 'AMAX_WIND-BMAX_WIND':
+ formal_stat_name = 'Intensity Bias (knots)'
+ elif COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':
+ formal_stat_name = 'Absolute Intensity Error (knots)'
+ elif COLUMN_group == 'ABS(TK_ERR)':
+ formal_stat_name = 'Absolute Track Error (nm)'
+ elif COLUMN_group == 'ALTK_ERR':
+ formal_stat_name = 'Along Track Bias (nm)'
+ elif COLUMN_group == 'CRTK_ERR':
+ formal_stat_name = 'Cross Track Bias (nm)'
+ else:
+ formal_stat_name = COLUMN_group
+ summary_tcst_data_COLUMN = (
+ summary_tcst_data_groupby_COLUMN.get_group(COLUMN_group)
+ )
+ summary_tcst_data_COLUMN_groupby_AMODEL = (
+ summary_tcst_data_COLUMN.groupby(['AMODEL'])
+ )
+ nmodels = len(
+ summary_tcst_data_COLUMN_groupby_AMODEL.groups.keys()
+ )
+ if nmodels != len(model_tmp_atcf_name_list):
+ print("ERROR: Model(s) missing in "+summary_tcst_filename)
+ continue
+ stat_max = np.ma.masked_invalid(np.nan)
+ fig, ax = plt.subplots(1,1,figsize=(x_figsize, y_figsize))
+ ax.grid(True, color = 'grey', linestyle = '--')
+ ax.axhline(0, color = 'black')
+ ax.set_xlabel('Forecast Hour')
+ ax.xaxis.set_label_coords(0.5, -0.15)
+ if len(fhrs) > 15:
+ ax.set_xticks(fhrs[::2])
+ ax.set_xticks(fhrs, minor=True)
+ else:
+ ax.set_xticks(fhrs)
+ ax.set_xlim([fhrs[0], fhrs[-1]])
+ ax.set_ylabel(formal_stat_name)
+ model_num = 0
+ CI_bar_max_widths = np.append(np.diff(fhrs),
+ fhrs[-1]-fhrs[-2])/1.5
+ CI_bar_min_widths = np.append(np.diff(fhrs),
+ fhrs[-1]-fhrs[-2])/nmodels
+ CI_bar_intvl_widths = (
+ (CI_bar_max_widths-CI_bar_min_widths)/nmodels
+ )
+ tcstat_file_AMODEL_list = (
+ summary_tcst_data_COLUMN_groupby_AMODEL.groups.keys()
+ )
+ for AMODEL in model_tmp_atcf_name_list:
+ AMODEL_idx = model_tmp_atcf_name_list.index(AMODEL)
+ #AMODEL_plot_name = (model_plot_name_list[AMODEL_idx]+' '
+ # +'('+model_atcf_name_list[AMODEL_idx]+')')
+ # modified by Yan Jin
+ AMODEL_plot_name = model_plot_name_list[AMODEL_idx]
+ print("Plotting "+AMODEL_plot_name)
+ model_num+=1
+ model_plot_settings_dict = (
+ model_obs_plot_settings_dict['model'+str(model_num)]
+ )
+ fhrs_column_amodel_mean = np.full_like(fhrs, np.nan,
+ dtype=float)
+ fhrs_column_amodel_total = np.full_like(fhrs, np.nan,
+ dtype=float)
+ fhrs_column_amodel_mean_ncl = np.full_like(fhrs, np.nan,
+ dtype=float)
+ fhrs_column_amodel_mean_ncu = np.full_like(fhrs, np.nan,
+ dtype=float)
+ if AMODEL not in tcstat_file_AMODEL_list:
+ print("Data for "+AMODEL+" missing...setting to NaN")
+ else:
+ summary_tcst_data_COLUMN_AMODEL = (
+ summary_tcst_data_COLUMN_groupby_AMODEL. \
+ get_group(AMODEL)
+ )
+ summary_tcst_data_COLUMN_AMODEL_LEAD = (
+ summary_tcst_data_COLUMN_AMODEL['LEAD'].values
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_TOTAL = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['TOTAL'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCL = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN_NCL'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCU = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['MEAN_NCU'].values,
+ dtype=float
+ )
+ summary_tcst_data_COLUMN_AMODEL_STDEV = np.asarray(
+ summary_tcst_data_COLUMN_AMODEL['STDEV'].values,
+ dtype=float
+ )
+ leads_list = []
+ for lead in summary_tcst_data_COLUMN_AMODEL_LEAD:
+ if lead[0] != '0':
+ leads_list.append(lead[0:3])
+ else:
+ leads_list.append(lead[1:3])
+ leads = np.asarray(leads_list, dtype=int)
+ for fhr in fhrs:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ if fhr in leads:
+ matching_lead_idx = np.where(fhr == leads)[0][0]
+ fhrs_column_amodel_mean[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_total[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_TOTAL[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean_ncl[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCL[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean_ncu[fhr_idx] = (
+ summary_tcst_data_COLUMN_AMODEL_MEAN_NCU[
+ matching_lead_idx
+ ]
+ )
+ fhrs_column_amodel_mean = np.ma.masked_invalid(
+ fhrs_column_amodel_mean
+ )
+ fhrs_column_amodel_total = np.ma.masked_invalid(
+ fhrs_column_amodel_total
+ )
+ fhrs_column_amodel_mean_ncl = np.ma.masked_invalid(
+ fhrs_column_amodel_mean_ncl
+ )
+ fhrs_column_amodel_mean_ncu = np.ma.masked_invalid(
+ fhrs_column_amodel_mean_ncu
+ )
+ if model_num == 1:
+ all_amodel_total = [fhrs_column_amodel_total]
+ else:
+ all_amodel_total = np.vstack(
+ (all_amodel_total, fhrs_column_amodel_total)
+ )
+ all_amodel_total = np.ma.masked_invalid(all_amodel_total)
+ count = (
+ len(fhrs_column_amodel_mean)
+ - np.ma.count_masked(fhrs_column_amodel_mean)
+ )
+ mfhrs = np.ma.array(
+ fhrs, mask=np.ma.getmaskarray(fhrs_column_amodel_mean)
+ )
+ if count != 0:
+ ax.plot(mfhrs.compressed(),
+ fhrs_column_amodel_mean.compressed(),
+ color = model_plot_settings_dict['color'],
+ linestyle = model_plot_settings_dict['linestyle'],
+ linewidth = model_plot_settings_dict['linewidth'],
+ marker = model_plot_settings_dict['marker'],
+ markersize = model_plot_settings_dict['markersize'],
+ label=AMODEL_plot_name,
+ zorder=(nmodels-model_num-1)+4)
+ if fhrs_column_amodel_mean.max() > stat_max \
+ or np.ma.is_masked(stat_max):
+ stat_max = fhrs_column_amodel_mean.max()
+ if plot_CI_bars == 'YES':
+ for fhr in fhrs:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ ax.bar(fhrs[fhr_idx],
+ (fhrs_column_amodel_mean_ncu[fhr_idx]
+ - fhrs_column_amodel_mean_ncl[fhr_idx]),
+ bottom=fhrs_column_amodel_mean_ncl[fhr_idx],
+ color='None',
+ width=CI_bar_max_widths-(CI_bar_intvl_widths
+ *(model_num-1)),
+ edgecolor= model_plot_settings_dict['color'],
+ linewidth=0.5)
+ if fhrs_column_amodel_mean_ncu[fhr_idx] > stat_max \
+ or np.ma.is_masked(stat_max):
+ if not np.ma.is_masked(fhrs_column_amodel_mean_ncu[fhr_idx]):
+ stat_max = fhrs_column_amodel_mean_ncu[fhr_idx]
+ # Adjust y axis limits and ticks
+ preset_y_axis_tick_min = ax.get_yticks()[0]
+ preset_y_axis_tick_max = ax.get_yticks()[-1]
+ preset_y_axis_tick_inc = ax.get_yticks()[1] - ax.get_yticks()[0]
+
+ # modified by Yan Jin
+ #y_axis_min = math.floor(preset_y_axis_tick_min - 10)
+ #y_axis_max = math.ceil(preset_y_axis_tick_max + 10)
+ if plot_CI_bars == 'YES':
+ y_axis_min = math.ceil(preset_y_axis_tick_max) * (-1.0) - 50.0
+ y_axis_max = math.ceil(preset_y_axis_tick_max) + 50.0
+ else:
+ y_axis_min = math.floor(preset_y_axis_tick_min)
+ y_axis_max = math.ceil(preset_y_axis_tick_max)
+
+ #y_axis_min = -max(abs(math.floor(preset_y_axis_tick_min)), abs(math.ceil(preset_y_axis_tick_max)))
+
+ y_axis_tick_inc = preset_y_axis_tick_inc
+ if np.ma.is_masked(stat_max):
+ y_axis_max = preset_y_axis_tick_max
+ else:
+ y_axis_max = preset_y_axis_tick_max
+ while y_axis_max < stat_max:
+ y_axis_max = y_axis_max + y_axis_tick_inc
+
+ if COLUMN_group == 'ABS(TK_ERR)' or COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':
+ y_axis_min = 0
+ #y_axis_max = y_axis_max + 50.0
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max+y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ # Check y axis limit
+ if stat_max >= ax.get_ylim()[1]:
+ while stat_max >= ax.get_ylim()[1]:
+ y_axis_max = y_axis_max + y_axis_tick_inc
+ y_axis_min = y_axis_max * (-1.0) - 50.0
+ if COLUMN_group == 'ABS(TK_ERR)' or COLUMN_group == 'ABS(AMAX_WIND-BMAX_WIND)':
+ y_axis_min = 0
+ y_axis_max = y_axis_max + 50.0
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max + y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ # Add legend, adjust if points in legend
+ if len(ax.lines) != 0:
+ legend = ax.legend(bbox_to_anchor=(legend_bbox_x,
+ legend_bbox_y),
+ loc=legend_loc, ncol=legend_ncol,
+ fontsize=legend_fontsize)
+ plt.draw()
+ legend_box = legend.get_window_extent() \
+ .inverse_transformed(ax.transData)
+ if stat_max > legend_box.y1:
+ while stat_max > legend_box.y1:
+ y_axis_max = y_axis_max + y_axis_tick_inc
+ ax.set_yticks(
+ np.arange(y_axis_min,
+ y_axis_max + y_axis_tick_inc,
+ y_axis_tick_inc)
+ )
+ ax.set_ylim([y_axis_min, y_axis_max])
+ legend = ax.legend(
+ bbox_to_anchor=(legend_bbox_x, legend_bbox_y),
+ loc=legend_loc, ncol=legend_ncol,
+ fontsize=legend_fontsize
+ )
+ plt.draw()
+ legend_box = (
+ legend.get_window_extent() \
+ .inverse_transformed(ax.transData)
+ )
+ # Add number of cases
+ x_axis_ticks_fraction = np.linspace(0, 1,len(fhrs), endpoint=True)
+ ax.annotate('# of\nCases',
+ xy=(case_num_label_x_loc, case_num_label_y_loc),
+ xycoords='axes fraction')
+ if len(fhrs) > 15:
+ fhrs_ncase_to_plot = fhrs[::2]
+ else:
+ fhrs_ncase_to_plot = fhrs
+ for fhr in fhrs_ncase_to_plot:
+ fhr_idx = np.where(fhr == fhrs)[0][0]
+ if not np.ma.is_masked(all_amodel_total[:,fhr_idx]):
+ if np.all(all_amodel_total[:,fhr_idx]
+ == all_amodel_total[0,fhr_idx]):
+ num_cases = all_amodel_total[0,fhr_idx]
+ num_cases_str = str(int(num_cases))
+ ax.annotate(num_cases_str,
+ xy=(x_axis_ticks_fraction[fhr_idx],
+ case_num_tick_y_loc), size=12,
+ xycoords='axes fraction', ha='center')
+ else:
+ print("Working with nonhomogeneous sample for fhr "
+ +str(fhr)+"...not printing number of cases")
+ props = {
+ 'boxstyle': 'square',
+ 'pad': 0.35,
+ 'facecolor': 'white',
+ 'linestyle': 'solid',
+ 'linewidth': 1,
+ 'edgecolor': 'black'
+ }
+ x_axis_tick_inc = fhrs[1] - fhrs[0]
+ # modified by Yan Jin
+ #if len(ax.lines) != 0:
+ # ax.text(legend_box.x1 + (x_axis_tick_inc * 0.75),
+ # ax.get_ylim()[1] - (0.15 * y_axis_tick_inc),
+ # 'Note: statistical significance at the 95% '
+ # +'confidence level where confidence intervals '
+ # +'do not intersect',
+ # ha='left', va='top', fontsize=10,
+ # bbox=props, transform=ax.transData)
+ # Build formal plot title
+ full_title = formal_stat_name+'\n'
+ #full_title = ""
+ if basin == 'AL':
+ formal_basin = 'Atlantic'
+ elif basin == 'CP':
+ formal_basin = 'Central Pacific'
+ elif basin == 'EP':
+ formal_basin = 'Eastern Pacific'
+ elif basin == 'WP':
+ formal_basin = 'Western Pacific'
+ if len(plot_info) == 2:
+ full_title = full_title+formal_basin+' Mean\n'
+ else:
+ full_title = full_title + name.title().upper() + ' ' + '(' + basin + str(tc_num) + ' ' + year + ')'
+ #full_title = (full_title+'Cycles: '+', '.join(init_hour_list)+', '
+ # +' Valid Hours: '+', '.join(valid_hour_list))
+ ax.set_title(full_title)
+ noaa_img = fig.figimage(noaa_logo_img_array,
+ noaa_logo_xpixel_loc, noaa_logo_ypixel_loc,
+ zorder=1, alpha=noaa_logo_alpha)
+ nws_img = fig.figimage(nws_logo_img_array,
+ nws_logo_xpixel_loc, nws_logo_ypixel_loc,
+ zorder=1, alpha=nws_logo_alpha)
+ #if img_quality in ['low', 'medium']:
+ # noaa_img.set_visible(False)
+ # nws_img.set_visible(False)
+ # Build savefig name
+ savefig_name = os.path.join(
+ plotting_out_dir_imgs,
+ COLUMN_group.replace('(', '').replace(')', '')
+ +'_fhrmean_'+plot_info+'_'+model_type+'.png'
+ )
+ print("Saving image as "+savefig_name)
+ plt.savefig(savefig_name)
+ plt.close()
+else:
+ print("ERROR: "+summary_tcst_filename+" does not exist")
+ sys.exit(1)
diff --git a/ush/hurricane/stats/plot_tropcyc_util.py b/ush/hurricane/stats/plot_tropcyc_util.py
new file mode 100755
index 0000000000..36b6b2e2b5
--- /dev/null
+++ b/ush/hurricane/stats/plot_tropcyc_util.py
@@ -0,0 +1,1076 @@
+import os
+import datetime as datetime
+import time
+import numpy as np
+import pandas as pd
+import warnings
+warnings.filterwarnings('ignore')
+
+"""!@namespace plot_util
+ @brief Provides utility functions for METplus plotting use case.
+"""
+
+def get_date_arrays(date_type, date_beg, date_end,
+ fcst_valid_hour, fcst_init_hour,
+ obs_valid_hour, obs_init_hour,
+ lead):
+ """! Create arrays of requested dates plotting and
+ dates expected to be in MET .stat files
+
+ Args:
+ date_type - string describing the treatment
+ of dates, either VALID or INIT
+ date_beg - string of beginning date,
+ either blank or %Y%m%d format
+ date_end - string of end date,
+ either blank or %Y%m%d format
+ fcst_valid_hour - string of forecast valid hour(s)
+ information, blank or in %H%M%S
+ fcst_init_hour - string of forecast init hour(s)
+ information, blank or in %H%M%S
+ obs_valid_hour - string of observation valid hour(s)
+ information, blank or in %H%M%S
+ obs_init_hour - string of observation init hour(s)
+ information, blank or in %H%M%S
+ lead - string of forecast lead, in %H%M%S
+ format
+
+ Returns:
+ plot_time_dates - array of ordinal dates based on user
+ provided information
+ expected_stat_file_dates - array of dates that are expected to
+ be found in the MET .stat files
+ based on user provided information,
+ formatted as %Y%m%d_%H%M%S
+ """
+ lead_hour_seconds = int(int(lead[:-4])%24) * 3600
+ lead_min_seconds = int(lead[-4:-2]) * 60
+ lead_seconds = int(lead[-2:])
+ valid_init_time_info = {
+ 'fcst_valid_time': list(filter(None, fcst_valid_hour.split(', '))),
+ 'fcst_init_time': list(filter(None, fcst_init_hour.split(', '))),
+ 'obs_valid_time': list(filter(None, obs_valid_hour.split(', '))),
+ 'obs_init_time': list(filter(None, obs_init_hour.split(', '))),
+ }
+ # Extract missing information, if possible
+ for type in ['fcst', 'obs']:
+ valid_time_list = valid_init_time_info[type+'_valid_time']
+ init_time_list = valid_init_time_info[type+'_init_time']
+ if (len(valid_time_list) == 0
+ and len(init_time_list) > 0):
+ for itime in init_time_list:
+ itime_hour_seconds = int(int(itime[0:2])%24) * 3600
+ itime_min_seconds = int(itime[2:4]) * 60
+ itime_seconds = int(itime[4:])
+ offset = datetime.timedelta(seconds=lead_hour_seconds
+ + lead_min_seconds
+ + lead_seconds
+ + itime_hour_seconds
+ + itime_min_seconds
+ + itime_seconds)
+ tot_sec = offset.total_seconds()
+ valid_hour = int(tot_sec//3600)
+ valid_min = int((tot_sec%3600) // 60)
+ valid_sec = int((tot_sec%3600)%60)
+ valid_time = (
+ str(valid_hour).zfill(2)
+ +str(valid_min).zfill(2)
+ +str(valid_sec).zfill(2)
+ )
+ valid_init_time_info[type+'_valid_time'].append(valid_time)
+ if (len(init_time_list) == 0
+ and len(valid_time_list) > 0):
+ for vtime in valid_time_list:
+ vtime_hour_seconds = int(int(vtime[0:2])%24) * 3600
+ vtime_min_seconds = int(vtime[2:4]) * 60
+ vtime_seconds = int(vtime[4:])
+ offset = datetime.timedelta(seconds=lead_hour_seconds
+ + lead_min_seconds
+ + lead_seconds
+ - vtime_hour_seconds
+ - vtime_min_seconds
+ - vtime_seconds)
+ tot_sec = offset.total_seconds()
+ init_hour = int(tot_sec//3600)
+ init_min = int((tot_sec%3600) // 60)
+ init_sec = int((tot_sec%3600)%60)
+ init_time = (
+ str(init_hour).zfill(2)
+ +str(init_min).zfill(2)
+ +str(init_sec).zfill(2)
+ )
+ valid_init_time_info[type+'_init_time'].append(init_time)
+ for type in ['valid', 'init']:
+ fcst_time_list = valid_init_time_info['fcst_'+type+'_time']
+ obs_time_list = valid_init_time_info['obs_'+type+'_time']
+ if len(fcst_time_list) == 0:
+ if len(obs_time_list) > 0:
+ valid_init_time_info['fcst_'+type+'_time'] = (
+ valid_init_time_info['obs_'+type+'_time']
+ )
+ if len(obs_time_list) == 0:
+ if len(fcst_time_list) > 0:
+ valid_init_time_info['obs_'+type+'_time'] = (
+ valid_init_time_info['fcst_'+type+'_time']
+ )
+ date_info = {}
+ for type in ['fcst_'+date_type.lower(),
+ 'obs_'+date_type.lower()]:
+ time_list = valid_init_time_info[type+'_time']
+ if len(time_list) != 0:
+ time_beg = min(time_list)
+ time_end = max(time_list)
+ if time_beg == time_end or len(time_list) == 1:
+ delta_t = datetime.timedelta(seconds=86400)
+ else:
+ delta_t_list = []
+ for t in range(len(time_list)):
+ if time_list[t] == time_end:
+ delta_t_list.append(
+ (
+ datetime.datetime.strptime('235959','%H%M%S')
+ - (datetime.datetime.strptime(time_list[t],
+ '%H%M%S'))
+ )
+ + datetime.timedelta(seconds = 1)
+ )
+ else:
+ delta_t_list.append(
+ datetime.datetime.strptime(time_list[t+1],
+ '%H%M%S')
+ - datetime.datetime.strptime(time_list[t],
+ '%H%M%S')
+ )
+ delta_t_array = np.array(delta_t_list)
+ if np.all(delta_t_array == delta_t_array[0]):
+ delta_t = delta_t_array[0]
+ else:
+ delta_t = np.min(delta_t_array)
+ beg = datetime.datetime.strptime(
+ date_beg+time_beg, '%Y%m%d%H%M%S'
+ )
+ end = datetime.datetime.strptime(
+ date_end+time_end, '%Y%m%d%H%M%S'
+ )
+ dates = np.arange(
+ beg, end+delta_t,
+ delta_t
+ ).astype(datetime.datetime)
+ else:
+ dates = []
+ date_info[type+'_dates'] = dates
+ # Build opposite dates
+ if date_type == 'VALID':
+ oppo_date_type = 'INIT'
+ elif date_type == 'INIT':
+ oppo_date_type = 'VALID'
+ lead_timedelta = datetime.timedelta(
+ seconds=(int(int(lead[:-4])) * 3600 + lead_min_seconds
+ + lead_seconds)
+ )
+ if oppo_date_type == 'INIT':
+ lead_timedelta = -1 * lead_timedelta
+ for type in ['fcst', 'obs']:
+ date_info[type+'_'+oppo_date_type.lower()+'_dates'] = (
+ date_info[type+'_'+date_type.lower()+'_dates'] + lead_timedelta
+ )
+ # Use fcst_*_dates for dates
+ # this makes the assumption that
+ # fcst_*_dates and obs_*_dates
+ # are the same, and they should be for
+ # most cases
+ dates = date_info['fcst_'+date_type.lower()+'_dates']
+ fv_dates = date_info['fcst_valid_dates']
+ plot_time_dates = []
+ expected_stat_file_dates = []
+ for date in dates:
+ dt = date.time()
+ seconds = (dt.hour * 60 + dt.minute) * 60 + dt.second
+ plot_time_dates.append(date.toordinal() + seconds/86400.)
+ # MET .stat files saves valid dates in file
+ fv_dates = date_info['fcst_valid_dates']
+ expected_stat_file_dates = []
+ for fv_date in fv_dates:
+ expected_stat_file_dates.append(fv_date.strftime('%Y%m%d_%H%M%S'))
+ return plot_time_dates, expected_stat_file_dates
+
+def format_thresh(thresh):
+ """! Format thresholds for file naming
+
+ Args:
+ thresh - string of the threshold(s)
+
+ Return:
+ thresh_symbol - string of the threshold(s)
+ with symbols
+ thresh_letters - string of the threshold(s)
+ with letters
+ """
+ thresh_list = thresh.split(' ')
+ thresh_symbol = ''
+ thresh_letter = ''
+ for thresh in thresh_list:
+ if thresh == '':
+ continue
+ thresh_value = thresh
+ for opt in ['>=', '>', '==','!=','<=', '<',
+ 'ge', 'gt', 'eq', 'ne', 'le', 'lt']:
+ if opt in thresh_value:
+ thresh_opt = opt
+ thresh_value = thresh_value.replace(opt, '')
+ if thresh_opt in ['>', 'gt']:
+ thresh_symbol+='>'+thresh_value
+ thresh_letter+='gt'+thresh_value
+ elif thresh_opt in ['>=', 'ge']:
+ thresh_symbol+='>='+thresh_value
+ thresh_letter+='ge'+thresh_value
+ elif thresh_opt in ['<', 'lt']:
+ thresh_symbol+='<'+thresh_value
+ thresh_letter+='lt'+thresh_value
+ elif thresh_opt in ['<=', 'le']:
+ thresh_symbol+='<='+thresh_value
+ thresh_letter+='le'+thresh_value
+ elif thresh_opt in ['==', 'eq']:
+ thresh_symbol+='=='+thresh_value
+ thresh_letter+='eq'+thresh_value
+ elif thresh_opt in ['!=', 'ne']:
+ thresh_symbol+='!='+thresh_value
+ thresh_letter+='ne'+thresh_value
+ return thresh_symbol, thresh_letter
+
+def get_stat_file_base_columns(met_version):
+ """! Get the standard MET .stat file columns based on
+ version number
+
+ Args:
+ met_version - string of MET version
+ number being used to
+ run stat_analysis
+
+ Returns:
+ stat_file_base_columns - list of the standard
+ columns shared among the
+ different line types
+ """
+ met_version = float(met_version)
+ if met_version < 8.1:
+ stat_file_base_columns = [
+ 'VERSION', 'MODEL', 'DESC', 'FCST_LEAD', 'FCST_VALID_BEG',
+ 'FCST_VALID_END', 'OBS_LEAD', 'OBS_VALID_BEG', 'OBS_VALID_END',
+ 'FCST_VAR', 'FCST_LEV', 'OBS_VAR', 'OBS_LEV', 'OBTYPE', 'VX_MASK',
+ 'INTERP_MTHD', 'INTERP_PNTS', 'FCST_THRESH', 'OBS_THRESH',
+ 'COV_THRESH', 'ALPHA', 'LINE_TYPE'
+ ]
+ else:
+ stat_file_base_columns = [
+ 'VERSION', 'MODEL', 'DESC', 'FCST_LEAD', 'FCST_VALID_BEG',
+ 'FCST_VALID_END', 'OBS_LEAD', 'OBS_VALID_BEG', 'OBS_VALID_END',
+ 'FCST_VAR', 'FCST_UNITS', 'FCST_LEV', 'OBS_VAR', 'OBS_UNITS',
+ 'OBS_LEV', 'OBTYPE', 'VX_MASK', 'INTERP_MTHD', 'INTERP_PNTS',
+ 'FCST_THRESH', 'OBS_THRESH', 'COV_THRESH', 'ALPHA', 'LINE_TYPE'
+ ]
+ return stat_file_base_columns
+
+def get_stat_file_line_type_columns(logger, met_version, line_type):
+ """! Get the MET .stat file columns for line type based on
+ version number
+
+ Args:
+ met_version - string of MET version number
+ being used to run stat_analysis
+ line_type - string of the line type of the MET
+ .stat file being read
+
+ Returns:
+ stat_file_line_type_columns - list of the line
+ type columns
+ """
+ met_version = float(met_version)
+ if line_type == 'SL1L2':
+ if met_version >= 6.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'FBAR', 'OBAR', 'FOBAR', 'FFBAR', 'OOBAR', 'MAE'
+ ]
+ elif line_type == 'SAL1L2':
+ if met_version >= 6.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'FABAR', 'OABAR', 'FOABAR', 'FFABAR', 'OOABAR', 'MAE'
+ ]
+ elif line_type == 'VL1L2':
+ if met_version <= 6.1:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'UFBAR', 'VFBAR', 'UOBAR', 'VOBAR', 'UVFOBAR',
+ 'UVFFBAR', 'UVOOBAR'
+ ]
+ elif met_version >= 7.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'UFBAR', 'VFBAR', 'UOBAR', 'VOBAR', 'UVFOBAR',
+ 'UVFFBAR', 'UVOOBAR', 'F_SPEED_BAR', 'O_SPEED_BAR'
+ ]
+ elif line_type == 'VAL1L2':
+ if met_version >= 6.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'UFABAR', 'VFABAR', 'UOABAR', 'VOABAR', 'UVFOABAR',
+ 'UVFFABAR', 'UVOOABAR'
+ ]
+ elif line_type == 'VCNT':
+ if met_version >= 7.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'FBAR', 'FBAR_NCL', 'FBAR_NCU', 'OBAR', 'OBAR_NCL',
+ 'OBAR_NCU', 'FS_RMS', 'FS_RMS_NCL', 'FS_RMS_NCU', 'OS_RMS',
+ 'OS_RMS_NCL', 'OS_RMS_NCU', 'MSVE', 'MSVE_NCL', 'MSVE_NCU',
+ 'RMSVE', 'RMSVE_NCL', 'RMSVE_NCU', 'FSTDEV', 'FSTDEV_NCL',
+ 'FSTDEV_NCU', 'OSTDEV', 'OSTDEV_NCL', 'OSTDEV_NCU', 'FDIR',
+ 'FDIR_NCL', 'FDIR_NCU', 'ODIR', 'ODIR_NCL', 'ODIR_NCU',
+ 'FBAR_SPEED', 'FBAR_SPEED_NCL', 'FBAR_SPEED_NCU', 'OBAR_SPEED',
+ 'OBAR_SPEED_NCL', 'OBAR_SPEED_NCU', 'VDIFF_SPEED',
+ 'VDIFF_SPEED_NCL', 'VDIFF_SPEED_NCU', 'VDIFF_DIR',
+ 'VDIFF_DIR_NCL', 'VDIFF_DIR_NCU', 'SPEED_ERR', 'SPEED_ERR_NCL',
+ 'SPEED_ERR_NCU', 'SPEED_ABSERR', 'SPEED_ABSERR_NCL',
+ 'SPEED_ABSERR_NCU', 'DIR_ERR', 'DIR_ERR_NCL', 'DIR_ERR_NCU',
+ 'DIR_ABSERR', 'DIR_ABSERR_NCL', 'DIR_ABSERR_NCU'
+ ]
+ else:
+ logger.error("VCNT is not a valid LINE_TYPE in METV"+met_version)
+ exit(1)
+ elif line_type == 'CTC':
+ if met_version >= 6.0:
+ stat_file_line_type_columns = [
+ 'TOTAL', 'FY_OY', 'FY_ON', 'FN_OY', 'FN_ON'
+ ]
+ return stat_file_line_type_columns
+
+def get_clevels(data, spacing):
+ """! Get contour levels for plotting differences
+ or bias (centered on 0)
+
+ Args:
+ data - array of data to be contoured
+ spacing - float for spacing for power function,
+ value of 1.0 gives evenly spaced
+ contour intervals
+ Returns:
+ clevels - array of contour levels
+ """
+ if np.abs(np.nanmin(data)) > np.nanmax(data):
+ cmax = np.abs(np.nanmin(data))
+ cmin = np.nanmin(data)
+ else:
+ cmax = np.nanmax(data)
+ cmin = -1 * np.nanmax(data)
+ if cmax > 100:
+ cmax = cmax - (cmax * 0.2)
+ cmin = cmin + (cmin * 0.2)
+ elif cmax > 10:
+ cmax = cmax - (cmax * 0.1)
+ cmin = cmin + (cmin * 0.1)
+ if cmax > 1:
+ cmin = round(cmin-1,0)
+ cmax = round(cmax+1,0)
+ else:
+ cmin = round(cmin-0.1,1)
+ cmax = round(cmax+0.1,1)
+ steps = 6
+ span = cmax
+ dx = 1.0 / (steps-1)
+ pos = np.array([0 + (i*dx)**spacing*span for i in range(steps)],
+ dtype=float)
+ neg = np.array(pos[1:], dtype=float) * -1
+ clevels = np.append(neg[::-1], pos)
+ return clevels
+
+def calculate_average(logger, average_method, stat, model_dataframe,
+ model_stat_values):
+ """! Calculate average of dataset
+
+ Args:
+ logger - logging file
+ average_method - string of the method to
+ use to calculate the
+ average
+ stat - string of the statistic the
+ average is being taken for
+ model_dataframe - dataframe of model .stat
+ columns
+ model_stat_values - array of statistic values
+
+ Returns:
+ average_array - array of average value(s)
+ """
+ average_array = np.empty_like(model_stat_values[:,0])
+ if average_method == 'MEAN':
+ for l in range(len(model_stat_values[:,0])):
+ average_array[l] = np.ma.mean(model_stat_values[l,:])
+ elif average_method == 'MEDIAN':
+ for l in range(len(model_stat_values[:,0])):
+ logger.info(np.ma.median(model_stat_values[l,:]))
+ average_array[l] = np.ma.median(model_stat_values[l,:])
+ elif average_method == 'AGGREGATION':
+ ndays = model_dataframe.shape[0]
+ model_dataframe_aggsum = (
+ model_dataframe.groupby('model_plot_name').agg(['sum'])
+ )
+ model_dataframe_aggsum.columns = (
+ model_dataframe_aggsum.columns.droplevel(1)
+ )
+ avg_values, avg_array, stat_plot_name = (
+ calculate_stat(logger, model_dataframe_aggsum/ndays, stat)
+ )
+ for l in range(len(avg_array[:,0])):
+ average_array[l] = avg_array[l]
+ else:
+ logger.error("Invalid entry for MEAN_METHOD, "
+ +"use MEAN, MEDIAN, or AGGREGATION")
+ exit(1)
+ return average_array
+
+def calculate_ci(logger, ci_method, modelB_values, modelA_values, total_days,
+ stat, average_method, randx):
+ """! Calculate confidence intervals between two sets of data
+
+ Args:
+ logger - logging file
+ ci_method - string of the method to use to
+ calculate the confidence intervals
+ modelB_values - array of values
+ modelA_values - array of values
+ total_days - float of total number of days
+ being considered, sample size
+ stat - string of the statistic the
+ confidence intervals are being
+ calculated for
+ average_method - string of the method to
+ use to calculate the
+ average
+ randx - 2D array of random numbers [0,1)
+
+ Returns:
+ intvl - float of the confidence interval
+ """
+ if ci_method == 'EMC':
+ modelB_modelA_diff = modelB_values - modelA_values
+ ndays = total_days - np.ma.count_masked(modelB_modelA_diff)
+ modelB_modelA_diff_mean = modelB_modelA_diff.mean()
+ modelB_modelA_std = np.sqrt(
+ ((modelB_modelA_diff - modelB_modelA_diff_mean)**2).mean()
+ )
+ if ndays >= 80:
+ intvl = 1.960*modelB_modelA_std/np.sqrt(ndays-1)
+ elif ndays >= 40 and ndays < 80:
+ intvl = 2.000*modelB_modelA_std/np.sqrt(ndays-1)
+ elif ndays >= 20 and ndays < 40:
+ intvl = 2.042*modelB_modelA_std/np.sqrt(ndays-1)
+ elif ndays < 20 and ndays > 0:
+ intvl = 2.228*modelB_modelA_std/np.sqrt(ndays-1)
+ elif ndays == 0:
+ intvl = '--'
+ elif ci_method == 'EMC_MONTE_CARLO':
+ ntest, ntests = 1, 10000
+ dates = []
+ for idx_val in modelB_values.index.values:
+ dates.append(idx_val[1])
+ ndays = len(dates)
+ rand1_data_index = pd.MultiIndex.from_product(
+ [['rand1'], np.arange(1, ntests+1, dtype=int), dates],
+ names=['model_plot_name', 'ntest', 'dates']
+ )
+ rand2_data_index = pd.MultiIndex.from_product(
+ [['rand2'], np.arange(1, ntests+1, dtype=int), dates],
+ names=['model_plot_name', 'ntest', 'dates']
+ )
+ rand1_data = pd.DataFrame(
+ np.nan, index=rand1_data_index,
+ columns=modelB_values.columns
+ )
+ rand2_data = pd.DataFrame(
+ np.nan, index=rand2_data_index,
+ columns=modelB_values.columns
+ )
+ ncolumns = len(modelB_values.columns)
+ rand1_data_values = np.empty([ntests, ndays, ncolumns])
+ rand2_data_values = np.empty([ntests, ndays, ncolumns])
+ randx_ge0_idx = np.where(randx - 0.5 >= 0)
+ randx_lt0_idx = np.where(randx - 0.5 < 0)
+ rand1_data_values[randx_ge0_idx[0], randx_ge0_idx[1],:] = (
+ modelA_values.iloc[randx_ge0_idx[1],:]
+ )
+ rand2_data_values[randx_ge0_idx[0], randx_ge0_idx[1],:] = (
+ modelB_values.iloc[randx_ge0_idx[1],:]
+ )
+ rand1_data_values[randx_lt0_idx[0], randx_lt0_idx[1],:] = (
+ modelB_values.iloc[randx_lt0_idx[1],:]
+ )
+ rand2_data_values[randx_lt0_idx[0], randx_lt0_idx[1],:] = (
+ modelA_values.iloc[randx_lt0_idx[1],:]
+ )
+ ntest = 1
+ while ntest <= ntests:
+ rand1_data.loc[('rand1', ntest)] = rand1_data_values[ntest-1,:,:]
+ rand2_data.loc[('rand2', ntest)] = rand2_data_values[ntest-1,:,:]
+ ntest+=1
+ intvl = np.nan
+ rand1_stat_values, rand1_stat_values_array, stat_plot_name = (
+ calculate_stat(logger, rand1_data, stat)
+ )
+ rand2_stat_values, rand2_stat_values_array, stat_plot_name = (
+ calculate_stat(logger, rand2_data, stat)
+ )
+ rand1_average_array = (
+ calculate_average(logger, average_method, stat, rand1_data,
+ rand1_stat_values_array[0,0,:,:])
+ )
+ rand2_average_array = (
+ calculate_average(logger, average_method, stat, rand2_data,
+ rand2_stat_values_array[0,0,:,:])
+ )
+ scores_diff = rand2_average_array - rand1_average_array
+ scores_diff_mean = np.sum(scores_diff)/ntests
+ scores_diff_var = np.sum((scores_diff-scores_diff_mean)**2)
+ scores_diff_std = np.sqrt(scores_diff_var/(ntests-1))
+ intvl = 1.96*scores_diff_std
+ else:
+ logger.error("Invalid entry for MAKE_CI_METHOD, "
+ +"use EMC, EMC_MONTE_CARLO")
+ exit(1)
+ return intvl
+
+def get_stat_plot_name(logger, stat):
+ """! Get the formalized name of the statistic being plotted
+
+ Args:
+ stat - string of the simple statistic
+ name being plotted
+
+ Returns:
+ stat_plot_name - string of the formal statistic
+ name being plotted
+ """
+ if stat == 'bias':
+ stat_plot_name = 'Bias'
+ elif stat == 'rmse':
+ stat_plot_name = 'Root Mean Square Error'
+ elif stat == 'msess':
+ stat_plot_name = "Murphy's Mean Square Error Skill Score"
+ elif stat == 'rsd':
+ stat_plot_name = 'Ratio of Standard Deviation'
+ elif stat == 'rmse_md':
+ stat_plot_name = 'Root Mean Square Error from Mean Error'
+ elif stat == 'rmse_pv':
+ stat_plot_name = 'Root Mean Square Error from Pattern Variation'
+ elif stat == 'pcor':
+ stat_plot_name = 'Pattern Correlation'
+ elif stat == 'acc':
+ stat_plot_name = 'Anomaly Correlation Coefficient'
+ elif stat == 'fbar':
+ stat_plot_name = 'Forecast Averages'
+ elif stat == 'fbar_obar':
+ stat_plot_name = 'Forecast and Observation Averages'
+ elif stat == 'speed_err':
+ stat_plot_name = (
+ 'Difference in Average FCST and OBS Wind Vector Speeds'
+ )
+ elif stat == 'dir_err':
+ stat_plot_name = (
+ 'Difference in Average FCST and OBS Wind Vector Direction'
+ )
+ elif stat == 'rmsve':
+ stat_plot_name = 'Root Mean Square Difference Vector Error'
+ elif stat == 'vdiff_speed':
+ stat_plot_name = 'Difference Vector Speed'
+ elif stat == 'vdiff_dir':
+ stat_plot_name = 'Difference Vector Direction'
+ elif stat == 'fbar_obar_speed':
+ stat_plot_name = 'Average Wind Vector Speed'
+ elif stat == 'fbar_obar_dir':
+ stat_plot_name = 'Average Wind Vector Direction'
+ elif stat == 'fbar_speed':
+ stat_plot_name = 'Average Forecast Wind Vector Speed'
+ elif stat == 'fbar_dir':
+ stat_plot_name = 'Average Forecast Wind Vector Direction'
+ elif stat == 'orate':
+ stat_plot_name = 'Observation Rate'
+ elif stat == 'baser':
+ stat_plot_name = 'Base Rate'
+ elif stat == 'frate':
+ stat_plot_name = 'Forecast Rate'
+ elif stat == 'orate_frate':
+ stat_plot_name = 'Observation and Forecast Rates'
+ elif stat == 'baser_frate':
+ stat_plot_name = 'Base and Forecast Rates'
+ elif stat == 'accuracy':
+ stat_plot_name = 'Accuracy'
+ elif stat == 'fbias':
+ stat_plot_name = 'Frequency Bias'
+ elif stat == 'pod':
+ stat_plot_name = 'Probability of Detection'
+ elif stat == 'hrate':
+ stat_plot_name = 'Hit Rate'
+ elif stat == 'pofd':
+ stat_plot_name = 'Probability of False Detection'
+ elif stat == 'farate':
+ stat_plot_name = 'False Alarm Rate'
+ elif stat == 'podn':
+ stat_plot_name = 'Probability of Detection of the Non-Event'
+ elif stat == 'faratio':
+ stat_plot_name = 'False Alarm Ratio'
+ elif stat == 'csi':
+ stat_plot_name = 'Critical Success Index'
+ elif stat == 'ts':
+ stat_plot_name = 'Threat Score'
+ elif stat == 'gss':
+ stat_plot_name = 'Gilbert Skill Score'
+ elif stat == 'ets':
+ stat_plot_name = 'Equitable Threat Score'
+ elif stat == 'hk':
+ stat_plot_name = 'Hanssen-Kuipers Discriminant'
+ elif stat == 'tss':
+ stat_plot_name = 'True Skill Score'
+ elif stat == 'pss':
+ stat_plot_name = 'Peirce Skill Score'
+ elif stat == 'hss':
+ stat_plot_name = 'Heidke Skill Score'
+ else:
+ logger.error(stat+" is not a valid option")
+ exit(1)
+ return stat_plot_name
+
+def calculate_stat(logger, model_data, stat):
+ """! Calculate the statistic from the data from the
+ read in MET .stat file(s)
+
+ Args:
+ model_data - Dataframe containing the model(s)
+ information from the MET .stat
+ files
+ stat - string of the simple statistic
+ name being plotted
+
+ Returns:
+ stat_values - Dataframe of the statistic values
+ stat_values_array - array of the statistic values
+ stat_plot_name - string of the formal statistic
+ name being plotted
+ """
+ model_data_columns = model_data.columns.values.tolist()
+ if model_data_columns == [ 'TOTAL' ]:
+ logger.warning("Empty model_data dataframe")
+ line_type = 'NULL'
+ if (stat == 'fbar_obar' or stat == 'orate_frate'
+ or stat == 'baser_frate'):
+ stat_values = model_data.loc[:][['TOTAL']]
+ stat_values_fbar = model_data.loc[:]['TOTAL']
+ stat_values_obar = model_data.loc[:]['TOTAL']
+ else:
+ stat_values = model_data.loc[:]['TOTAL']
+ else:
+ if all(elem in model_data_columns for elem in
+ ['FBAR', 'OBAR', 'MAE']):
+ line_type = 'SL1L2'
+ fbar = model_data.loc[:]['FBAR']
+ obar = model_data.loc[:]['OBAR']
+ fobar = model_data.loc[:]['FOBAR']
+ ffbar = model_data.loc[:]['FFBAR']
+ oobar = model_data.loc[:]['OOBAR']
+ elif all(elem in model_data_columns for elem in
+ ['FABAR', 'OABAR', 'MAE']):
+ line_type = 'SAL1L2'
+ fabar = model_data.loc[:]['FABAR']
+ oabar = model_data.loc[:]['OABAR']
+ foabar = model_data.loc[:]['FOABAR']
+ ffabar = model_data.loc[:]['FFABAR']
+ ooabar = model_data.loc[:]['OOABAR']
+ elif all(elem in model_data_columns for elem in
+ ['UFBAR', 'VFBAR']):
+ line_type = 'VL1L2'
+ ufbar = model_data.loc[:]['UFBAR']
+ vfbar = model_data.loc[:]['VFBAR']
+ uobar = model_data.loc[:]['UOBAR']
+ vobar = model_data.loc[:]['VOBAR']
+ uvfobar = model_data.loc[:]['UVFOBAR']
+ uvffbar = model_data.loc[:]['UVFFBAR']
+ uvoobar = model_data.loc[:]['UVOOBAR']
+ elif all(elem in model_data_columns for elem in
+ ['UFABAR', 'VFABAR']):
+ line_type = 'VAL1L2'
+ ufabar = model_data.loc[:]['UFABAR']
+ vfabar = model_data.loc[:]['VFABAR']
+ uoabar = model_data.loc[:]['UOABAR']
+ voabar = model_data.loc[:]['VOABAR']
+ uvfoabar = model_data.loc[:]['UVFOABAR']
+ uvffabar = model_data.loc[:]['UVFFABAR']
+ uvooabar = model_data.loc[:]['UVOOABAR']
+ elif all(elem in model_data_columns for elem in
+ ['VDIFF_SPEED', 'VDIFF_DIR']):
+ line_type = 'VCNT'
+ fbar = model_data.loc[:]['FBAR']
+ obar = model_data.loc[:]['OBAR']
+ fs_rms = model_data.loc[:]['FS_RMS']
+ os_rms = model_data.loc[:]['OS_RMS']
+ msve = model_data.loc[:]['MSVE']
+ rmsve = model_data.loc[:]['RMSVE']
+ fstdev = model_data.loc[:]['FSTDEV']
+ ostdev = model_data.loc[:]['OSTDEV']
+ fdir = model_data.loc[:]['FDIR']
+ odir = model_data.loc[:]['ODIR']
+ fbar_speed = model_data.loc[:]['FBAR_SPEED']
+ obar_speed = model_data.loc[:]['OBAR_SPEED']
+ vdiff_speed = model_data.loc[:]['VDIFF_SPEED']
+ vdiff_dir = model_data.loc[:]['VDIFF_DIR']
+ speed_err = model_data.loc[:]['SPEED_ERR']
+ dir_err = model_data.loc[:]['DIR_ERR']
+ elif all(elem in model_data_columns for elem in
+ ['FY_OY', 'FN_ON']):
+ line_type = 'CTC'
+ total = model_data.loc[:]['TOTAL']
+ fy_oy = model_data.loc[:]['FY_OY']
+ fy_on = model_data.loc[:]['FY_ON']
+ fn_oy = model_data.loc[:]['FN_OY']
+ fn_on = model_data.loc[:]['FN_ON']
+ else:
+ logger.error("Could not recognize line type from columns")
+ exit(1)
+ if stat == 'bias':
+ stat_plot_name = 'Bias'
+ if line_type == 'SL1L2':
+ stat_values = fbar - obar
+ elif line_type == 'VL1L2':
+ stat_values = np.sqrt(uvffbar) - np.sqrt(uvoobar)
+ elif line_type == 'VCNT':
+ stat_values = fbar - obar
+ elif line_type == 'CTC':
+ stat_values = (fy_oy + fy_on)/(fy_oy + fn_oy)
+ elif stat == 'rmse':
+ stat_plot_name = 'Root Mean Square Error'
+ if line_type == 'SL1L2':
+ stat_values = np.sqrt(ffbar + oobar - 2*fobar)
+ elif line_type == 'VL1L2':
+ stat_values = np.sqrt(uvffbar + uvoobar - 2*uvfobar)
+ elif stat == 'msess':
+ stat_plot_name = "Murphy's Mean Square Error Skill Score"
+ if line_type == 'SL1L2':
+ mse = ffbar + oobar - 2*fobar
+ var_o = oobar - obar*obar
+ stat_values = 1 - mse/var_o
+ elif line_type == 'VL1L2':
+ mse = uvffbar + uvoobar - 2*uvfobar
+ var_o = uvoobar - uobar*uobar - vobar*vobar
+ stat_values = 1 - mse/var_o
+ elif stat == 'rsd':
+ stat_plot_name = 'Ratio of Standard Deviation'
+ if line_type == 'SL1L2':
+ var_f = ffbar - fbar*fbar
+ var_o = oobar - obar*obar
+ stat_values = np.sqrt(var_f)/np.sqrt(var_o)
+ elif line_type == 'VL1L2':
+ var_f = uvffbar - ufbar*ufbar - vfbar*vfbar
+ var_o = uvoobar - uobar*uobar - vobar*vobar
+ stat_values = np.sqrt(var_f)/np.sqrt(var_o)
+ elif line_type == 'VCNT':
+ stat_values = fstdev/ostdev
+ elif stat == 'rmse_md':
+ stat_plot_name = 'Root Mean Square Error from Mean Error'
+ if line_type == 'SL1L2':
+ stat_values = np.sqrt((fbar-obar)**2)
+ elif line_type == 'VL1L2':
+ stat_values = np.sqrt((ufbar - uobar)**2 + (vfbar - vobar)**2)
+ elif stat == 'rmse_pv':
+ stat_plot_name = 'Root Mean Square Error from Pattern Variation'
+ if line_type == 'SL1L2':
+ var_f = ffbar - fbar**2
+ var_o = oobar - obar**2
+ R = (fobar - (fbar*obar))/(np.sqrt(var_f*var_o))
+ stat_values = np.sqrt(var_f + var_o - 2*np.sqrt(var_f*var_o)*R)
+ elif line_type == 'VL1L2':
+ var_f = uvffbar - ufbar*ufbar - vfbar*vfbar
+ var_o = uvoobar - uobar*uobar - vobar*vobar
+ R = (uvfobar - ufbar*uobar - vfbar*vobar)/(np.sqrt(var_f*var_o))
+ stat_values = np.sqrt(var_f + var_o - 2*np.sqrt(var_f*var_o)*R)
+ elif stat == 'pcor':
+ stat_plot_name = 'Pattern Correlation'
+ if line_type == 'SL1L2':
+ var_f = ffbar - fbar*fbar
+ var_o = oobar - obar*obar
+ stat_values = (fobar - fbar*obar)/(np.sqrt(var_f*var_o))
+ elif line_type == 'VL1L2':
+ var_f = uvffbar - ufbar*ufbar - vfbar*vfbar
+ var_o = uvoobar - uobar*uobar - vobar*vobar
+ stat_values = (uvfobar - ufbar*uobar - vfbar*vobar)/(np.sqrt(
+ var_f*var_o))
+ elif stat == 'acc':
+ stat_plot_name = 'Anomaly Correlation Coefficient'
+ if line_type == 'SAL1L2':
+ stat_values = \
+ (foabar - fabar*oabar)/(np.sqrt(
+ (ffabar - fabar*fabar)*(ooabar - oabar*oabar)))
+ elif line_type == 'VAL1L2':
+ stat_values = (uvfoabar)/(np.sqrt(uvffabar*uvooabar))
+ elif stat == 'fbar':
+ stat_plot_name = 'Forecast Averages'
+ if line_type == 'SL1L2':
+ stat_values = fbar
+ elif line_type == 'VL1L2':
+ stat_values = np.sqrt(uvffbar)
+ elif line_type == 'VCNT':
+ stat_values = fbar
+ elif stat == 'fbar_obar':
+ stat_plot_name = 'Forecast and Observation Averages'
+ if line_type == 'SL1L2':
+ stat_values = model_data.loc[:][['FBAR', 'OBAR']]
+ stat_values_fbar = model_data.loc[:]['FBAR']
+ stat_values_obar = model_data.loc[:]['OBAR']
+ elif line_type == 'VL1L2':
+ stat_values = model_data.loc[:][['UVFFBAR', 'UVOOBAR']]
+ stat_values_fbar = np.sqrt(model_data.loc[:]['UVFFBAR'])
+ stat_values_obar = np.sqrt(model_data.loc[:]['UVOOBAR'])
+ elif line_type == 'VCNT':
+ stat_values = model_data.loc[:][['FBAR', 'OBAR']]
+ stat_values_fbar = model_data.loc[:]['FBAR']
+ stat_values_obar = model_data.loc[:]['OBAR']
+ elif stat == 'speed_err':
+ stat_plot_name = (
+ 'Difference in Average FCST and OBS Wind Vector Speeds'
+ )
+ if line_type == 'VCNT':
+ stat_values = speed_err
+ elif stat == 'dir_err':
+ stat_plot_name = (
+ 'Difference in Average FCST and OBS Wind Vector Direction'
+ )
+ if line_type == 'VCNT':
+ stat_values = dir_err
+ elif stat == 'rmsve':
+ stat_plot_name = 'Root Mean Square Difference Vector Error'
+ if line_type == 'VCNT':
+ stat_values = rmsve
+ elif stat == 'vdiff_speed':
+ stat_plot_name = 'Difference Vector Speed'
+ if line_type == 'VCNT':
+ stat_values = vdiff_speed
+ elif stat == 'vdiff_dir':
+ stat_plot_name = 'Difference Vector Direction'
+ if line_type == 'VCNT':
+ stat_values = vdiff_dir
+ elif stat == 'fbar_obar_speed':
+ stat_plot_name = 'Average Wind Vector Speed'
+ if line_type == 'VCNT':
+                stat_values = model_data.loc[:][['FBAR_SPEED', 'OBAR_SPEED']]
+ elif stat == 'fbar_obar_dir':
+ stat_plot_name = 'Average Wind Vector Direction'
+ if line_type == 'VCNT':
+                stat_values = model_data.loc[:][['FDIR', 'ODIR']]
+ elif stat == 'fbar_speed':
+ stat_plot_name = 'Average Forecast Wind Vector Speed'
+ if line_type == 'VCNT':
+ stat_values = fbar_speed
+ elif stat == 'fbar_dir':
+ stat_plot_name = 'Average Forecast Wind Vector Direction'
+ if line_type == 'VCNT':
+ stat_values = fdir
+ elif stat == 'orate' or stat == 'baser':
+ if stat == 'orate':
+ stat_plot_name = 'Observation Rate'
+ elif stat == 'baser':
+ stat_plot_name = 'Base Rate'
+ if line_type == 'CTC':
+ stat_values = (fy_oy + fn_oy)/total
+ elif stat == 'frate':
+ stat_plot_name = 'Forecast Rate'
+ if line_type == 'CTC':
+ stat_values = (fy_oy + fy_on)/total
+ elif stat == 'orate_frate' or stat == 'baser_frate':
+ if stat == 'orate_frate':
+ stat_plot_name = 'Observation and Forecast Rates'
+ elif stat == 'baser_frate':
+ stat_plot_name = 'Base and Forecast Rates'
+ if line_type == 'CTC':
+ stat_values_fbar = (fy_oy + fy_on)/total
+ stat_values_obar = (fy_oy + fn_oy)/total
+ stat_values = pd.concat([stat_values_fbar, stat_values_obar],
+ axis=1)
+ elif stat == 'accuracy':
+ stat_plot_name = 'Accuracy'
+ if line_type == 'CTC':
+ stat_values = (fy_oy + fn_on)/total
+ elif stat == 'fbias':
+ stat_plot_name = 'Frequency Bias'
+ if line_type == 'CTC':
+ stat_values = (fy_oy + fy_on)/(fy_oy + fn_oy)
+ elif stat == 'pod' or stat == 'hrate':
+ if stat == 'pod':
+ stat_plot_name = 'Probability of Detection'
+ elif stat == 'hrate':
+ stat_plot_name = 'Hit Rate'
+ if line_type == 'CTC':
+ stat_values = fy_oy/(fy_oy + fn_oy)
+ elif stat == 'pofd' or stat == 'farate':
+ if stat == 'pofd':
+ stat_plot_name = 'Probability of False Detection'
+ elif stat == 'farate':
+ stat_plot_name = 'False Alarm Rate'
+ if line_type == 'CTC':
+ stat_values = fy_on/(fy_on + fn_on)
+ elif stat == 'podn':
+ stat_plot_name = 'Probability of Detection of the Non-Event'
+ if line_type == 'CTC':
+ stat_values = fn_on/(fy_on + fn_on)
+ elif stat == 'faratio':
+ stat_plot_name = 'False Alarm Ratio'
+ if line_type == 'CTC':
+ stat_values = fy_on/(fy_on + fy_oy)
+ elif stat == 'csi' or stat == 'ts':
+ if stat == 'csi':
+ stat_plot_name = 'Critical Success Index'
+ elif stat == 'ts':
+ stat_plot_name = 'Threat Score'
+ if line_type == 'CTC':
+ stat_values = fy_oy/(fy_oy + fy_on + fn_oy)
+ elif stat == 'gss' or stat == 'ets':
+ if stat == 'gss':
+ stat_plot_name = 'Gilbert Skill Score'
+ elif stat == 'ets':
+ stat_plot_name = 'Equitable Threat Score'
+ if line_type == 'CTC':
+ C = ((fy_oy + fy_on)*(fy_oy + fn_oy))/total
+ stat_values = (fy_oy - C)/(fy_oy + fy_on+ fn_oy - C)
+ elif stat == 'hk' or stat == 'tss' or stat == 'pss':
+ if stat == 'hk':
+ stat_plot_name = 'Hanssen-Kuipers Discriminant'
+ elif stat == 'tss':
+ stat_plot_name = 'True Skill Score'
+ elif stat == 'pss':
+ stat_plot_name = 'Peirce Skill Score'
+ if line_type == 'CTC':
+ stat_values = (
+ ((fy_oy*fn_on)-(fy_on*fn_oy))/((fy_oy+fn_oy)*(fy_on+fn_on))
+ )
+ elif stat == 'hss':
+ stat_plot_name = 'Heidke Skill Score'
+ if line_type == 'CTC':
+ Ca = (fy_oy+fy_on)*(fy_oy+fn_oy)
+ Cb = (fn_oy+fn_on)*(fy_on+fn_on)
+ C = (Ca + Cb)/total
+ stat_values = (fy_oy + fn_on - C)/(total - C)
+ else:
+ logger.error(stat+" is not a valid option")
+ exit(1)
+ nindex = stat_values.index.nlevels
+ if stat == 'fbar_obar' or stat == 'orate_frate' or stat == 'baser_frate':
+ if nindex == 1:
+ index0 = len(stat_values_fbar.index.get_level_values(0).unique())
+ stat_values_array_fbar = (
+ np.ma.masked_invalid(
+ stat_values_fbar.values.reshape(index0)
+ )
+ )
+ index0 = len(stat_values_obar.index.get_level_values(0).unique())
+ stat_values_array_obar = (
+ np.ma.masked_invalid(
+ stat_values_obar.values.reshape(index0)
+ )
+ )
+ elif nindex == 2:
+ index0 = len(stat_values_fbar.index.get_level_values(0).unique())
+ index1 = len(stat_values_fbar.index.get_level_values(1).unique())
+ stat_values_array_fbar = (
+ np.ma.masked_invalid(
+ stat_values_fbar.values.reshape(index0,index1)
+ )
+ )
+ index0 = len(stat_values_obar.index.get_level_values(0).unique())
+ index1 = len(stat_values_obar.index.get_level_values(1).unique())
+ stat_values_array_obar = (
+ np.ma.masked_invalid(
+ stat_values_obar.values.reshape(index0,index1)
+ )
+ )
+ elif nindex == 3:
+ index0 = len(stat_values_fbar.index.get_level_values(0).unique())
+ index1 = len(stat_values_fbar.index.get_level_values(1).unique())
+ index2 = len(stat_values_fbar.index.get_level_values(2).unique())
+ stat_values_array_fbar = (
+ np.ma.masked_invalid(
+ stat_values_fbar.values.reshape(index0,index1,index2)
+ )
+ )
+ index0 = len(stat_values_obar.index.get_level_values(0).unique())
+ index1 = len(stat_values_obar.index.get_level_values(1).unique())
+ index2 = len(stat_values_obar.index.get_level_values(2).unique())
+ stat_values_array_obar = (
+ np.ma.masked_invalid(
+ stat_values_obar.values.reshape(index0,index1,index2)
+ )
+ )
+ stat_values_array = np.ma.array([stat_values_array_fbar,
+ stat_values_array_obar])
+ else:
+ if nindex == 1:
+ index0 = len(stat_values.index.get_level_values(0).unique())
+ stat_values_array = (
+ np.ma.masked_invalid(
+ stat_values.values.reshape(1,index0)
+ )
+ )
+ elif nindex == 2:
+ index0 = len(stat_values.index.get_level_values(0).unique())
+ index1 = len(stat_values.index.get_level_values(1).unique())
+ stat_values_array = (
+ np.ma.masked_invalid(
+ stat_values.values.reshape(1,index0,index1)
+ )
+ )
+ elif nindex == 3:
+ index0 = len(stat_values.index.get_level_values(0).unique())
+ index1 = len(stat_values.index.get_level_values(1).unique())
+ index2 = len(stat_values.index.get_level_values(2).unique())
+ stat_values_array = (
+ np.ma.masked_invalid(
+ stat_values.values.reshape(1,index0,index1,index2)
+ )
+ )
+ return stat_values, stat_values_array, stat_plot_name
+
+def get_lead_avg_file(stat, input_filename, fcst_lead, output_base_dir):
+ lead_avg_filename = stat + '_' + os.path.basename(input_filename) \
+ .replace('_dump_row.stat', '')
+ # if fcst_leadX is in filename, replace it with fcst_lead_avgs
+ # and add .txt to end of filename
+ if f'fcst_lead{fcst_lead}' in lead_avg_filename:
+ lead_avg_filename = (
+ lead_avg_filename.replace(f'fcst_lead{fcst_lead}',
+ 'fcst_lead_avgs')
+ )
+ lead_avg_filename += '.txt'
+
+ # if not, remove mention of forecast lead and
+ # add fcst_lead_avgs.txt to end of filename
+ elif 'fcst_lead_avgs' not in input_filename:
+ lead_avg_filename = lead_avg_filename.replace(f'fcst_lead{fcst_lead}',
+ '')
+ lead_avg_filename += '_fcst_lead_avgs.txt'
+
+ lead_avg_file = os.path.join(output_base_dir, 'data',
+ lead_avg_filename)
+ return lead_avg_file
+
+def get_ci_file(stat, input_filename, fcst_lead, output_base_dir, ci_method):
+ CI_filename = stat + '_' + os.path.basename(input_filename) \
+ .replace('_dump_row.stat', '')
+ # if fcst_leadX is in filename, replace it with fcst_lead_avgs
+ # and add .txt to end of filename
+ if f'fcst_lead{fcst_lead}' in CI_filename:
+ CI_filename = CI_filename.replace(f'fcst_lead{fcst_lead}',
+ 'fcst_lead_avgs')
+
+ # if not and fcst_lead_avgs isn't already in filename,
+ # remove mention of forecast lead and
+ # add fcst_lead_avgs.txt to end of filename
+ elif 'fcst_lead_avgs' not in CI_filename:
+ CI_filename = CI_filename.replace(f'fcst_lead{fcst_lead}',
+ '')
+ CI_filename += '_fcst_lead_avgs'
+
+ CI_filename += '_CI_' + ci_method + '.txt'
+
+ CI_file = os.path.join(output_base_dir, 'data',
+ CI_filename)
+ return CI_file
diff --git a/ush/hurricane/stats/tcgen_performance_diagram.py b/ush/hurricane/stats/tcgen_performance_diagram.py
new file mode 100755
index 0000000000..91ba5d893f
--- /dev/null
+++ b/ush/hurricane/stats/tcgen_performance_diagram.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+
+###############################################################
+# This script defines and calls a function to create a #
+# blank performance diagram, as introduced in Roebber (2009). #
+# Commented lines 64-68 provide the syntax for plotting text #
+# and markers on the diagram. This script is provided as is. #
+# Author: Dan Halperin #
+###############################################################
+
+#Import necessary modules
+import numpy as np
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import sys
+import subprocess
+import os
+
+#Function definition
+def get_bias_label_position(bias_value, radius):
+ x = np.sqrt(np.power(radius, 2)/(np.power(bias_value, 2)+1))
+ y = np.sqrt(np.power(radius, 2) - np.power(x, 2))
+ return (x, y)
+
+def performance_diag():
+ #INPUT file with CTC line
+ CTCfile01 = os.environ['CTCfile01']
+ row = np.loadtxt(CTCfile01,dtype=str)
+ total = float(row[24])
+ hit = float(row[25])
+ falsea = float(row[26])
+ miss = float(row[27])
+ POD = hit/(hit + miss)
+ FAR = falsea/(hit + falsea)
+ SUR = 1.0 - FAR
+# print(POD)
+# print(FAR)
+# print(SUR)
+ POD1 = POD + 0.03
+
+ CTCfile02 = os.environ['CTCfile02']
+ xrow = np.loadtxt(CTCfile02,dtype=str)
+ xtotal = float(xrow[24])
+ xhit = float(xrow[25])
+ xfalsea = float(xrow[26])
+ xmiss = float(xrow[27])
+ xPOD = xhit/(xhit + xmiss)
+ xFAR = xfalsea/(xhit + xfalsea)
+ xSUR = 1.0 - xFAR
+ xPOD1 = xPOD + 0.03
+
+ CTCfile03 = os.environ['CTCfile03']
+ yrow = np.loadtxt(CTCfile03,dtype=str)
+ ytotal = float(yrow[24])
+ yhit = float(yrow[25])
+ yfalsea = float(yrow[26])
+ ymiss = float(yrow[27])
+ yPOD = yhit/(yhit + ymiss)
+ yFAR = yfalsea/(yhit + yfalsea)
+ ySUR = 1.0 - yFAR
+ yPOD1 = yPOD + 0.03
+
+ #Output file name
+ outf = "tcgen_performance_diagram.png"
+
+ #Define figure, plot axis
+ fig = plt.figure(figsize=(8,8))
+ ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
+
+ #Define range for x, y; create 2D array of x, y values
+ x = np.arange(0,1.01,0.01)
+ y = np.arange(0,1.01,0.01)
+ xi,yi = np.meshgrid(x, y)
+
+ #Calculate bias and CSI; set contour levels
+ bias = yi/xi
+ blevs = [0.1, 0.25, 0.5, 0.75, 1, 1.25, 2.5, 5, 10]
+ csi = 1/( (1/xi) + (1/yi) - 1 )
+ csilevs = np.arange(0.1,1,0.1)
+
+ #The second way-Calculate bias and CSI; set contour levels
+ grid_ticks = np.arange(0, 1.01, 0.01)
+ sr_g, pod_g = np.meshgrid(grid_ticks, grid_ticks)
+ biasp = pod_g / sr_g
+# csip = 1.0 / (1.0 / sr_g + 1.0 / pod_g - 1.0)
+# csi_cmap="Blues"
+# csi_contour = plt.contourf(sr_g, pod_g, csip, np.arange(0.1, 1.1, 0.1), extend="max", cmap=csi_cmap)
+# cbar = plt.colorbar(csi_contour)
+# csi_label="Critical Success Index"
+# cbar.set_label(csi_label, fontsize=14)
+
+ bias_contour_vals = [0.1, 0.3, 0.6, 1., 1.5, 3., 10.]
+ b_contour = plt.contour(sr_g, pod_g, biasp, bias_contour_vals,colors='gray', linestyles='--')
+ plt.clabel(
+ b_contour, fmt='%1.1f',
+ manual=[
+ get_bias_label_position(bias_value, .75)
+ for bias_value in bias_contour_vals
+ ]
+ )
+
+ #Axis labels, tickmarks
+ ax.set_xlabel('Success Ratio (1 - False Alarm Ratio)',fontsize=12,fontweight='bold')
+ ax.set_ylabel('Probability of Detection',fontsize=12,fontweight='bold')
+ ax.set_xticks(np.arange(0,1.1,0.1))
+ ax.set_yticks(np.arange(0,1.1,0.1))
+ plt.setp(ax.get_xticklabels(),fontsize=13)
+ plt.setp(ax.get_yticklabels(),fontsize=13)
+
+ #Second y-axis for bias values < 1
+# ax2 = ax.twinx()
+# ax2.set_yticks(blevs[0:5])
+# plt.setp(ax2.get_yticklabels(),fontsize=13)
+
+ #Axis labels for bias values > 1
+# ax.text(0.1,1.015,'10',fontsize=13,va='center',ha='center')
+# ax.text(0.2,1.015,'5',fontsize=13,va='center',ha='center')
+# ax.text(0.4,1.015,'2.5',fontsize=13,va='center',ha='center')
+# ax.text(0.8,1.015,'1.25',fontsize=13,va='center',ha='center')
+
+ #Plot bias and CSI lines at specified contour intervals
+# cbias = ax.contour(x,y,bias,blevs,colors='black',linewidths=1,linestyles='--')
+ ccsi = ax.contour(x,y,csi,csilevs,colors='gray',linewidths=1,linestyles='-')
+ plt.clabel(ccsi,csilevs,inline=True,fmt='%.1f',fontsize=10,colors='black')
+
+ #Test/sample markers/text
+ ax.plot(SUR,POD,marker='o',markersize=10,c='black')
+# ax.text(SUR,POD1,'GFS',fontsize=11,fontweight='bold',ha='center',va='center',color='black')
+
+ ax.plot(xSUR,xPOD,marker='o',markersize=10,c='red')
+# ax.text(xSUR,xPOD1,'ECMWF',fontsize=11,fontweight='bold',ha='center',va='center',color='red')
+
+ ax.plot(ySUR,yPOD,marker='o',markersize=10,c='blue')
+# ax.text(ySUR,yPOD1,'CMC',fontsize=11,fontweight='bold',ha='center',va='center',color='blue')
+
+ ax.plot(0.3,0.95,marker='o',markersize=6,c='black')
+ ax.text(0.35,0.95,'GFS',fontsize=6,fontweight='bold',ha='center',va='center',color='black')
+
+ ax.plot(0.5,0.95,marker='o',markersize=6,c='red')
+ ax.text(0.55,0.95,'ECMWF',fontsize=6,fontweight='bold',ha='center',va='center',color='red')
+
+ ax.plot(0.7,0.95,marker='o',markersize=6,c='blue')
+ ax.text(0.75,0.95,'CMC',fontsize=6,fontweight='bold',ha='center',va='center',color='blue')
+
+# title="Performance Diagram"
+# plt.title(title, fontsize=12, fontweight="bold")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays, fontsize=12, fontweight="bold")
+
+ #Save, close, and trim whitespace around plot
+# plt.savefig(outf,pad_inches=0.01, orientation='landscape')
+ plt.savefig(outf,dpi=160,bbox_inches='tight')
+ plt.close()
+# subprocess.call("convert -trim "+outf+" "+outf, shell=True)
+
+#Function call
+performance_diag()
+
+sys.exit()
diff --git a/ush/hurricane/stats/tcgen_space_al.py b/ush/hurricane/stats/tcgen_space_al.py
new file mode 100755
index 0000000000..d500bf85f9
--- /dev/null
+++ b/ush/hurricane/stats/tcgen_space_al.py
@@ -0,0 +1,115 @@
+##Python script to plot TC genesis/HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+###FOR THE INTERESTED DOMAIN PART###############
+##e.g Alaska, CONUS etc
+## Map proj are different for different areas.
+domain = "atlantic"
+if(domain == "atlantic"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-55.0))
+# ax.set_extent([260, 370, -5, 65], crs=ccrs.PlateCarree())
+ ax.set_extent([260, 370, -5, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-130, -120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10]
+# yticks = [-5, 0, 10, 20, 30, 40, 50, 60, 65]
+ yticks = [-5, 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a roundabout
+###for the known issues with
+###cartopy LCC/Northpolarstero projection.
+ plt.annotate("0 " , (0,0), (-6,18), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,52), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,85), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,120), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,157), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("50N " , (0,0), (-15,198), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+# plt.annotate("60N " , (0,0), (-15,245), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("90W " , (0,0), (27,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("80W " , (0,0), (60,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("70W " , (0,0), (92,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("60W " , (0,0), (125,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("50W " , (0,0), (157,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40W " , (0,0), (189,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30W " , (0,0), (220,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20W " , (0,0), (253,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("0 " , (0,0), (323,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################atlantic##################################################
+ hitfile = os.environ['hitfile']
+    hits = np.loadtxt(hitfile,dtype=str,ndmin=2)
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ falsefile = os.environ['falsefile']
+    fals = np.loadtxt(falsefile,dtype=str,ndmin=2)
+ numfals = 0
+ numfals = numfals + len(fals)
+    #Plot each false alarm event
+ for i in range(len(fals)):
+ lat1 = float(fals[i,31])
+ lon1 = float(fals[i,32]) + 360.
+ plt.scatter(lon1, lat1,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(346, 47.5,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.scatter(346, 45,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (286,186), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+ plt.annotate("False alarms ("+str(numfals)+")", (0,0), (286,177), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+sys.exit(0)
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/tcgen_space_ep.py b/ush/hurricane/stats/tcgen_space_ep.py
new file mode 100755
index 0000000000..6c5a493aea
--- /dev/null
+++ b/ush/hurricane/stats/tcgen_space_ep.py
@@ -0,0 +1,112 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "eastpac"
+if(domain == "eastpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=-60.0))
+# ax.set_extent([180, 290, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([180, 290, 0, 50], crs=ccrs.PlateCarree())
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [-70, -80, -90, -100, -110, -120, -130, -140, -150, -160, -170, -180]
+# yticks = [ 0, 10, 20, 30, 40, 50, 60]
+ yticks = [ 0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.30, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color='black', linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+########This part is a roundabout
+###for the known issues with
+###cartopy LCC/North Polar Stereographic projections.
+# plt.annotate("50N " , (0,0), (-15,180), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,140), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,104), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,69), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,35), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("80W " , (0,0), (318,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("90W " , (0,0), (286,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("100W " , (0,0), (250,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("110W " , (0,0), (218,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120W " , (0,0), (185,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130W " , (0,0), (152,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140W " , (0,0), (119,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150W " , (0,0), (87,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160W " , (0,0), (55,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170W " , (0,0), (23,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+###############END OF AREAS##################################
+ hitfile = os.environ['hitfile']
+ hits = np.loadtxt(hitfile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ falsefile = os.environ['falsefile']
+ fals = np.loadtxt(falsefile,dtype=str)
+ numfals = 0
+ numfals = numfals + len(fals)
+ #Plot each false-alarm event
+ for i in range(len(fals)):
+ lat1 = float(fals[i,31])
+ lon1 = float(fals[i,32]) + 360.
+ plt.scatter(lon1, lat1,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(185, 47,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.scatter(185, 45,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (20,168), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+ plt.annotate("False alarms ("+str(numfals)+")", (0,0), (20,160), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+# plt.title(f"East Pacific TC Genesis Hits")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+sys.exit(0)
+ # ----------------------------
+
diff --git a/ush/hurricane/stats/tcgen_space_wp.py b/ush/hurricane/stats/tcgen_space_wp.py
new file mode 100755
index 0000000000..d8e653da9c
--- /dev/null
+++ b/ush/hurricane/stats/tcgen_space_wp.py
@@ -0,0 +1,112 @@
+##Python script to plot TC genesis HITS
+
+from __future__ import print_function
+
+#import tkinter as tk
+import os
+import sys
+import glob
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from ast import literal_eval
+import ast
+import datetime
+import matplotlib.path as mpath
+import matplotlib.ticker as mticker
+
+import cartopy
+import cartopy.crs as ccrs
+from cartopy.feature import GSHHSFeature
+from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
+import cartopy.feature as cfeature
+
+cartopyDataDir = os.environ['cartopyDataDir']
+cartopy.config['data_dir'] = cartopyDataDir
+
+# RE -------------------
+#Initialize the empty Plot (only need to call the figure to write the map.)
+fig = plt.figure()
+
+domain = "westpac"
+if(domain == "westpac"):
+ ax = plt.axes(projection=ccrs.Miller(central_longitude=80.0))
+# ax.set_extent([100, 180, 0, 60], crs=ccrs.PlateCarree())
+ ax.set_extent([100, 180, 0, 50], crs=ccrs.PlateCarree())
+
+##PLS NOTE THAT THE ax_extent is diff from tick marks and that is by design
+ xticks = [100, 110, 120, 130, 140, 150, 160, 170, 180]
+# yticks = [0, 10, 20, 30, 40, 50, 60]
+ yticks = [0, 10, 20, 30, 40, 50]
+ ax.gridlines(xlocs=xticks, ylocs=yticks,color='gray', alpha=0.9, linestyle='--')
+###Add topography, lakes, borders etc
+ ax.coastlines('10m',linewidth=0.80, color='black')
+ ax.add_feature(cfeature.GSHHSFeature('low', levels=[2],
+ facecolor='white'), color=cfeature.COLORS['water']
+, linewidth=0.1)
+ land_10m = cfeature.NaturalEarthFeature('physical', 'land', '10m',
+ edgecolor='face',
+ facecolor='None')
+ ax.add_feature(cfeature.LAKES)
+ ax.add_feature(cfeature.BORDERS)
+ ax.add_feature(land_10m)
+
+########This part is a roundabout
+###for the known issues with
+###cartopy LCC/North Polar Stereographic projections.
+# plt.annotate("50N " , (0,0), (-15,215), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("40N " , (0,0), (-15,191), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("30N " , (0,0), (-15,141), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("20N " , (0,0), (-15,94), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("10N " , (0,0), (-15,47), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+ plt.annotate("110E " , (0,0), (36,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("120E " , (0,0), (83,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("130E " , (0,0), (128,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("140E " , (0,0), (171,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("150E " , (0,0), (216,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("160E " , (0,0), (262,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+ plt.annotate("170E " , (0,0), (305,-5), xycoords='axes fraction', textcoords='offset points', va='top', color='Black', fontsize=6.5)
+
+######################asia##################################################
+ hitfile = os.environ['hitfile']
+ hits = np.loadtxt(hitfile,dtype=str)
+
+ numhits = 0
+ numhits = numhits + len(hits)
+ #Plot each hit event
+ for i in range(len(hits)):
+ lat = float(hits[i,31])
+ lon = float(hits[i,32]) + 360.
+ print(lat)
+ print(lon)
+ plt.scatter(lon, lat,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+
+ falsefile = os.environ['falsefile']
+ fals = np.loadtxt(falsefile,dtype=str)
+ numfals = 0
+ numfals = numfals + len(fals)
+ #Plot each false-alarm event
+ for i in range(len(fals)):
+ lat1 = float(fals[i,31])
+ lon1 = float(fals[i,32]) + 360.
+ plt.scatter(lon1, lat1,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+
+ plt.scatter(160, 47,transform=ccrs.PlateCarree(), marker='o', color='green',s=12, facecolor='none')
+ plt.scatter(160, 45.5,transform=ccrs.PlateCarree(), marker='s', color='red',s=12, facecolor='none')
+ plt.annotate("Hits ("+str(numhits)+")", (0,0), (272,230), xycoords='axes fraction', textcoords='offset points', va='top', color='Green', fontsize=6.5)
+ plt.annotate("False alarms ("+str(numfals)+")", (0,0), (272,221), xycoords='axes fraction', textcoords='offset points', va='top', color='Red', fontsize=6.5)
+
+# plt.title(f"West Pacific TC Genesis Hits")
+ TCGENdays = os.environ['TCGENdays']
+ plt.title(TCGENdays)
+####################################################################
+##The plt is saved as png and converted to gif in the bash script.
+
+plt.savefig('TC_genesis.png', dpi=160,bbox_inches='tight')
+
+sys.exit(0)
+ # ----------------------------
+
diff --git a/versions/run.ver b/versions/run.ver
old mode 100644
new mode 100755
index e933d124b6..d6deb5c39d
--- a/versions/run.ver
+++ b/versions/run.ver
@@ -1,31 +1,20 @@
-export evs_ver=1.0
+export evs_ver=1.0.0
-export PrgEnvintel_ver=8.1.0
-export craype_ver=2.7.8
export intel_ver=19.1.3.304
-export craympich_ver=8.1.7
-export craypals_ver=1.0.12
-export wgrib2_ver=2.0.8
-export grib_util_ver=1.2.2
-export bufr_dump_ver=1.0.0
+export gsl_ver=2.7
export python_ver=3.8.6
export netcdf_ver=4.7.4
-export met_ver=10.1.0
-export metplus_ver=4.1.0
+#export met_ver=10.1.1
+#export metplus_ver=4.1.1
+export met_ver=10.1.2
+export metplus_ver=4.1.4
-export nam_ver=v4.2
-export sref_ver=v7.1
-export rap_ver=v5.1
-export aqm_ver=v6.1
-export hrrr_ver=v4.1
-export gfs_ver=v16.2
-export gefs_ver=v12.2
-export hiresw_ver=v8.1
-export narre_ver=v1.2
-export rtma_ver=v2.9
-export urma_ver=v2.9
-export hourly_ver=v0.0
+export libjpeg_ver=9c
+export grib_util_ver=1.2.2
+export produtil_ver=2.0.13
+export prodenvir_ver=2.0.6
-export COMPATH=/lfs/h1/ops/prod/com/gfs/:/lfs/h1/ops/prod/com/hrrr/:/lfs/h1/ops/prod/com/rtma/:/lfs/h1/ops/prod/com/aqm/:/lfs/h1/ops/prod/com/gefs/:/lfs/h1/ops/prod/com/nam/:/lfs/h1/ops/prod/com/urma/:/lfs/h1/ops/prod/com/narre/:/lfs/h1/ops/prod/com/hiresw/:/lfs/h1/ops/prod/com/rap/:/lfs/h1/ops/prod/com/sref/:/lfs/h1/ops/prod/com/hourly/
-export DATAFS=h1
-export DCOMROOT=/lfs/h1/ops/prod/dcom
+export geos_ver=3.8.1
+export proj_ver=7.1.0
+export libfabric_ver=1.11.0.0
+export imagemagick_ver=7.0.8-7