diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index c147a4ef79..3550a30a5c 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -167,7 +167,7 @@ jobs: strategy: matrix: tests: - - 'ascii2nc_indy pb2nc_indy tc_dland tc_pairs tc_stat plot_tc tc_rmw rmw_analysis tc_gen' + - 'ascii2nc_indy pb2nc_indy tc_dland tc_pairs tc_stat plot_tc tc_rmw rmw_analysis tc_diag tc_gen' - 'met_test_scripts mode_multivar mode_graphics mtd regrid airnow gsi_tools netcdf modis series_analysis gen_ens_prod wwmca_regrid gen_vx_mask grid_weight interp_shape grid_diag grib_tables lidar2nc shift_data_plane trmm2nc aeronet wwmca_plot ioda2nc gaussian' fail-fast: false steps: diff --git a/INSTALL b/INSTALL index 82fb383f4d..14000719f8 100644 --- a/INSTALL +++ b/INSTALL @@ -219,6 +219,7 @@ sub-directory, or the MET Online Tutorial: - series_analysis - shift_data_plane - stat_analysis + - tc_diag - tc_dland - tc_gen - tc_pairs diff --git a/README.md b/README.md index bb6e08f57e..a91bcc55ba 100644 --- a/README.md +++ b/README.md @@ -9,3 +9,5 @@ Model Evaluation Tools (MET) Repository This repository contains the source code for the Model Evaluation Tools package. Please see the [MET website](https://dtcenter.org/community-code/model-evaluation-tools-met) and the [MET User's Guide](https://met.readthedocs.io/en/latest) for more information. Support for the METplus components is provided through the [METplus Discussions](https://github.com/dtcenter/METplus/discussions) forum. Users are welcome and encouraged to answer or address each other's questions there! For more information, please read "[Welcome to the METplus Components Discussions](https://github.com/dtcenter/METplus/discussions/939)". + +For information about the support provided for releases, see our [Release Support Policy](https://metplus.readthedocs.io/en/develop/Release_Guide/index.html#release-support-policy). 
diff --git a/configure b/configure index 12f9307754..7f4e796d1c 100755 --- a/configure +++ b/configure @@ -8877,7 +8877,7 @@ done # Create configured files -ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/python/examples/Makefile scripts/python/met/Makefile scripts/python/pyembed/Makefile scripts/python/utility/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile 
src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile src/tools/core/point_stat/Makefile src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile" +ac_config_files="$ac_config_files Makefile scripts/Rscripts/Makefile scripts/Rscripts/include/Makefile scripts/python/Makefile scripts/python/examples/Makefile scripts/python/met/Makefile scripts/python/pyembed/Makefile scripts/python/utility/Makefile scripts/python/tc_diag/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile data/colortables/Makefile 
data/colortables/NCL_colortables/Makefile data/config/Makefile data/map/Makefile data/map/admin_by_country/Makefile data/poly/Makefile data/poly/HMT_masks/Makefile data/poly/NCEP_masks/Makefile data/ps/Makefile data/table_files/Makefile data/tc_data/Makefile src/Makefile src/basic/Makefile src/basic/enum_to_string/Makefile src/basic/vx_cal/Makefile src/basic/vx_config/Makefile src/basic/vx_log/Makefile src/basic/vx_math/Makefile src/basic/vx_util/Makefile src/basic/vx_util_math/Makefile src/libcode/Makefile src/libcode/vx_afm/Makefile src/libcode/vx_analysis_util/Makefile src/libcode/vx_color/Makefile src/libcode/vx_data2d/Makefile src/libcode/vx_data2d_factory/Makefile src/libcode/vx_data2d_grib/Makefile src/libcode/vx_data2d_grib2/Makefile src/libcode/vx_data2d_nc_met/Makefile src/libcode/vx_data2d_nc_pinterp/Makefile src/libcode/vx_data2d_nccf/Makefile src/libcode/vx_geodesy/Makefile src/libcode/vx_gis/Makefile src/libcode/vx_gnomon/Makefile src/libcode/vx_grid/Makefile src/libcode/vx_gsl_prob/Makefile src/libcode/vx_nav/Makefile src/libcode/vx_solar/Makefile src/libcode/vx_nc_obs/Makefile src/libcode/vx_nc_util/Makefile src/libcode/vx_pb_util/Makefile src/libcode/vx_plot_util/Makefile src/libcode/vx_ps/Makefile src/libcode/vx_pxm/Makefile src/libcode/vx_render/Makefile src/libcode/vx_shapedata/Makefile src/libcode/vx_stat_out/Makefile src/libcode/vx_statistics/Makefile src/libcode/vx_time_series/Makefile src/libcode/vx_physics/Makefile src/libcode/vx_series_data/Makefile src/libcode/vx_regrid/Makefile src/libcode/vx_tc_util/Makefile src/libcode/vx_summary/Makefile src/libcode/vx_python3_utils/Makefile src/libcode/vx_data2d_python/Makefile src/libcode/vx_bool_calc/Makefile src/libcode/vx_pointdata_python/Makefile src/libcode/vx_seeps/Makefile src/tools/Makefile src/tools/core/Makefile src/tools/core/ensemble_stat/Makefile src/tools/core/grid_stat/Makefile src/tools/core/mode/Makefile src/tools/core/mode_analysis/Makefile src/tools/core/pcp_combine/Makefile 
src/tools/core/point_stat/Makefile src/tools/core/series_analysis/Makefile src/tools/core/stat_analysis/Makefile src/tools/core/wavelet_stat/Makefile src/tools/other/Makefile src/tools/other/ascii2nc/Makefile src/tools/other/lidar2nc/Makefile src/tools/other/gen_ens_prod/Makefile src/tools/other/gen_vx_mask/Makefile src/tools/other/gis_utils/Makefile src/tools/other/ioda2nc/Makefile src/tools/other/madis2nc/Makefile src/tools/other/mode_graphics/Makefile src/tools/other/modis_regrid/Makefile src/tools/other/pb2nc/Makefile src/tools/other/plot_data_plane/Makefile src/tools/other/plot_point_obs/Makefile src/tools/other/wwmca_tool/Makefile src/tools/other/gsi_tools/Makefile src/tools/other/regrid_data_plane/Makefile src/tools/other/point2grid/Makefile src/tools/other/shift_data_plane/Makefile src/tools/other/mode_time_domain/Makefile src/tools/other/grid_diag/Makefile src/tools/tc_utils/Makefile src/tools/tc_utils/tc_dland/Makefile src/tools/tc_utils/tc_pairs/Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile src/tools/tc_utils/tc_rmw/Makefile src/tools/tc_utils/tc_diag/Makefile" if test -n "$MET_DEVELOPMENT"; then @@ -9769,6 +9769,7 @@ do "scripts/python/met/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/met/Makefile" ;; "scripts/python/pyembed/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/pyembed/Makefile" ;; "scripts/python/utility/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/utility/Makefile" ;; + "scripts/python/tc_diag/Makefile") CONFIG_FILES="$CONFIG_FILES scripts/python/tc_diag/Makefile" ;; "data/Makefile") CONFIG_FILES="$CONFIG_FILES data/Makefile" ;; "data/climo/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/Makefile" ;; "data/climo/seeps/Makefile") CONFIG_FILES="$CONFIG_FILES data/climo/seeps/Makefile" ;; @@ -9869,6 +9870,7 @@ do "src/tools/tc_utils/tc_gen/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/tc_utils/tc_gen/Makefile" ;; 
"src/tools/tc_utils/rmw_analysis/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/tc_utils/rmw_analysis/Makefile" ;; "src/tools/tc_utils/tc_rmw/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/tc_utils/tc_rmw/Makefile" ;; + "src/tools/tc_utils/tc_diag/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/tc_utils/tc_diag/Makefile" ;; "src/tools/dev_utils/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/dev_utils/Makefile" ;; "src/tools/dev_utils/shapefiles/Makefile") CONFIG_FILES="$CONFIG_FILES src/tools/dev_utils/shapefiles/Makefile" ;; "internal/test_util/Makefile") CONFIG_FILES="$CONFIG_FILES internal/test_util/Makefile" ;; diff --git a/configure.ac b/configure.ac index e54e5ea3b3..28744a1f22 100644 --- a/configure.ac +++ b/configure.ac @@ -1211,6 +1211,7 @@ AC_CONFIG_FILES([Makefile scripts/python/met/Makefile scripts/python/pyembed/Makefile scripts/python/utility/Makefile + scripts/python/tc_diag/Makefile data/Makefile data/climo/Makefile data/climo/seeps/Makefile @@ -1310,7 +1311,8 @@ AC_CONFIG_FILES([Makefile src/tools/tc_utils/tc_stat/Makefile src/tools/tc_utils/tc_gen/Makefile src/tools/tc_utils/rmw_analysis/Makefile - src/tools/tc_utils/tc_rmw/Makefile]) + src/tools/tc_utils/tc_rmw/Makefile + src/tools/tc_utils/tc_diag/Makefile]) if test -n "$MET_DEVELOPMENT"; then AC_CONFIG_FILES([src/tools/dev_utils/Makefile diff --git a/data/config/Ascii2NcConfig_default b/data/config/Ascii2NcConfig_default index 4313b37379..b473cb1757 100644 --- a/data/config/Ascii2NcConfig_default +++ b/data/config/Ascii2NcConfig_default @@ -37,12 +37,15 @@ message_type_map = [ { key = "FM-12 SYNOP"; val = "ADPSFC"; }, { key = "FM-13 SHIP"; val = "SFCSHP"; }, { key = "FM-15 METAR"; val = "ADPSFC"; }, + { key = "FM-16 SPECI"; val = "ADPSFC"; }, { key = "FM-18 BUOY"; val = "SFCSHP"; }, + { key = "FM-18X BUOY"; val = "SFCSHP"; }, { key = "FM-281 QSCAT"; val = "ASCATW"; }, { key = "FM-32 PILOT"; val = "ADPUPA"; }, { key = "FM-35 TEMP"; val = "ADPUPA"; }, { key = "FM-88 SATOB"; val = "SATWND"; 
}, - { key = "FM-97 ACARS"; val = "AIRCFT"; } + { key = "FM-97 ACARS"; val = "AIRCFT"; }, + { key = "FM-97 AMDAR"; val = "AIRCFT"; } ]; // diff --git a/data/config/Makefile.am b/data/config/Makefile.am index 6766b264dc..8c3e1934de 100644 --- a/data/config/Makefile.am +++ b/data/config/Makefile.am @@ -38,6 +38,7 @@ config_DATA = \ STATAnalysisConfig_default \ STATAnalysisConfig_GO_Index \ STATAnalysisConfig_CBS_Index \ + TCDiagConfig_default \ TCPairsConfig_default \ TCRMWConfig_default \ RMWAnalysisConfig_default \ diff --git a/data/config/Makefile.in b/data/config/Makefile.in index eaad379a8e..661063c4d0 100644 --- a/data/config/Makefile.in +++ b/data/config/Makefile.in @@ -315,6 +315,7 @@ config_DATA = \ STATAnalysisConfig_default \ STATAnalysisConfig_GO_Index \ STATAnalysisConfig_CBS_Index \ + TCDiagConfig_default \ TCPairsConfig_default \ TCRMWConfig_default \ RMWAnalysisConfig_default \ diff --git a/data/config/TCDiagConfig_default b/data/config/TCDiagConfig_default new file mode 100644 index 0000000000..2f4adaa86d --- /dev/null +++ b/data/config/TCDiagConfig_default @@ -0,0 +1,150 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// TC-Diag configuration file. +// +// For additional information, please see the MET User's Guide. +// +//////////////////////////////////////////////////////////////////////////////// + +// +// Filter input track data lines. +// + +// +// Model +// +model = []; + +// +// Storm identifier +// +storm_id = ""; + +// +// Basin +// +basin = ""; + +// +// Cyclone number +// +cyclone = ""; + +// +// Model initialization time +// +init_inc = ""; + +// +// Subset by the valid time +// +valid_beg = ""; +valid_end = ""; +valid_inc = []; +valid_exc = []; + +// +// Subset by the valid hour and lead time. 
+// +valid_hour = []; +lead = []; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Python diagnostic scripts to be run +// May be set separately in each "domain_info" entry +// +diag_script = [ "MET_BASE/python/tc_diag/compute_tc_diagnostics.py" ]; + +// +// Domain-specific cylindrical coordinate transformation +// +domain_info = [ + { + domain = "parent"; + n_range = 150; + n_azimuth = 8; + delta_range_km = 10.0; + }, + { + domain = "nest"; + n_range = 150; + n_azimuth = 8; + delta_range_km = 2.0; + } +]; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Data censoring and conversion +// May be set separately in each diag_data "field" entry +// +// censor_thresh = []; +// censor_val = []; +// convert(x) = x; +// + +// +// Data fields +// +data = { + + // If empty, the field is processed for all domains + domain = []; + + // Pressure levels to be used, unless overridden below + level = [ "P1000", "P925", "P850", "P700", "P500", + "P400", "P300", "P250", "P200", "P150", + "P100" ]; + + field = [ + { name = "TMP"; }, + { name = "UGRD"; }, + { name = "VGRD"; }, + { name = "RH"; }, + { name = "HGT"; }, + { name = "PRMSL"; level = "Z0"; }, + { name = "PWAT"; level = "L0"; }, + { name = "TMP"; level = "Z0"; }, + { name = "TMP"; level = "Z2"; }, + { name = "RH"; level = "Z2"; }, + { name = "UGRD"; level = "Z10"; }, + { name = "VGRD"; level = "Z10"; } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Regridding options +// +regrid = { + method = NEAREST; + width = 1; + vld_thresh = 0.5; + shape = SQUARE; +} + +// +// Vortex removal flag +// +vortex_removal = FALSE; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Flags to control output files +// +nc_rng_azi_flag = TRUE; +nc_diag_flag = FALSE; +cira_diag_flag = FALSE; + 
+//////////////////////////////////////////////////////////////////////////////// + +tmp_dir = "/tmp"; +output_prefix = ""; +version = "V11.1.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/data/table_files/grib2_mrms.txt b/data/table_files/grib2_mrms.txt index 0b00086744..0b539859e0 100644 --- a/data/table_files/grib2_mrms.txt +++ b/data/table_files/grib2_mrms.txt @@ -3,6 +3,7 @@ GRIB2 209 10 0 255 161 1 2 1 "NLDN_CG_005min_AvgDensity" "CG Lightning Density 5-min - NLDN" "flashes/km^2/min" 209 10 0 255 161 1 2 2 "NLDN_CG_015min_AvgDensity" "CG Lightning Density 15-min - NLDN" "flashes/km^2/min" 209 10 0 255 161 1 2 3 "NLDN_CG_030min_AvgDensity" "CG Lightning Density 30-min - NLDN" "flashes/km^2/min" +209 10 0 255 161 1 2 4 "LightningProbabilityNext30min" "Lightning Probability 0-30 minutes - NLDN" "%" 209 10 0 255 161 1 2 5 "LightningProbabilityNext30minGrid" "Lightning Probability 0-30 minutes - NLDN" "%" 209 10 0 255 161 1 2 6 "LightningProbabilityNext60minGrid" "Lightning Probability 0-60 minutes - NLDN" "%" 209 10 0 255 161 1 2 7 "LightningJumpGrid" "Rapid lightning increases and decreases" "non-dim" @@ -50,6 +51,10 @@ GRIB2 209 10 0 255 161 1 3 56 "Reflectivity_-20C" "Isothermal Reflectivity at -20C" "dBZ" 209 10 0 255 161 1 3 57 "ReflectivityAtLowestAltitude" "ReflectivityAtLowestAltitude" "dBZ" 209 10 0 255 161 1 3 58 "MergedReflectivityAtLowestAltitude" "Non Quality Controlled Reflectivity At Lowest Altitude" "dBZ" +209 10 0 255 161 1 4 0 "IRband4" "Infrared (E/W blend)" "K" +209 10 0 255 161 1 4 1 "Visible" "Visible (E/W blend)" "non-dim" +209 10 0 255 161 1 4 2 "WaterVapor" "Water Vapor (E/W blend)" "K" +209 10 0 255 161 1 4 3 "CloudCover" "Cloud Cover" "K" 209 10 0 255 161 1 6 0 "PrecipFlag" "Surface Precipitation Type" "type" 209 10 0 255 161 1 6 1 "PrecipRate" "Radar Precipitation Rate" "mm/hr" 209 10 0 255 161 1 6 2 "RadarOnly_QPE_01H" "Radar precipitation accumulation 1-hour" "mm" @@ -59,6 +64,27 
@@ GRIB2 209 10 0 255 161 1 6 6 "RadarOnly_QPE_24H" "Radar precipitation accumulation 24-hour" "mm" 209 10 0 255 161 1 6 7 "RadarOnly_QPE_48H" "Radar precipitation accumulation 48-hour" "mm" 209 10 0 255 161 1 6 8 "RadarOnly_QPE_72H" "Radar precipitation accumulation 72-hour" "mm" +209 10 0 255 161 1 6 9 "GaugeCorrQPE01H" "Local gauge bias corrected radar precipitation accumulation 1-hour" "mm" +209 10 0 255 161 1 6 10 "GaugeCorrQPE03H" "Local gauge bias corrected radar precipitation accumulation 3-hour" "mm" +209 10 0 255 161 1 6 11 "GaugeCorrQPE06H" "Local gauge bias corrected radar precipitation accumulation 6-hour" "mm" +209 10 0 255 161 1 6 12 "GaugeCorrQPE12H" "Local gauge bias corrected radar precipitation accumulation 12-hour" "mm" +209 10 0 255 161 1 6 13 "GaugeCorrQPE24H" "Local gauge bias corrected radar precipitation accumulation 24-hour" "mm" +209 10 0 255 161 1 6 14 "GaugeCorrQPE48H" "Local gauge bias corrected radar precipitation accumulation 48-hour" "mm" +209 10 0 255 161 1 6 15 "GaugeCorrQPE72H" "Local gauge bias corrected radar precipitation accumulation 72-hour" "mm" +209 10 0 255 161 1 6 16 "GaugeOnlyQPE01H" "Gauge only precipitation accumulation 1-hour" "mm" +209 10 0 255 161 1 6 17 "GaugeOnlyQPE03H" "Gauge only precipitation accumulation 3-hour" "mm" +209 10 0 255 161 1 6 18 "GaugeOnlyQPE06H" "Gauge only precipitation accumulation 6-hour" "mm" +209 10 0 255 161 1 6 19 "GaugeOnlyQPE12H" "Gauge only precipitation accumulation 12-hour" "mm" +209 10 0 255 161 1 6 20 "GaugeOnlyQPE24H" "Gauge only precipitation accumulation 24-hour" "mm" +209 10 0 255 161 1 6 21 "GaugeOnlyQPE48H" "Gauge only precipitation accumulation 48-hour" "mm" +209 10 0 255 161 1 6 22 "GaugeOnlyQPE72H" "Gauge only precipitation accumulation 72-hour" "mm" +209 10 0 255 161 1 6 23 "MountainMapperQPE01H" "Mountain Mapper precipitation accumulation 1-hour" "mm" +209 10 0 255 161 1 6 24 "MountainMapperQPE03H" "Mountain Mapper precipitation accumulation 3-hour" "mm" +209 10 0 255 
161 1 6 25 "MountainMapperQPE06H" "Mountain Mapper precipitation accumulation 6-hour" "mm" +209 10 0 255 161 1 6 26 "MountainMapperQPE12H" "Mountain Mapper precipitation accumulation 12-hour" "mm" +209 10 0 255 161 1 6 27 "MountainMapperQPE24H" "Mountain Mapper precipitation accumulation 24-hour" "mm" +209 10 0 255 161 1 6 28 "MountainMapperQPE48H" "Mountain Mapper precipitation accumulation 48-hour" "mm" +209 10 0 255 161 1 6 29 "MountainMapperQPE72H" "Mountain Mapper precipitation accumulation 72-hour" "mm" 209 10 0 255 161 1 6 30 "MultiSensor_QPE_01H_Pass1" "Multi-sensor accumulation 1-hour Pass1" "mm" 209 10 0 255 161 1 6 31 "MultiSensor_QPE_03H_Pass1" "Multi-sensor accumulation 3-hour Pass1" "mm" 209 10 0 255 161 1 6 32 "MultiSensor_QPE_06H_Pass1" "Multi-sensor accumulation 6-hour Pass1" "mm" @@ -107,6 +133,7 @@ GRIB2 209 10 0 255 161 1 8 22 "GaugeInflIndex_48H_Pass2" "Gauge Influence Index for 48-hour QPE Pass2" "non-dim" 209 10 0 255 161 1 8 23 "GaugeInflIndex_72H_Pass2" "Gauge Influence Index for 72-hour QPE Pass2" "non-dim" 209 10 0 255 161 1 9 0 "MergedReflectivityQC" "3D Reflectivty Mosaic - 33 CAPPIS (500-19000m)" "dBZ" +209 10 0 255 161 1 9 1 "ConusPlusMergedReflectivityQC" "All Radar 3D Reflectivty Mosaic - 33 CAPPIS (500-19000m)" "dBZ" 209 10 0 255 161 1 9 3 "MergedRhoHV" "3D RhoHV Mosaic - 33 CAPPIS (500-19000m)" "non-dim" 209 10 0 255 161 1 9 4 "MergedZdr" "3D Zdr Mosaic - 33 CAPPIS (500-19000m)" "dB" 209 10 0 255 161 1 10 0 "MergedReflectivityQCComposite" "Composite Reflectivity Mosaic (optimal method)" "dBZ" @@ -123,6 +150,7 @@ GRIB2 209 10 0 255 161 1 11 1 "MergedReflectivityComposite" "Raw Composite Reflectivity Mosaic (max ref)" "dBZ" 209 10 0 255 161 1 11 2 "MergedReflectivityQComposite" "Composite Reflectivity Mosaic (max ref)" "dBZ" 209 10 0 255 161 1 11 3 "MergedBaseReflectivity" "Raw Base Reflectivity Mosaic (optimal method)" "dBZ" +209 10 0 255 161 1 11 4 "Merged_LVL3_BaseHCA" "Level III Base HCA Mosaic (nearest neighbor)" "flag" 209 10
0 255 161 1 12 0 "FLASH_CREST_MAXUNITSTREAMFLOW" "FLASH QPE-CREST Unit Streamflow" "m^3/s/km^2" 209 10 0 255 161 1 12 1 "FLASH_CREST_MAXSTREAMFLOW" "FLASH QPE-CREST Streamflow" "m^3/s" 209 10 0 255 161 1 12 2 "FLASH_CREST_MAXSOILSAT" "FLASH QPE-CREST Soil Saturation" "%" diff --git a/docs/Users_Guide/appendixC.rst b/docs/Users_Guide/appendixC.rst index 2a1e358e68..57e91bf090 100644 --- a/docs/Users_Guide/appendixC.rst +++ b/docs/Users_Guide/appendixC.rst @@ -248,7 +248,7 @@ Heidke Skill Score (HSS) Called "HSS" in CTS output :numref:`table_PS_format_info_CTS` and "HSS" in MCTS output :numref:`table_PS_format_info_MCTS` -HSS is a skill score based on Accuracy, where the Accuracy is corrected by the number of correct forecasts that would be expected by chance. In particular, +HSS is a skill score based on Accuracy, where the Accuracy is compared to the number of correct forecasts that would be expected by chance. In particular, .. math:: \text{HSS } = \frac{n_{11} + n_{00} - C_2}{T - C_2}, @@ -256,17 +256,8 @@ where .. math:: C_2 = \frac{(n_{11} + n_{10}) (n_{11} + n_{01}) + (n_{01} + n_{00}) (n_{10} + n_{00})}{T}. -A more general format that uses percentages is provided by Ou (:ref:`Ou, 2016 `), - -.. only:: latex - - .. math:: \text{HSS(\%) } = 100 \ast \frac{(H - E)}{(T - E)} - -.. only:: html - - .. math:: \text{HSS(%) } = 100 \ast \frac{(H - E)}{(T - E)} - -where H is the number of forecasts in the correct category and E is the expected number of forecasts by chance. +Note that the C_2 value is calculated based on the data fields supplied by the user. Therefore, for C2 to appropriately represent a random forecast, +a sufficiently large sized dataset of forecasts and observations would be needed. HSS can range from minus infinity to 1. A perfect forecast would have HSS = 1. 
@@ -275,11 +266,13 @@ Heidke Skill Score - Expected Correct (HSS_EC) Called "HSS_EC" in CTS output :numref:`table_PS_format_info_CTS` and MCTS output :numref:`table_PS_format_info_MCTS` -HSS_EC is a skill score based on Accuracy, where the Accuracy is corrected by the number of correct forecasts that would be expected by chance. In particular, +HSS_EC calculates the HSS as described above, but with a C2 value based on a set expected chance (EC) value. +Instead of C2 being calculated by the user’s dataset, -.. math:: \text{HSS } = \frac{n_{11} + n_{00} - C_2}{T - C_2}, +.. math:: C_2 = T \ast \text{EC}, -The C_2 value is user-configurable with a default value of T divided by the number of contingency table categories. +where EC is allowed to be prescribed by the user ranging from 0 to 1. By default the EC is set to 1 divided by the number of contingency table categories, +e.g. EC is set to 0.33333 for a 3 category (tercile) forecast and 0.5 for a two category (binary) forecast. HSS_EC can range from minus infinity to 1. A perfect forecast would have HSS_EC = 1. diff --git a/docs/Users_Guide/index.rst b/docs/Users_Guide/index.rst index c5948ca235..ee3a5ff6dc 100644 --- a/docs/Users_Guide/index.rst +++ b/docs/Users_Guide/index.rst @@ -67,6 +67,7 @@ The National Center for Atmospheric Research (NCAR) is sponsored by NSF. The DTC met-tc_overview tc-dland tc-pairs + tc-diag tc-stat tc-gen tc-rmw diff --git a/docs/Users_Guide/mode.rst b/docs/Users_Guide/mode.rst index f5e4caf33f..2e3b93a08f 100644 --- a/docs/Users_Guide/mode.rst +++ b/docs/Users_Guide/mode.rst @@ -410,7 +410,7 @@ _____________________ inten_perc_ratio = ratio_if; } -The set of interest function entries listed above define which values are of interest for each pairwise attribute measured. The interest functions may be defined as a piecewise linear function or as an algebraic expression. A piecewise linear function is defined by specifying the corner points of its graph.
An algebraic function may be defined in terms of several built-in mathematical functions. See :numref:`MODE_A-Scientific-and-statistical` for how interest values are used by the fuzzy logic engine. By default, many of these functions are defined in terms of the previously defined **grid_res** entry. +The interest function entries listed above define which values are of interest for each pairwise attribute measured. Each interest function is defined as a piecewise linear function by specifying the corner points of its graph. The range of each function must be within **0** and **1**. Including (x, y) points with y-values outside this range results in a runtime error. See :numref:`MODE_A-Scientific-and-statistical` for how interest values are used by the fuzzy logic engine. By default, many of these functions are defined in terms of the previously defined **grid_res** entry. _____________________ diff --git a/docs/Users_Guide/tc-diag.rst b/docs/Users_Guide/tc-diag.rst new file mode 100644 index 0000000000..f9cef3d479 --- /dev/null +++ b/docs/Users_Guide/tc-diag.rst @@ -0,0 +1,192 @@ +.. _tc-diag: + +************ +TC-DIAG Tool +************ + +Introduction +============ + +The TC-Diag tool computes Tropical Cyclone diagnostics. More details to be added. + +Practical information +===================== + +tc_diag usage +------------- + +The following sections describe the usage statement, required arguments, and optional arguments for tc_diag. + +.. code-block:: none + + Usage: tc_diag + -data file_1 ... file_n | data_file_list + -deck file + -config file + -out file + [-log file] + [-v level] + +tc_diag has required arguments and can accept several optional arguments. + +Required arguments for tc_diag +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. The **-data file_1 ... file_n | data_file_list** options specify the gridded data files or an ASCII file containing a list of files to be used. + +2. The **-deck source** argument is the ATCF format data source. + +3. 
The **-config file** argument is the configuration file to be used. The contents of the configuration file are discussed below. + +4. The **-out** argument is the NetCDF output file to be written. + +Optional arguments for tc_diag +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +5. The **-log file** option directs output and errors to the specified log file. All messages will be written to that file as well as standard out and error. Thus, users can save the messages without having to redirect the output on the command line. The default behavior is no logfile. + +6. The **-v level** option indicates the desired level of verbosity. The contents of "level" will override the default setting of 2. Setting the verbosity to 0 will make the tool run with no log messages, while increasing the verbosity above 1 will increase the amount of logging. + +tc_diag configuration file +-------------------------- + +The default configuration file for the TC-Diag tool named **TCDiagConfig_default** can be found in the installed *share/met/config/* directory. It is encouraged for users to copy these default files before modifying their contents. The contents of the configuration file are described in the subsections below. + +The TC-Diag tool should be configured to filter the input track data (**-deck**) down to the subset of tracks that correspond to the gridded data files provided (**-data**). The filtered tracks should contain data for only one initialization time but may contain tracks for multiple models. + +_______________________ + +.. 
code-block:: none + + model = [ "GFSO", "OFCL" ]; + storm_id = ""; + basin = ""; + cyclone = ""; + init_inc = ""; + valid_beg = ""; + valid_end = ""; + valid_inc = []; + valid_exc = []; + valid_hour = []; + lead = []; + + censor_thresh = []; + censor_val = []; + convert(x) = x; + + data = { + field = [ + { + name = "PRMSL"; + level = ["L0"]; + }, + { + name = "TMP"; + level = ["P1000", "P500"]; + }, + { + name = "UGRD"; + level = ["P1000", "P500"]; + }, + { + name = "VGRD"; + level = ["P1000", "P500"]; + } + ]; + } + regrid = { ... } + +The configuration options listed above are common to many MET tools and are described in :numref:`config_options`. The name and level entries in the data dictionary define the data to be processed. The regrid dictionary defines if and how regridding will be performed. + +_______________________ + +.. code-block:: none + + n_range = 100; + +The **n_range** parameter is the number of equally spaced range intervals in the range-azimuth grid. + +_______________________ + +.. code-block:: none + + n_azimuth = 180; + +The **n_azimuth** parameter is the number of equally spaced azimuth intervals in the range-azimuth grid. The azimuthal grid spacing is 360 / **n_azimuth** degrees. + +_______________________ + +.. code-block:: none + + max_range_km = 100.0; + +The **max_range_km** parameter specifies the maximum range of the range-azimuth grid, in kilometers. If this parameter is specified and not **rmw_scale**, the radial grid spacing will be **max_range_km / n_range**. + +_______________________ + +.. code-block:: none + + delta_range_km = 10.0; + +The **delta_range_km** parameter specifies the spacing of the range rings, in kilometers. + +_______________________ + +.. code-block:: none + + rmw_scale = 0.2; + +The **rmw_scale** parameter overrides the **max_range_km** parameter. When this is set the radial grid spacing will be **rmw_scale** in units of the RMW, which varies along the storm track. + +_______________________ + +.. 
code-block:: none + + compute_tangential_and_radial_winds = TRUE; + +The **compute_tangential_and_radial_winds** parameter is a flag controlling whether a conversion from U/V to Tangential/Radial winds is done or not. If set to TRUE, additional parameters are used, otherwise they are not. + +_______________________ + +.. code-block:: none + + u_wind_field_name = "UGRD"; + v_wind_field_name = "VGRD"; + +The **u_wind_field_name** and **v_wind_field_name** parameters identify which input data to use in converting to tangential/radial winds. The parameters are used only if **compute_tangential_and_radial_winds** is set to TRUE. + +_______________________ + +.. code-block:: none + + tangential_velocity_field_name = "VT"; + tangential_velocity_long_field_name = "Tangential Velocity"; + + +The **tangential_velocity_field_name** and **tangential_velocity_long_field_name** parameters define the field names to give the output tangential velocity grid in the netCDF output file. The parameters are used only if **compute_tangential_and_radial_winds** is set to TRUE. + +_______________________ + +.. code-block:: none + + radial_velocity_field_name = "VR"; + radial_velocity_long_field_name = "Radial Velocity"; + + +The **radial_velocity_field_name** and **radial_velocity_long_field_name** parameters define the field names to give the output radial velocity grid in the netCDF output file. The parameters are used only if **compute_tangential_and_radial_winds** is set to TRUE. + + +tc_diag output file +------------------- + +The NetCDF output file contains the following dimensions: + +1. *range* - the radial dimension of the range-azimuth grid + +2. *azimuth* - the azimuthal dimension of the range-azimuth grid + +3. *pressure* - if any pressure levels are specified in the data variable list, they will be sorted and combined into a 3D NetCDF variable, with pressure as the vertical dimension and range and azimuth as the horizontal dimensions + +4. 
*track_point* - the track points corresponding to the model output valid times + +For each data variable specified in the data variable list, a corresponding NetCDF variable will be created with the same name and units. diff --git a/docs/Users_Guide/tc-gen.rst b/docs/Users_Guide/tc-gen.rst index 5cc22a7351..1f690a1477 100644 --- a/docs/Users_Guide/tc-gen.rst +++ b/docs/Users_Guide/tc-gen.rst @@ -22,7 +22,7 @@ As with other extreme events (where the event occurs much less frequently than t For probabilistic forecasts specified using the **-edeck** command line option, it identifies genesis events in the reference dataset. It applies user-specified configuration options to pair the forecast probabilities to the reference genesis events. These pairs are added to an Nx2 probabilistic contingency table. If the reference genesis event occurs within in the predicted time window, the pair is counted in the observation-yes column. Otherwise, it is added to the observation-no column. -For warning area shapefiles specified using the **-shape** command line option, it processes metadata from the corresponding database files. The database file is assumed to exist at exactly the same path as the shapefile, but with a ".dbf" suffix instead of ".shp". Note that only shapefiles exactly following the NOAA National Hurricane Center's (NHC) "gtwo_areas_YYYYMMDDHHMM.shp" file naming and corresonding metadata conventions are supported. For each shapefile record, the database file defines up to three corresponding probability values. The first percentage is interpreted as the probability of genesis inside the shape within 48 hours. The second and, if provided, third percentages are interpreted as the 120-hour and 168-hour probabilities, respectively. Care is taken to identify and either ignore or update duplicate shapes found in the input. +For warning area shapefiles specified using the **-shape** command line option, it processes metadata from the corresponding database files. 
The database file is assumed to exist at exactly the same path as the shapefile, but with a ".dbf" suffix instead of ".shp". Note that only shapefiles exactly following the NOAA National Hurricane Center's (NHC) "gtwo_areas_YYYYMMDDHHMM.shp" file naming and corresponding metadata conventions are supported. For each shapefile record, the database file defines corresponding probability values for one or more time periods. Percentages may be provided for the probability of genesis inside the shape within 2, 5, or 7 days from issuance time that is parsed from the file name. Note that 5 day probabilities were discontinued in 2023. The 2 and 7 day probabilities are provided in database file fields named "PROB2DAY" and "PROB7DAY", respectively. Care is taken to identify and either ignore or update duplicate shapes found in the input. The shapes are then subset based on the filtering criteria in the configuration file. For each probability and shape, the reference genesis events are searched for a match within the defined time window. These pairs are added to an Nx2 probabilistic contingency table. The probabilistic contingeny tables and statistics are computed and reported separately for filter defined and lead hour encountered in the input. @@ -63,7 +63,7 @@ Required arguments for tc_gen 2. The **-edeck source** argument is the path to one or more ATCF edeck files, an ASCII file list containing them, or a top-level directory with files matching the regular expression ".dat". The probability of genesis are read from each edeck input file and verified against at the **-track** data. -3. 
The **-shape source** argument is the path to one or more NHC genesis warning area shapefiles, an ASCII file list containing them, or a top-level directory with files matching the regular expression "gtwo_areas.*.shp". The genesis warning areas and corresponding forecast probability values are verified against the **-track** data. Note: At least one of the **-genesis**, **-edeck**, or **-shape** command line options are required. diff --git a/docs/Users_Guide/tc-pairs.rst b/docs/Users_Guide/tc-pairs.rst index 556f7358c2..0533d5f649 100644 --- a/docs/Users_Guide/tc-pairs.rst +++ b/docs/Users_Guide/tc-pairs.rst @@ -532,6 +532,7 @@ TC-Pairs produces output in TCST format. The default output file name can be ove * - 35, 36 - A/BAL_WIND_34 - a/bdeck 34-knot radius winds in full circle + or the mean of the non-zero 34-knot wind quadrants * - 37, 38 - A/BNE_WIND_34 - a/bdeck 34-knot radius winds in NE quadrant @@ -547,6 +548,7 @@ TC-Pairs produces output in TCST format. The default output file name can be ove * - 45, 46 - A/BAL_WIND_50 - a/bdeck 50-knot radius winds in full circle + or the mean of the non-zero 50-knot wind quadrants * - 47, 48 - A/BNE_WIND_50 - a/bdeck 50-knot radius winds in NE quadrant @@ -562,6 +564,7 @@ TC-Pairs produces output in TCST format. The default output file name can be ove * - 55, 56 - A/BAL_WIND_64 - a/bdeck 64-knot radius winds in full circle + or the mean of the non-zero 64-knot wind quadrants * - 57, 58 - A/BNE_WIND_64 - a/bdeck 64-knot radius winds in NE quadrant diff --git a/internal/scripts/installation/compile_MET_all.sh b/internal/scripts/installation/compile_MET_all.sh old mode 100755 new mode 100644 index 2eb0ff62e6..47333ca25e --- a/internal/scripts/installation/compile_MET_all.sh +++ b/internal/scripts/installation/compile_MET_all.sh @@ -6,11 +6,19 @@ # # This compile_MET_all.sh script expects certain environment # variables to be set: -# TEST_BASE, COMPILER, MET_SUBDIR, MET_TARBALL, -# and USE_MODULES. 
+# TEST_BASE, COMPILER (or COMPILER_FAMILY and COMPILER_VERSION), +# MET_SUBDIR, MET_TARBALL, and USE_MODULES. # # If compiling support for Python embedding, users will need to # set MET_PYTHON, MET_PYTHON_BIN_EXE, MET_PYTHON_CC, and MET_PYTHON_LD. +# Users can directly set the python module to be loaded by setting +# either PYTHON_MODULE or by setting PYTHON_NAME and PYTHON_VERSION: +# - PYTHON_MODULE (only used if USE_MODULES=TRUE) - format is the name +# of the Python module to load followed by an underscore and then the +# version number (e.g. python_3.8.6, The script will then run "module +# load python/3.8.6") +# - PYTHON_NAME = python (or e.g. python3, etc.) +# - PYTHON_VERSION = 3.8.6 # # For a description of these and other variables, visit the MET # downloads page under "Sample Script For Compiling External @@ -83,12 +91,16 @@ fi echo echo "TEST_BASE = ${TEST_BASE? "ERROR: TEST_BASE must be set"}" -echo "COMPILER = ${COMPILER? "ERROR: COMPILER must be set"}" echo "MET_SUBDIR = ${MET_SUBDIR? "ERROR: MET_SUBDIR must be set"}" echo "MET_TARBALL = ${MET_TARBALL? "ERROR: MET_TARBALL must be set"}" echo "USE_MODULES = ${USE_MODULES? "ERROR: USE_MODULES must be set to TRUE if using modules or FALSE otherwise"}" +if [[ -z "$COMPILER" ]] && [[ -z "$COMPILER_FAMILY" && -z "$COMPILER_VERSION" ]]; then + echo "ERROR: COMPILER or COMPILER_FAMILY and COMPILER_VERSION must be set" + exit 1 +fi echo ${MAKE_ARGS:+MAKE_ARGS = $MAKE_ARGS} + LIB_DIR=${TEST_BASE}/external_libs MET_DIR=${MET_SUBDIR} @@ -252,8 +264,17 @@ if [ ! 
-e ${LIB_DIR}/lib ]; then fi # Load compiler version -COMPILER_FAMILY=` echo $COMPILER | cut -d'_' -f1` -COMPILER_VERSION=`echo $COMPILER | cut -d'_' -f2` +if [ -z ${COMPILER_FAMILY} ]; then + COMPILER_FAMILY=` echo $COMPILER | cut -d'_' -f1` +fi + +if [ -z ${COMPILER_VERSION} ]; then + COMPILER_VERSION=`echo $COMPILER | cut -d'_' -f2` +fi + +echo "COMPILER = $COMPILER" +echo "COMPILER_FAMILY = $COMPILER_FAMILY" +echo "COMPILER_VERSION = $COMPILER_VERSION" COMPILER_MAJOR_VERSION=`echo $COMPILER_VERSION | cut -d'.' -f1` echo @@ -271,6 +292,12 @@ if [ ${USE_MODULES} = "TRUE" ]; then fi fi +# After loading the compiler module, strip any extra +# characters off of "gnu" (e.g. "gnu9") +if [[ ${COMPILER_FAMILY} == *gnu* ]]; then + export COMPILER_FAMILY="gnu" +fi + if [ ${COMPILER_FAMILY} = "gnu" ]; then if [ -z ${CC} ]; then CC=`which gcc`; fi if [ -z ${CXX} ]; then CXX=`which g++`; fi @@ -314,11 +341,16 @@ echo if [ ${USE_MODULES} = "TRUE" ]; then if [ ! -z ${PYTHON_MODULE} ]; then - PYTHON_NAME=` echo $PYTHON_MODULE | cut -d'_' -f1` + PYTHON_NAME=`echo $PYTHON_MODULE | cut -d'_' -f1` PYTHON_VERSION_NUM=`echo $PYTHON_MODULE | cut -d'_' -f2` echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + # Allow the user to specify the name and version of the module to load + elif [[ ! -z ${PYTHON_NAME} && ! 
-z ${PYTHON_VERSION_NUM} ]]; then + echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" + echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} fi fi @@ -343,9 +375,9 @@ if [ $COMPILE_GSL -eq 1 ]; then tar -xf ${TAR_DIR}/gsl-${vrs}.tar.gz -C ${LIB_DIR}/gsl cd ${LIB_DIR}/gsl/gsl* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} > gsl.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > gsl.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > gsl.make_install.log 2>&1" fi # Compile BUFRLIB @@ -366,17 +398,17 @@ if [ $COMPILE_BUFRLIB -eq 1 ]; then # For GNU and Intel follow BUFRLIB11 instructions if [[ ${COMPILER_FAMILY} == "gnu" ]]; then if [[ ${COMPILER_MAJOR_VERSION} -ge 10 ]]; then - ${FC} -c -fno-second-underscore -fallow-argument-mismatch `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> make.log 2>&1 + ${FC} -c -fno-second-underscore -fallow-argument-mismatch `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> bufr.make.log 2>&1 elif [[ ${COMPILER_MAJOR_VERSION} -lt 10 ]]; then - ${FC} -c -fno-second-underscore -Wno-argument-mismatch `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> make.log 2>&1 + ${FC} -c -fno-second-underscore -Wno-argument-mismatch `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> bufr.make.log 2>&1 fi elif [[ ${COMPILER_FAMILY} == "intel" ]] || [[ ${COMPILER_FAMILY} == "ics" ]] || [[ ${COMPILER_FAMILY} == "ips" ]] || [[ ${COMPILER_FAMILY} == "PrgEnv-intel" ]]; then - ${FC} -c `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> make.log 2>&1 + ${FC} -c `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> bufr.make.log 2>&1 elif [[ ${COMPILER_FAMILY} == "pgi" ]]; then 
- ${FC} -c -Mnosecond_underscore `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> make.log 2>&1 + ${FC} -c -Mnosecond_underscore `./getdefflags_F.sh` modv*.F moda*.F `ls -1 *.F *.f | grep -v "mod[av]_"` >> bufr.make.log 2>&1 fi - ar crv libbufr.a *.o >> make.log 2>&1 + ar crv libbufr.a *.o >> bufr.make.log 2>&1 cp *.a ${LIB_DIR}/lib/. fi @@ -390,9 +422,9 @@ if [ $COMPILE_ZLIB -eq 1 ]; then tar -xzf ${TAR_DIR}/zlib*.tar.gz -C ${LIB_DIR}/zlib cd ${LIB_DIR}/zlib/zlib* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} > zlib.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > zlib.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > zlib.make_install.log 2>&1" # GPM: why is this removed? Could we add a comment to # describe why this is needed? @@ -409,9 +441,9 @@ if [[ $COMPILE_LIBPNG -eq 1 && $HOST != ys* ]]; then tar -xzf ${TAR_DIR}/libpng*.tar.gz -C ${LIB_DIR}/libpng cd ${LIB_DIR}/libpng/libpng* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > libpng.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > libpng.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > libpng.make_install.log 2>&1" fi # Compile JASPER @@ -424,9 +456,9 @@ if [ $COMPILE_JASPER -eq 1 ]; then cd ${LIB_DIR}/jasper/jasper* export CPPFLAGS="-I${LIB_DIR}/include" echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} > 
jasper.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > jasper.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > jasper.make_install.log 2>&1" fi # Compile G2CLIB @@ -451,7 +483,7 @@ if [ $COMPILE_G2CLIB -eq 1 ]; then echo "cd `pwd`" # g2clib appears to compile but causes failure compiling MET if -j argument is used # so exclude it from this call - run_cmd "make > make.log 2>&1" + run_cmd "make > g2clib.make.log 2>&1" cp libg2c*.a ${LIB_DIR}/lib/libgrib2c.a cp *.h ${LIB_DIR}/include/. @@ -471,7 +503,7 @@ if [ $COMPILE_HDF -eq 1 ]; then tar -xf ${TAR_DIR}/HDF4.2*.tar.gz -C ${LIB_DIR}/hdf cd ${LIB_DIR}/hdf/HDF* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} --disable-netcdf --with-jpeg=${LIB_DIR} --with-zlib=${LIB_DIR} > configure.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} --disable-netcdf --with-jpeg=${LIB_DIR} --with-zlib=${LIB_DIR} > hdf4.configure.log 2>&1" cat mfhdf/hdiff/Makefile | \ sed 's/LIBS = -ljpeg -lz/LIBS = -ljpeg -lz -lm/g' \ > Makefile_new @@ -486,8 +518,8 @@ if [ $COMPILE_HDF -eq 1 ]; then > Makefile_new fi mv Makefile_new hdf/src/Makefile - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "make ${MAKE_ARGS} > hdf4.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > hdf4.make_install.log 2>&1" fi # Compile HDFEOS @@ -500,9 +532,9 @@ if [ $COMPILE_HDFEOS -eq 1 ]; then tar -xzf ${TAR_DIR}/HDF-EOS*.tar.* -C ${LIB_DIR}/hdfeos cd ${LIB_DIR}/hdfeos/hdfeos echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} --with-hdf4=${LIB_DIR} --with-jpeg=${LIB_DIR} > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} --with-hdf4=${LIB_DIR} --with-jpeg=${LIB_DIR} > hdf-eos.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > hdf-eos.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > hdf-eos.make_install.log 2>&1" cp include/*.h ${LIB_DIR}/include/ fi 
@@ -517,8 +549,8 @@ if [ $COMPILE_NETCDF -eq 1 ]; then tar -xzf ${TAR_DIR}/hdf5*.tar.gz -C ${LIB_DIR}/hdf5 cd ${LIB_DIR}/hdf5/hdf5* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} --with-zlib=${LIB_Z} CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib:${LIB_Z} CPPFLAGS=-I${LIB_DIR}/include > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} --with-zlib=${LIB_Z} CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib:${LIB_Z} CPPFLAGS=-I${LIB_DIR}/include > hdf5.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > hdf5.make_install.log 2>&1" echo echo "Compiling NetCDF-C at `date`" @@ -529,16 +561,16 @@ if [ $COMPILE_NETCDF -eq 1 ]; then export FC='' export F90='' echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > netcdf-c.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > netcdf-c.make_install.log 2>&1" echo echo "Compiling NetCDF-CXX at `date`" tar -xzf ${TAR_DIR}/netcdf-cxx*.tar.gz -C ${LIB_DIR}/netcdf cd ${LIB_DIR}/netcdf/netcdf-cxx* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > netcdf-cxx.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > netcdf-cxx.make_install.log 2>&1" fi # Compile FREETYPE @@ -550,9 +582,9 @@ if [ $COMPILE_FREETYPE -eq 1 ]; then tar -xzf ${TAR_DIR}/freetype*.tar.gz -C ${LIB_DIR}/freetype cd ${LIB_DIR}/freetype/freetype* echo "cd `pwd`" - run_cmd "./configure 
--prefix=${LIB_DIR} --with-png=yes > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} --with-png=yes > freetype.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > freetype.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > freetype.make_install.log 2>&1" fi @@ -568,9 +600,9 @@ if [ $COMPILE_CAIRO -eq 1 ]; then tar -xzf ${TAR_DIR}/pixman*.tar.gz -C ${LIB_DIR}/pixman cd ${LIB_DIR}/pixman/pixman* echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} > pixman.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > pixman.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > pixman.make_install.log 2>&1" fi echo @@ -584,9 +616,9 @@ if [ $COMPILE_CAIRO -eq 1 ]; then export PKG_CONFIG_PATH=${LIB_DIR}/lib/pkgconfig/ fi echo "cd `pwd`" - run_cmd "./configure --prefix=${LIB_DIR} ax_cv_c_float_words_bigendian=no LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > configure.log 2>&1" - run_cmd "make ${MAKE_ARGS} > make.log 2>&1" - run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" + run_cmd "./configure --prefix=${LIB_DIR} ax_cv_c_float_words_bigendian=no LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > cairo.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > cairo.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > cairo.make_install.log 2>&1" fi # Compile MET @@ -636,7 +668,7 @@ export LDFLAGS="-Wl,--disable-new-dtags" # https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html # ${parameter:+word} # If parameter is null or unset, nothing is substituted, otherwise the expansion of word is substituted. 
-export LDFLAGS="${LDFLAGS} -Wl,-rpath,${LIB_DIR}/lib${ADDTL_DIR:+:$ADDTL_DIR}${LIB_DIR}/lib${MET_NETCDF:+:$MET_NETCDF/lib}${MET_HDF5:+:$MET_HDF5/lib}${MET_BUFRLIB:+:$MET_BUFRLIB}${MET_GRIB2CLIB:+:$MET_GRIB2CLIB}${MET_PYTHON_LIB:+:$MET_PYTHON_LIB}${MET_GSL:+:$MET_GSL/lib}" +export LDFLAGS="${LDFLAGS} -Wl,-rpath,${LIB_DIR}/lib${MET_NETCDF:+:$MET_NETCDF/lib}${MET_HDF5:+:$MET_HDF5/lib}${MET_BUFRLIB:+:$MET_BUFRLIB}${MET_GRIB2CLIB:+:$MET_GRIB2CLIB}${MET_PYTHON_LIB:+:$MET_PYTHON_LIB}${MET_GSL:+:$MET_GSL/lib}${ADDTL_DIR:+:$ADDTL_DIR}" export LDFLAGS="${LDFLAGS} -Wl,-rpath,${LIB_JASPER:+$LIB_JASPER}${LIB_LIBPNG:+:$LIB_PNG}${LIB_Z:+$LIB_Z}" export LDFLAGS="${LDFLAGS} ${LIB_JASPER:+-L$LIB_JASPER} ${LIB_LIBPNG:+-L$LIB_LIBPNG} ${MET_HDF5:+-L$MET_HDF5/lib} ${ADDTL_DIR:+-L$ADDTL_DIR}" export LIBS="${LIBS} -lhdf5_hl -lhdf5 -lz" @@ -674,9 +706,9 @@ fi configure_cmd="${configure_cmd} ${OPT_ARGS}" echo "cd `pwd`" -run_cmd "${configure_cmd} > configure.log 2>&1" -run_cmd "make ${MAKE_ARGS} > make.log 2>&1" -run_cmd "make ${MAKE_ARGS} install > make_install.log 2>&1" -run_cmd "make ${MAKE_ARGS} test > make_test.log 2>&1" +run_cmd "${configure_cmd} > met.configure.log 2>&1" +run_cmd "make ${MAKE_ARGS} > met.make.log 2>&1" +run_cmd "make ${MAKE_ARGS} install > met.make_install.log 2>&1" +run_cmd "make ${MAKE_ARGS} test > met.make_test.log 2>&1" echo "Finished compiling at `date`" diff --git a/internal/test_unit/bin/unit_test.sh b/internal/test_unit/bin/unit_test.sh index eff6bc65da..caf28047ab 100755 --- a/internal/test_unit/bin/unit_test.sh +++ b/internal/test_unit/bin/unit_test.sh @@ -67,6 +67,7 @@ UNIT_XML="unit_ascii2nc.xml \ unit_plot_tc.xml \ unit_tc_rmw.xml \ unit_rmw_analysis.xml \ + unit_tc_diag.xml \ unit_tc_gen.xml \ unit_met_test_scripts.xml \ unit_modis.xml \ diff --git a/internal/test_unit/config/Ascii2NcConfig_aeronet b/internal/test_unit/config/Ascii2NcConfig_aeronet index 8a1b00f9b9..5bcaa96adc 100644 --- a/internal/test_unit/config/Ascii2NcConfig_aeronet +++ 
b/internal/test_unit/config/Ascii2NcConfig_aeronet @@ -34,12 +34,15 @@ message_type_map = [ { key = "FM-12 SYNOP"; val = "ADPSFC"; }, { key = "FM-13 SHIP"; val = "SFCSHP"; }, { key = "FM-15 METAR"; val = "ADPSFC"; }, + { key = "FM-16 SPECI"; val = "ADPSFC"; }, { key = "FM-18 BUOY"; val = "SFCSHP"; }, + { key = "FM-18X BUOY"; val = "SFCSHP"; }, { key = "FM-281 QSCAT"; val = "ASCATW"; }, { key = "FM-32 PILOT"; val = "ADPUPA"; }, { key = "FM-35 TEMP"; val = "ADPUPA"; }, { key = "FM-88 SATOB"; val = "SATWND"; }, - { key = "FM-97 ACARS"; val = "AIRCFT"; } + { key = "FM-97 ACARS"; val = "AIRCFT"; }, + { key = "FM-97 AMDAR"; val = "AIRCFT"; } ]; // diff --git a/internal/test_unit/config/Ascii2NcConfig_rain_01H_sum b/internal/test_unit/config/Ascii2NcConfig_rain_01H_sum index 160d93fd5f..0f389c0ad6 100644 --- a/internal/test_unit/config/Ascii2NcConfig_rain_01H_sum +++ b/internal/test_unit/config/Ascii2NcConfig_rain_01H_sum @@ -37,12 +37,15 @@ message_type_map = [ { key = "FM-12 SYNOP"; val = "ADPSFC"; }, { key = "FM-13 SHIP"; val = "SFCSHP"; }, { key = "FM-15 METAR"; val = "ADPSFC"; }, + { key = "FM-16 SPECI"; val = "ADPSFC"; }, { key = "FM-18 BUOY"; val = "SFCSHP"; }, + { key = "FM-18X BUOY"; val = "SFCSHP"; }, { key = "FM-281 QSCAT"; val = "ASCATW"; }, { key = "FM-32 PILOT"; val = "ADPUPA"; }, { key = "FM-35 TEMP"; val = "ADPUPA"; }, { key = "FM-88 SATOB"; val = "SATWND"; }, - { key = "FM-97 ACARS"; val = "AIRCFT"; } + { key = "FM-97 ACARS"; val = "AIRCFT"; }, + { key = "FM-97 AMDAR"; val = "AIRCFT"; } ]; // diff --git a/internal/test_unit/config/TCDiagConfig_ian b/internal/test_unit/config/TCDiagConfig_ian new file mode 100644 index 0000000000..8c9d199f62 --- /dev/null +++ b/internal/test_unit/config/TCDiagConfig_ian @@ -0,0 +1,144 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// TC-Diag configuration file. +// +// For additional information, please see the MET User's Guide. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// Filter input track data lines. +// + +// +// Model +// +model = [ "GFSO" ]; + +// +// Storm identifier +// +storm_id = "AL092022"; + +// +// Basin +// +basin = ""; + +// +// Cyclone number +// +cyclone = ""; + +// +// Model initialization time +// +init_inc = "20220924_00"; + +// +// Subset by the valid time +// +valid_beg = ""; +valid_end = ""; +valid_inc = []; +valid_exc = []; + +// +// Subset by the valid hour and lead time. +// +valid_hour = []; +lead = [ "0", "6", "12", "18", "24" ]; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Python diagnostic scripts to be run +// May be set separately in each "domain_info" entry +// +diag_script = [ "MET_BASE/python/tc_diag/compute_tc_diagnostics.py" ]; + +// +// Domain-specific cylindrical coordinate transformation +// +domain_info = [ + { + domain = "parent"; + n_range = 150; + n_azimuth = 8; + delta_range_km = 10.0; + } +]; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Data censoring and conversion +// May be set separately in each "data.field" entry +// +// censor_thresh = []; +// censor_val = []; +// convert(x) = x; +// + +// +// Data fields +// +data = { + + // If empty, the field is processed for all domains + domain = []; + + // Pressure levels to be used, unless overridden below + level = [ "P1000", "P925", "P850", "P700", "P500", + "P400", "P300", "P250", "P200", "P150", + "P100" ]; + + field = [ + { name = "TMP"; }, + { name = "UGRD"; }, + { name = "VGRD"; }, + { name = "RH"; }, + { name = "HGT"; }, + { name = "PRMSL"; level = "Z0"; }, + { name = "PWAT"; level = "L0"; }, + { name = "TMP"; level = "Z0"; }, + { name = "TMP"; level = "Z2"; }, + { name = "RH"; level = "Z2"; }, + { name = "UGRD"; level = "Z10"; }, + { name = "VGRD"; level = "Z10"; } + ]; +} + 
+//////////////////////////////////////////////////////////////////////////////// + +// +// Regridding options +// +regrid = { + method = NEAREST; + width = 1; + vld_thresh = 0.5; + shape = SQUARE; +} + +// +// Vortex removal flag +// +vortex_removal = FALSE; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Flags to control output files +// +nc_rng_azi_flag = TRUE; +nc_diag_flag = FALSE; +cira_diag_flag = FALSE; + +//////////////////////////////////////////////////////////////////////////////// + +tmp_dir = "/tmp"; +output_prefix = ""; +version = "V11.1.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/internal/test_unit/config/TCPairsConfig_CONSENSUS b/internal/test_unit/config/TCPairsConfig_CONSENSUS index 0793bf3a45..4229974317 100644 --- a/internal/test_unit/config/TCPairsConfig_CONSENSUS +++ b/internal/test_unit/config/TCPairsConfig_CONSENSUS @@ -187,11 +187,6 @@ watch_warn = { time_offset = -14400; } -// -// Diagnostics to be extracted -// -diag_name = []; - // // Modify basin names to make them consistent across ATCF input files. 
// diff --git a/internal/test_unit/xml/unit_tc_diag.xml b/internal/test_unit/xml/unit_tc_diag.xml new file mode 100644 index 0000000000..c21287f201 --- /dev/null +++ b/internal/test_unit/xml/unit_tc_diag.xml @@ -0,0 +1,41 @@ + + + + + + + + +]> + + + + + + &TEST_DIR; + true + + + mkdir -p &OUTPUT_DIR;/tc_diag; \ + echo "&DATA_DIR_MODEL;/grib2/gfs/gfs.0p25.2022092400.f000.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs.0p25.2022092400.f006.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs.0p25.2022092400.f012.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs.0p25.2022092400.f018.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs.0p25.2022092400.f024.grib2" \ + > &OUTPUT_DIR;/tc_diag/gfs_2022092400_file_list; \ + &MET_BIN;/tc_diag + \ + -deck &DATA_DIR;/adeck/aal092022_OFCL_SHIP_AVNO.dat \ + -data parent GFSO &OUTPUT_DIR;/tc_diag/gfs_2022092400_file_list \ + -config &CONFIG_DIR;/TCDiagConfig_ian \ + -outdir &OUTPUT_DIR;/tc_diag \ + -v 2 + + + &OUTPUT_DIR;/tc_diag/tc_diag_AL092022_GFSO_2022092400_cyl_grid_parent.nc + + + + + diff --git a/scripts/python/Makefile.am b/scripts/python/Makefile.am index c3b7b20042..a6ed5af4c3 100644 --- a/scripts/python/Makefile.am +++ b/scripts/python/Makefile.am @@ -22,7 +22,8 @@ SUBDIRS = \ examples \ met \ pyembed \ - utility + utility \ + tc_diag ## Example of how to Install outside of $(pkgdatadir) ## scriptsrootdir = $(prefix)/share/scripts @@ -30,10 +31,4 @@ SUBDIRS = \ pythonscriptsdir = $(pkgdatadir)/python -#EXTRA_DIST = ${top_DATA} \ -# sample_fcst \ -# sample_obs \ -# python \ -# copyright_notice.txt - MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/Makefile.in b/scripts/python/Makefile.in index 5ff5daed23..99855cad22 100644 --- a/scripts/python/Makefile.in +++ b/scripts/python/Makefile.in @@ -330,15 +330,10 @@ SUBDIRS = \ examples \ met \ pyembed \ - utility + utility \ + tc_diag pythonscriptsdir = $(pkgdatadir)/python - -#EXTRA_DIST = ${top_DATA} \ -# sample_fcst \ -# sample_obs \ -# python \ -# copyright_notice.txt MAINTAINERCLEANFILES = 
Makefile.in all: all-recursive diff --git a/scripts/python/pyembed/Makefile.am b/scripts/python/pyembed/Makefile.am index ca8a3cb66e..7f821119d8 100644 --- a/scripts/python/pyembed/Makefile.am +++ b/scripts/python/pyembed/Makefile.am @@ -25,11 +25,13 @@ pyembed_DATA = \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + read_tmp_tc_diag.py \ set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ - write_tmp_mpr.py + write_tmp_mpr.py \ + write_tmp_tc_diag.py EXTRA_DIST = ${pyembed_DATA} diff --git a/scripts/python/pyembed/Makefile.in b/scripts/python/pyembed/Makefile.in index bd0848e94e..077334a8f3 100644 --- a/scripts/python/pyembed/Makefile.in +++ b/scripts/python/pyembed/Makefile.in @@ -361,11 +361,13 @@ pyembed_DATA = \ read_tmp_dataplane.py \ read_tmp_ascii.py \ read_tmp_point_nc.py \ + read_tmp_tc_diag.py \ set_python_env.py \ write_tmp_dataplane.py \ write_tmp_point.py \ write_tmp_point_nc.py \ - write_tmp_mpr.py + write_tmp_mpr.py \ + write_tmp_tc_diag.py EXTRA_DIST = ${pyembed_DATA} MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/pyembed/read_tmp_tc_diag.py b/scripts/python/pyembed/read_tmp_tc_diag.py new file mode 100644 index 0000000000..0828da3c82 --- /dev/null +++ b/scripts/python/pyembed/read_tmp_tc_diag.py @@ -0,0 +1,15 @@ +######################################################################## +# +# Reads temporary file into memory. 
+# +# usage: /path/to/python read_tmp_tc_diag.py tc_diag.tmp +# +######################################################################## + +import sys + +# PYTHON path for met.tc_diag is added by write_tmp_dataplane.py +from met.tc_diag import tc_diag + +# read NetCDF file +tc_diag = tc_diag.read_diag(sys.argv[1]) diff --git a/scripts/python/pyembed/write_tmp_tc_diag.py b/scripts/python/pyembed/write_tmp_tc_diag.py new file mode 100644 index 0000000000..aec1120430 --- /dev/null +++ b/scripts/python/pyembed/write_tmp_tc_diag.py @@ -0,0 +1,18 @@ +######################################################################## +# +# usage: /path/to/python write_tmp_tc_diag.py \ +# tmp_output_filename .py +# +######################################################################## + +import sys +try: + from python_embedding import pyembed_tools +except: + from pyembed.python_embedding import pyembed_tools + +if __name__ == '__main__': + argv_org = sys.argv[:] + tmp_filename = sys.argv[1] + met_in = pyembed_tools.call_python(sys.argv) + pyembed_tools.write_tmp_tc_diag(tmp_filename, met_in.tc_diag) diff --git a/scripts/python/tc_diag/Makefile.am b/scripts/python/tc_diag/Makefile.am new file mode 100644 index 0000000000..e61802cbf1 --- /dev/null +++ b/scripts/python/tc_diag/Makefile.am @@ -0,0 +1,33 @@ +## Makefile.am -- Process this file with automake to produce Makefile.in +## Copyright (C) 2000, 2006 Gary V. Vaughan +## +## This program is free software; you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation; either version 2, or (at your option) +## any later version. +## +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. 
+## +## You should have received a copy of the GNU General Public License +## along with this program; if not, write to the Free Software +## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +## MA 02110-1301, USA. + +# SUBDIRS = include + + +## Example of how to Install outside of $(pkgdatadir) +## scriptsrootdir = $(prefix)/share/scripts +## pythonutilitydir = ${scriptsrootdir}/utility + +pythontc_diagdir = $(pkgdatadir)/python/tc_diag + +pythontc_diag_DATA = \ + compute_tc_diagnostics.py + +EXTRA_DIST = ${pythontc_diag_DATA} + +MAINTAINERCLEANFILES = Makefile.in diff --git a/scripts/python/tc_diag/Makefile.in b/scripts/python/tc_diag/Makefile.in new file mode 100644 index 0000000000..aa22e5d013 --- /dev/null +++ b/scripts/python/tc_diag/Makefile.in @@ -0,0 +1,521 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# SUBDIRS = include + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) 
;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = scripts/python/tc_diag +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) 
+DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pythontc_diagdir)" +DATA = $(pythontc_diag_DATA) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +am__DIST_COMMON = $(srcdir)/Makefile.in +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = 
@MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = @PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ 
+target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +pythontc_diagdir = $(pkgdatadir)/python/tc_diag +pythontc_diag_DATA = \ + compute_tc_diagnostics.py + +EXTRA_DIST = ${pythontc_diag_DATA} +MAINTAINERCLEANFILES = Makefile.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign scripts/python/tc_diag/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign scripts/python/tc_diag/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-pythontc_diagDATA: $(pythontc_diag_DATA) + @$(NORMAL_INSTALL) + @list='$(pythontc_diag_DATA)'; test -n "$(pythontc_diagdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pythontc_diagdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pythontc_diagdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read 
files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythontc_diagdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pythontc_diagdir)" || exit $$?; \ + done + +uninstall-pythontc_diagDATA: + @$(NORMAL_UNINSTALL) + @list='$(pythontc_diag_DATA)'; test -n "$(pythontc_diagdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pythontc_diagdir)'; $(am__uninstall_files_from_dir) +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(DATA) +installdirs: + for dir in "$(DESTDIR)$(pythontc_diagdir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+ -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-pythontc_diagDATA + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-pythontc_diagDATA + +.MAKE: install-am install-strip + +.PHONY: all all-am check check-am clean clean-generic cscopelist-am \ + ctags-am distclean distclean-generic distdir dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-pythontc_diagDATA \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am tags-am uninstall \ + uninstall-am uninstall-pythontc_diagDATA + +.PRECIOUS: Makefile + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. 
+.NOEXPORT: diff --git a/scripts/python/tc_diag/compute_tc_diagnostics.py b/scripts/python/tc_diag/compute_tc_diagnostics.py new file mode 100644 index 0000000000..14fff6f746 --- /dev/null +++ b/scripts/python/tc_diag/compute_tc_diagnostics.py @@ -0,0 +1,177 @@ +import os +import sys + +########################################### + +print("Python Script:\t" + repr(sys.argv[0])) + + ## + ## input file specified on the command line + ## + +if len(sys.argv) != 2: + print("ERROR: compute_tc_diagnostics.py -> Must specify exactly one input file.") + sys.exit(1) + +# Read the input file +input_file = os.path.expandvars(sys.argv[1]) + +try: + print("Input File:\t" + repr(input_file)) +except NameError: + print("Can't find the input file") + +# Diagnostics dictionary +tc_diag = { + 'MAXWIND': 9999, + 'RMW': 9999, + 'MIN_SLP': 9999, + 'SHR_MAG': 9999, + 'SHR_HDG': 9999, + 'STM_SPD': 9999, + 'STM_HDG': 9999, + 'SST': 9999, + 'OHC': 9999, + 'TPW': 9999, + 'LAND': 9999, + '850TANG': 9999, + '850VORT': 9999, + '200DVRG': 9999, + 'T_SURF': 9999, + 'R_SURF': 9999, + 'P_SURF': 9999, + 'U_SURF': 9999, + 'V_SURF': 9999, + 'T_1000': 9999, + 'R_1000': 9999, + 'Z_1000': 9999, + 'U_1000': 9999, + 'V_1000': 9999, + 'T_0975': 9999, + 'R_0975': 9999, + 'Z_0975': 9999, + 'U_0975': 9999, + 'V_0975': 9999, + 'T_0950': 9999, + 'R_0950': 9999, + 'Z_0950': 9999, + 'U_0950': 9999, + 'V_0950': 9999, + 'T_0925': 9999, + 'R_0925': 9999, + 'Z_0925': 9999, + 'U_0925': 9999, + 'V_0925': 9999, + 'T_0900': 9999, + 'R_0900': 9999, + 'Z_0900': 9999, + 'U_0900': 9999, + 'V_0900': 9999, + 'T_0850': 9999, + 'R_0850': 9999, + 'Z_0850': 9999, + 'U_0850': 9999, + 'V_0850': 9999, + 'T_0800': 9999, + 'R_0800': 9999, + 'Z_0800': 9999, + 'U_0800': 9999, + 'V_0800': 9999, + 'T_0750': 9999, + 'R_0750': 9999, + 'Z_0750': 9999, + 'U_0750': 9999, + 'V_0750': 9999, + 'T_0700': 9999, + 'R_0700': 9999, + 'Z_0700': 9999, + 'U_0700': 9999, + 'V_0700': 9999, + 'T_0650': 9999, + 'R_0650': 9999, + 'Z_0650': 9999, + 'U_0650': 
9999, + 'V_0650': 9999, + 'T_0600': 9999, + 'R_0600': 9999, + 'Z_0600': 9999, + 'U_0600': 9999, + 'V_0600': 9999, + 'T_0550': 9999, + 'R_0550': 9999, + 'Z_0550': 9999, + 'U_0550': 9999, + 'V_0550': 9999, + 'T_0500': 9999, + 'R_0500': 9999, + 'Z_0500': 9999, + 'U_0500': 9999, + 'V_0500': 9999, + 'T_0450': 9999, + 'R_0450': 9999, + 'Z_0450': 9999, + 'U_0450': 9999, + 'V_0450': 9999, + 'T_0400': 9999, + 'R_0400': 9999, + 'Z_0400': 9999, + 'U_0400': 9999, + 'V_0400': 9999, + 'T_0350': 9999, + 'R_0350': 9999, + 'Z_0350': 9999, + 'U_0350': 9999, + 'V_0350': 9999, + 'T_0300': 9999, + 'R_0300': 9999, + 'Z_0300': 9999, + 'U_0300': 9999, + 'V_0300': 9999, + 'T_0250': 9999, + 'R_0250': 9999, + 'Z_0250': 9999, + 'U_0250': 9999, + 'V_0250': 9999, + 'T_0200': 9999, + 'R_0200': 9999, + 'Z_0200': 9999, + 'U_0200': 9999, + 'V_0200': 9999, + 'T_0150': 9999, + 'R_0150': 9999, + 'Z_0150': 9999, + 'U_0150': 9999, + 'V_0150': 9999, + 'T_0100': 9999, + 'R_0100': 9999, + 'Z_0100': 9999, + 'U_0100': 9999, + 'V_0100': 9999, + 'T_0070': 9999, + 'R_0070': 9999, + 'Z_0070': 9999, + 'U_0070': 9999, + 'V_0070': 9999, + 'T_0050': 9999, + 'R_0050': 9999, + 'Z_0050': 9999, + 'U_0050': 9999, + 'V_0050': 9999, + 'T_0030': 9999, + 'R_0030': 9999, + 'Z_0030': 9999, + 'U_0030': 9999, + 'V_0030': 9999, + 'T_0020': 9999, + 'R_0020': 9999, + 'Z_0020': 9999, + 'U_0020': 9999, + 'V_0020': 9999, + 'T_0010': 9999, + 'R_0010': 9999, + 'Z_0010': 9999, + 'U_0010': 9999, + 'V_0010': 9999, + 'TGRD': 9999 +} + diff --git a/src/basic/vx_cal/time_array.cc b/src/basic/vx_cal/time_array.cc index 0cc068d350..5430220f98 100644 --- a/src/basic/vx_cal/time_array.cc +++ b/src/basic/vx_cal/time_array.cc @@ -389,6 +389,30 @@ return; //////////////////////////////////////////////////////////////////////// +void TimeArray::add_const(unixtime u, int n) + +{ + +extend(Nelements + n); + +int j; + +for (j=0; j "; + if(description) cs << description << " "; + cs << "is provided in BETA status for MET " << met_version << ".\n"; + cs 
<< "Please see the release notes of future MET versions for updates."; + mlog << Warning << "\n" << cs << "\n\n"; + + return; +} + + +//////////////////////////////////////////////////////////////////////// + + bool match_met_version(const char * check_version) { // diff --git a/src/basic/vx_util/string_fxns.h b/src/basic/vx_util/string_fxns.h index 78846a4297..47b16ed027 100644 --- a/src/basic/vx_util/string_fxns.h +++ b/src/basic/vx_util/string_fxns.h @@ -6,23 +6,19 @@ // ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA // *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* - - //////////////////////////////////////////////////////////////////////// - #ifndef __STRING_FXNS_H__ #define __STRING_FXNS_H__ - //////////////////////////////////////////////////////////////////////// - #include "concat_string.h" - //////////////////////////////////////////////////////////////////////// +extern void print_beta_warning(const char *description, + const char *method_name=nullptr); extern bool match_met_version(const char *); @@ -61,10 +57,6 @@ extern int parse_thresh_index(const char *str); //////////////////////////////////////////////////////////////////////// - #endif // __STRING_FXNS_H__ - //////////////////////////////////////////////////////////////////////// - - diff --git a/src/libcode/vx_data2d/var_info.cc b/src/libcode/vx_data2d/var_info.cc index f074b9bc53..1a36210d3d 100644 --- a/src/libcode/vx_data2d/var_info.cc +++ b/src/libcode/vx_data2d/var_info.cc @@ -242,6 +242,27 @@ void VarInfo::dump(ostream &out) const { /////////////////////////////////////////////////////////////////////////////// +ConcatString VarInfo::magic_time_str() const { + ConcatString cs(MagicStr); + + // Report timing information when specified + if(Init != 0) { + cs << ", InitTime = " << unix_to_yyyymmdd_hhmmss(Init); + } + + if(Valid != 0) { + cs << ", ValidTime = " << unix_to_yyyymmdd_hhmmss(Valid); + } + + if(!is_bad_data(Lead)) { + cs << ", LeadTime = " << sec_to_hhmmss(Lead); + 
} + + return(cs); +} + +/////////////////////////////////////////////////////////////////////////////// + void VarInfo::set_req_name(const char *str) { ReqName = str; return; diff --git a/src/libcode/vx_data2d/var_info.h b/src/libcode/vx_data2d/var_info.h index e990c587a2..de47e09e60 100644 --- a/src/libcode/vx_data2d/var_info.h +++ b/src/libcode/vx_data2d/var_info.h @@ -105,6 +105,7 @@ class VarInfo virtual GrdFileType file_type() const = 0; ConcatString magic_str() const; + ConcatString magic_time_str() const; ConcatString req_name() const; ConcatString name() const; ConcatString units() const; diff --git a/src/libcode/vx_data2d_factory/var_info_factory.cc b/src/libcode/vx_data2d_factory/var_info_factory.cc index a716e44f69..d82a74a8ad 100644 --- a/src/libcode/vx_data2d_factory/var_info_factory.cc +++ b/src/libcode/vx_data2d_factory/var_info_factory.cc @@ -136,3 +136,17 @@ VarInfo * VarInfoFactory::new_var_info(ConcatString s) { } /////////////////////////////////////////////////////////////////////////////// + +VarInfo * VarInfoFactory::new_copy(const VarInfo *vi_in) { + + if(!vi_in) return ( nullptr ); + + VarInfo *vi_copy = new_var_info(vi_in->file_type()); + + *vi_copy = *vi_in; + + return(vi_copy); + +} + +/////////////////////////////////////////////////////////////////////////////// diff --git a/src/libcode/vx_data2d_factory/var_info_factory.h b/src/libcode/vx_data2d_factory/var_info_factory.h index 2f95d3ac4e..747fb9d19a 100644 --- a/src/libcode/vx_data2d_factory/var_info_factory.h +++ b/src/libcode/vx_data2d_factory/var_info_factory.h @@ -25,6 +25,7 @@ class VarInfoFactory public: static VarInfo *new_var_info(GrdFileType t); static VarInfo *new_var_info(ConcatString s); + static VarInfo *new_copy(const VarInfo *); }; /////////////////////////////////////////////////////////////////////////////// diff --git a/src/libcode/vx_grid/tcrmw_grid.h b/src/libcode/vx_grid/tcrmw_grid.h index 771b497bf9..98b0d4debc 100644 --- a/src/libcode/vx_grid/tcrmw_grid.h +++ 
b/src/libcode/vx_grid/tcrmw_grid.h @@ -74,9 +74,9 @@ class TcrmwGrid : public RotatedLatLonGrid { int azimuth_n () const; double range_max_km () const; - double range_delta_km () const; // Range_Max_km/(Range_n - 1) + double range_delta_km () const; // Range_Max_km/Range_n - double azimuth_delta_deg () const; // 360.0/(Azimuth_n - 1) + double azimuth_delta_deg () const; // 360.0/Azimuth_n double lat_center_deg () const; double lon_center_deg () const; @@ -122,9 +122,9 @@ inline int TcrmwGrid::azimuth_n () const { return ( Azimuth_n ); } inline double TcrmwGrid::range_max_km () const { return ( Range_max_km ); } -inline double TcrmwGrid::range_delta_km () const { return ( Range_max_km/(Range_n - 1.0) ); } +inline double TcrmwGrid::range_delta_km () const { return ( Range_max_km/Range_n ); } -inline double TcrmwGrid::azimuth_delta_deg () const { return ( 360.0/(Azimuth_n - 1.0) ); } +inline double TcrmwGrid::azimuth_delta_deg () const { return ( 360.0/Azimuth_n ); } inline double TcrmwGrid::lat_center_deg () const { return ( Lat_Center_Deg ); } inline double TcrmwGrid::lon_center_deg () const { return ( Lon_Center_Deg ); } diff --git a/src/libcode/vx_nc_util/nc_utils.cc b/src/libcode/vx_nc_util/nc_utils.cc index 443caa837d..6350b6576a 100644 --- a/src/libcode/vx_nc_util/nc_utils.cc +++ b/src/libcode/vx_nc_util/nc_utils.cc @@ -380,7 +380,6 @@ bool get_cf_conventions(const netCDF::NcFile *nc, ConcatString& conventions_valu return has_attr; } - //////////////////////////////////////////////////////////////////////// ConcatString get_log_msg_for_att(const NcVarAtt *att) { @@ -734,7 +733,6 @@ bool get_global_att(const NcFile *nc, const ConcatString &att_name, //////////////////////////////////////////////////////////////////////// - bool get_global_att(const NcFile *nc, const ConcatString& att_name, int &att_val, bool error_out) { static const char *method_name = "\nget_global_att(int) -> "; @@ -875,7 +873,6 @@ void add_att(NcVar *var, const string &att_name, const double 
att_val) { var->putAtt(att_name, NcType::nc_DOUBLE, att_val); } - //////////////////////////////////////////////////////////////////////// int get_var_names(NcFile *nc, StringArray *var_names) { @@ -2592,6 +2589,7 @@ void copy_nc_att_short(NcVar *var_to, NcVarAtt *from_att) { } } +//////////////////////////////////////////////////////////////////////// NcVar *copy_nc_var(NcFile *to_nc, NcVar *from_var, const int deflate_level, const bool all_attrs) { @@ -2824,7 +2822,7 @@ void copy_nc_data_int(NcVar *var_from, NcVar *var_to, int data_size) { //////////////////////////////////////////////////////////////////////// void copy_nc_data_short(NcVar *var_from, NcVar *var_to, int data_size) { - const string method_name = "copy_nc_data_double"; + //const string method_name = "copy_nc_data_double"; short *data = new short[data_size]; var_from->getVar(data); var_to->putVar(data); @@ -2836,28 +2834,43 @@ void copy_nc_data_short(NcVar *var_from, NcVar *var_to, int data_size) { //////////////////////////////////////////////////////////////////////// +void copy_nc_data_string(NcVar *var_from, NcVar *var_to, int data_size) { + //const string method_name = "copy_nc_data_string"; + string *data = new string[data_size]; + var_from->getVar(data); + var_to->putVar(data); + // mlog << Error << "\n" << method_name << " -> error writing the variable " + // << GET_NC_NAME_P(var_to) << " to the netCDF file\n\n"; + // exit(1); + delete[] data; +} + +//////////////////////////////////////////////////////////////////////// + void copy_nc_var_data(NcVar *var_from, NcVar *var_to) { const string method_name = "copy_nc_var_data()"; int data_size = get_data_size(var_from); int dataType = GET_NC_TYPE_ID_P(var_from); + switch (dataType) { - case NC_DOUBLE: - copy_nc_data_double(var_from, var_to, data_size); - break; - - case NC_FLOAT: - copy_nc_data_float(var_from, var_to, data_size); - break; - case NC_SHORT: - copy_nc_data_short(var_from, var_to, data_size); - break; - case NC_INT: - 
copy_nc_data_int(var_from, var_to, data_size); - break; - - case NC_CHAR: - copy_nc_data_char(var_from, var_to, data_size); - break; + case NC_DOUBLE: + copy_nc_data_double(var_from, var_to, data_size); + break; + case NC_FLOAT: + copy_nc_data_float(var_from, var_to, data_size); + break; + case NC_SHORT: + copy_nc_data_short(var_from, var_to, data_size); + break; + case NC_INT: + copy_nc_data_int(var_from, var_to, data_size); + break; + case NC_CHAR: + copy_nc_data_char(var_from, var_to, data_size); + break; + case NC_STRING: + copy_nc_data_string(var_from, var_to, data_size); + break; default: mlog << Error << "\n" << method_name << " -> " diff --git a/src/libcode/vx_python3_utils/python3_util.h b/src/libcode/vx_python3_utils/python3_util.h index e81dfe260d..33a5ed63b3 100644 --- a/src/libcode/vx_python3_utils/python3_util.h +++ b/src/libcode/vx_python3_utils/python3_util.h @@ -12,6 +12,8 @@ #include +#include "vx_log.h" + //////////////////////////////////////////////////////////////////////// diff --git a/src/libcode/vx_series_data/series_data.cc b/src/libcode/vx_series_data/series_data.cc index 21d3a32f0e..d7a0e587d9 100644 --- a/src/libcode/vx_series_data/series_data.cc +++ b/src/libcode/vx_series_data/series_data.cc @@ -23,22 +23,33 @@ void get_series_entry(int i_series, VarInfo* data_info, const StringArray& search_files, const GrdFileType type, DataPlane& dp, Grid& grid) { + int i; + bool found; mlog << Debug(3) << "Processing series entry " << i_series + 1 << ": " - << data_info->magic_str() << "\n"; - - ConcatString filename; - filename = search_files[i_series]; + << data_info->magic_time_str() << "\n"; // Initialize dp.clear(); - // Error out if requested data is not found in the i-th file - if(!read_single_entry(data_info, filename, type, dp, grid)) { + // Search for data, beginning with the i_series index + for(i=0,found=false; i " - << "Could not find data for " << data_info->magic_str() - << " in file: " << filename << "\n\n"; + << "Could not find 
data for " << data_info->magic_time_str() + << " in file list:\n:" << write_css(search_files) << "\n\n"; exit(1); } diff --git a/src/libcode/vx_shapedata/mode_conf_info.cc b/src/libcode/vx_shapedata/mode_conf_info.cc index eec28a0445..af16067528 100644 --- a/src/libcode/vx_shapedata/mode_conf_info.cc +++ b/src/libcode/vx_shapedata/mode_conf_info.cc @@ -468,16 +468,16 @@ PlotInfo plot_info; // Parse the interest functions - centroid_dist_if = dict->lookup_pwl(conf_key_centroid_dist); - boundary_dist_if = dict->lookup_pwl(conf_key_boundary_dist); - convex_hull_dist_if = dict->lookup_pwl(conf_key_convex_hull_dist); - angle_diff_if = dict->lookup_pwl(conf_key_angle_diff); - aspect_diff_if = dict->lookup_pwl(conf_key_aspect_diff); - area_ratio_if = dict->lookup_pwl(conf_key_area_ratio); - int_area_ratio_if = dict->lookup_pwl(conf_key_int_area_ratio); - curvature_ratio_if = dict->lookup_pwl(conf_key_curvature_ratio); - complexity_ratio_if = dict->lookup_pwl(conf_key_complexity_ratio); - inten_perc_ratio_if = dict->lookup_pwl(conf_key_inten_perc_ratio); + centroid_dist_if = parse_interest_function(dict, conf_key_centroid_dist); + boundary_dist_if = parse_interest_function(dict, conf_key_boundary_dist); + convex_hull_dist_if = parse_interest_function(dict, conf_key_convex_hull_dist); + angle_diff_if = parse_interest_function(dict, conf_key_angle_diff); + aspect_diff_if = parse_interest_function(dict, conf_key_aspect_diff); + area_ratio_if = parse_interest_function(dict, conf_key_area_ratio); + int_area_ratio_if = parse_interest_function(dict, conf_key_int_area_ratio); + curvature_ratio_if = parse_interest_function(dict, conf_key_curvature_ratio); + complexity_ratio_if = parse_interest_function(dict, conf_key_complexity_ratio); + inten_perc_ratio_if = parse_interest_function(dict, conf_key_inten_perc_ratio); // Conf: total_interest_thresh @@ -487,7 +487,7 @@ PlotInfo plot_info; if(total_interest_thresh < 0 || total_interest_thresh > 1) { mlog << Error << 
"\nModeConfInfo::process_config() -> " - << "total_interest_thresh (" << total_interest_thresh + << "\"total_interest_thresh\" (" << total_interest_thresh << ") must be set between 0 and 1.\n\n"; exit(1); } @@ -644,6 +644,43 @@ return; //////////////////////////////////////////////////////////////////////// +PiecewiseLinear * ModeConfInfo::parse_interest_function(Dictionary * dict, const char * conf_key_if) + +{ + + // + // lookup piecewise linear interest function + // + +PiecewiseLinear * pwl_if = dict->lookup_pwl(conf_key_if); + + // + // range check the points + // + +for (int j=0; jn_points(); ++j) { + + if ( pwl_if->y(j) < 0 || pwl_if->y(j) > 1 ) { + + mlog << Error << "\nModeConfInfo::parse_interest_function() -> " + << "all \"" << conf_key_if << "\" interest function points (" + << pwl_if->x(j) << ", " << pwl_if->y(j) + << ") must be in the range of 0 and 1.\n\n"; + + exit(1); + + } + +} // for j + +return ( pwl_if ); + +} + + +//////////////////////////////////////////////////////////////////////// + + void ModeConfInfo::set_field_index(int k) { diff --git a/src/libcode/vx_shapedata/mode_conf_info.h b/src/libcode/vx_shapedata/mode_conf_info.h index 136ac0641d..c9c063477a 100644 --- a/src/libcode/vx_shapedata/mode_conf_info.h +++ b/src/libcode/vx_shapedata/mode_conf_info.h @@ -117,6 +117,8 @@ class ModeConfInfo { void read_fields (Mode_Field_Info * &, Dictionary * dict, GrdFileType, char _fo); + PiecewiseLinear * parse_interest_function(Dictionary * dict, const char * conf_key_if); + // // weights // diff --git a/src/libcode/vx_tc_util/gen_shape_info.cc b/src/libcode/vx_tc_util/gen_shape_info.cc index b912d3d387..633c559c7c 100644 --- a/src/libcode/vx_tc_util/gen_shape_info.cc +++ b/src/libcode/vx_tc_util/gen_shape_info.cc @@ -82,7 +82,8 @@ ConcatString GenShapeInfo::serialize() const { unix_to_yyyymmdd_hhmmss(IssueTime).text() : na_str) << "\"" << ", NPoints = " << Poly.n_points << ", Lat = " << Poly.y_min() << " to " << Poly.y_max() - << ", Lon = " << 
Poly.x_min() << " to " << Poly.x_max(); + << ", Lon = " << Poly.x_min() << " to " << Poly.x_max() + << ", NProb = " << ProbVal.n(); return(s); diff --git a/src/libcode/vx_tc_util/track_point.cc b/src/libcode/vx_tc_util/track_point.cc index 52849ceb10..31ee582835 100644 --- a/src/libcode/vx_tc_util/track_point.cc +++ b/src/libcode/vx_tc_util/track_point.cc @@ -199,6 +199,9 @@ void QuadInfo::set_wind(const ATCFTrackLine &l) { l.radius1(), l.radius2(), l.radius3(), l.radius4()); + // MET #2532 Derive ALVal from quadrants + if(is_bad_data(ALVal)) set_al_from_quad_vals(); + return; } @@ -273,6 +276,29 @@ void QuadInfo::set_quad_vals(QuadrantType ref_quad, return; } +//////////////////////////////////////////////////////////////////////// +// +// MET #2532: +// Compute the full circle value as the mean of the non-zero quadrants. +// +//////////////////////////////////////////////////////////////////////// + +void QuadInfo::set_al_from_quad_vals() { + double s = 0.0; + int n = 0; + + if(!is_bad_data(NEVal) || !is_bad_data(SEVal) || + !is_bad_data(SWVal) || !is_bad_data(NWVal)) { + if(NEVal > 0) { s += NEVal; n++; } + if(SEVal > 0) { s += SEVal; n++; } + if(SWVal > 0) { s += SWVal; n++; } + if(NWVal > 0) { s += NWVal; n++; } + ALVal = (n > 0 ? 
s/n : 0.0); + } + + return; +} + //////////////////////////////////////////////////////////////////////// bool QuadInfo::is_match_wind(const ATCFTrackLine &l) const { diff --git a/src/libcode/vx_tc_util/track_point.h b/src/libcode/vx_tc_util/track_point.h index f4acb29b6b..b6e4fb89be 100644 --- a/src/libcode/vx_tc_util/track_point.h +++ b/src/libcode/vx_tc_util/track_point.h @@ -64,6 +64,7 @@ class QuadInfo { void set_wind(const ATCFTrackLine &); void set_seas(const ATCFTrackLine &); void set_quad_vals(QuadrantType, int, int, int, int); + void set_al_from_quad_vals(); void set_intensity(int); void set_al_val(double); diff --git a/src/libcode/vx_tc_util/vx_tc_nc_util.cc b/src/libcode/vx_tc_util/vx_tc_nc_util.cc index 2cde15bce3..4c5297d8a1 100644 --- a/src/libcode/vx_tc_util/vx_tc_nc_util.cc +++ b/src/libcode/vx_tc_util/vx_tc_nc_util.cc @@ -17,78 +17,148 @@ using namespace netCDF; //////////////////////////////////////////////////////////////////////// -void write_tc_tracks(NcFile* nc_out, - const NcDim& track_point_dim, - const TrackInfoArray& tracks) { +void write_tc_track_lines(NcFile* nc_out, + const TrackInfo& track) { - TrackInfo track = tracks[0]; StringArray track_lines = track.track_lines(); - NcDim track_line_dim = add_dim(nc_out, "track_line", track_lines.n()); - - NcVar track_lines_var = nc_out->addVar( + NcDim track_line_dim = add_dim(nc_out, + "track_line", track_lines.n()); + NcVar track_line_var = nc_out->addVar( "TrackLines", ncString, track_line_dim); + vector counts; + counts.push_back(1); + + for(int i = 0; i < track_lines.n(); i++) { + vector offsets; + offsets.push_back(i); + string line = track_lines[i]; + const char* str = line.c_str(); + track_line_var.putVar(offsets, counts, &str); + } +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_track_lat_lon(NcFile* nc_out, + const NcDim& track_point_dim, + const TrackInfo& track) { + NcVar track_lat_var = nc_out->addVar( - "Lat", ncDouble, 
track_point_dim); - add_att(&track_lat_var, "long_name", "Track Point Latitude"); - add_att(&track_lat_var, "units", "degrees_east"); + "FullTrackLat", ncDouble, track_point_dim); + add_att(&track_lat_var, "long_name", "Full Track Point Latitude"); + add_att(&track_lat_var, "units", "degrees_north"); add_att(&track_lat_var, "standard_name", "latitude_track"); NcVar track_lon_var = nc_out->addVar( - "Lon", ncDouble, track_point_dim); - add_att(&track_lon_var, "long_name", "Track Point Longitude"); - add_att(&track_lon_var, "units", "degrees_north"); + "FullTrackLon", ncDouble, track_point_dim); + add_att(&track_lon_var, "long_name", "Full Track Point Longitude"); + add_att(&track_lon_var, "units", "degrees_east"); add_att(&track_lon_var, "standard_name", "longitude_track"); - NcVar track_mrd_var = nc_out->addVar( - "RMW", ncDouble, track_point_dim); - add_att(&track_mrd_var, "long_name", "Radius of Maximum Winds"); - add_att(&track_mrd_var, "units", "nautical_miles"); - add_att(&track_mrd_var, "standard_name", "radius_max_wind"); double* track_lat_data = new double[track.n_points()]; double* track_lon_data = new double[track.n_points()]; - double* track_mrd_data = new double[track.n_points()]; for(int i = 0; i < track.n_points(); i++) { - mlog << Debug(4) << track[i].serialize() << "\n"; + mlog << Debug(5) << track[i].serialize() << "\n"; track_lat_data[i] = track[i].lat(); track_lon_data[i] = track[i].lon(); - track_mrd_data[i] = track[i].mrd(); } vector offsets; + offsets.push_back(0); + vector counts; + counts.push_back(track.n_points()); - mlog << Debug(2) << "Writing " << track_lines.n() << " track lines.\n"; + track_lat_var.putVar(offsets, counts, track_lat_data); + track_lon_var.putVar(offsets, counts, track_lon_data); - for(int i = 0; i < track_lines.n(); i++) { - offsets.clear(); - offsets.push_back(i); - counts.clear(); - counts.push_back(1); - string line = track_lines[i]; - mlog << Debug(3) << line << "\n"; - const char* str = line.c_str(); - 
track_lines_var.putVar(offsets, counts, &str); + delete[] track_lat_data; + delete[] track_lon_data; +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_track_point(NcFile* nc_out, + const NcDim& valid_dim, + const TrackPoint& point) { + double v; + + vector offsets; + vector counts; + + offsets.push_back(0); + counts.push_back(1); + + // Write track point values for lat, lon, vmax, and mslp + + NcVar lat_var = nc_out->addVar( + "TrackLat", ncDouble, valid_dim); + add_att(&lat_var, "long_name", "Track Point Latitude"); + add_att(&lat_var, "units", "degrees_north"); + add_att(&lat_var, "standard_name", "latitude_track"); + v = point.lat(); + lat_var.putVar(offsets, counts, &v); + + NcVar lon_var = nc_out->addVar( + "TrackLon", ncDouble, valid_dim); + add_att(&lon_var, "long_name", "Track Point Longitude"); + add_att(&lon_var, "units", "degrees_east"); + add_att(&lon_var, "standard_name", "longitude_track"); + v = point.lon(); + lon_var.putVar(offsets, counts, &v); + + NcVar vmax_var = nc_out->addVar( + "TrackVMax", ncDouble, valid_dim); + add_att(&vmax_var, "long_name", "Maximum sustained wind speed"); + add_att(&vmax_var, "units", "kts"); + add_att(&vmax_var, "_FillValue", bad_data_double); + v = point.v_max(); + vmax_var.putVar(offsets, counts, &v); + + NcVar mslp_var = nc_out->addVar( + "TrackMSLP", ncDouble, valid_dim); + add_att(&mslp_var, "long_name", "Minimum sea level pressure"); + add_att(&mslp_var, "units", "millibars"); + add_att(&mslp_var, "_FillValue", bad_data_double); + v = point.mslp(); + mslp_var.putVar(offsets, counts, &v); + +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_rmw(NcFile* nc_out, + const NcDim& track_point_dim, + const TrackInfo& track) { + + NcVar track_mrd_var = nc_out->addVar( + "RMW", ncDouble, track_point_dim); + add_att(&track_mrd_var, "long_name", "Radius of Maximum Winds"); + add_att(&track_mrd_var, "units", "nautical_miles"); + 
add_att(&track_mrd_var, "standard_name", "radius_max_wind"); + + double* track_mrd_data = new double[track.n_points()]; + + for(int i = 0; i < track.n_points(); i++) { + track_mrd_data[i] = track[i].mrd(); } - offsets.clear(); + vector offsets; offsets.push_back(0); - counts.clear(); + vector counts; counts.push_back(track.n_points()); - track_lat_var.putVar(offsets, counts, track_lat_data); - track_lon_var.putVar(offsets, counts, track_lon_data); track_mrd_var.putVar(offsets, counts, track_mrd_data); - delete[] track_lat_data; - delete[] track_lon_data; delete[] track_mrd_data; } //////////////////////////////////////////////////////////////////////// + set get_pressure_level_strings( map > variable_levels) { @@ -201,7 +271,7 @@ void def_tc_pressure(NcFile* nc_out, // Define variable pressure_var = nc_out->addVar("pressure", ncDouble, pressure_dim); - // Set attributes + // Add attributes add_att(&pressure_var, "long_name", "pressure"); add_att(&pressure_var, "units", "millibars"); add_att(&pressure_var, "standard_name", "pressure"); @@ -239,9 +309,16 @@ void def_tc_range_azimuth(NcFile* nc_out, range_var = nc_out->addVar("range", ncDouble, range_dim); azimuth_var = nc_out->addVar("azimuth", ncDouble, azimuth_dim); - // Set attributes + // Add attributes add_att(&range_var, "long_name", "range"); - add_att(&range_var, "units", "fraction of RMW"); + + // Range is defined as a fraction of RMW or in kilometers + if(is_bad_data(rmw_scale)) { + add_att(&range_var, "units", "km"); + } + else { + add_att(&range_var, "units", "fraction of RMW"); + } add_att(&range_var, "standard_name", "range"); add_att(&range_var, "_FillValue", bad_data_double); @@ -252,7 +329,8 @@ void def_tc_range_azimuth(NcFile* nc_out, // Compute grid coordinates for (int i = 0; i < grid.range_n(); i++) { - range_data[i] = i * rmw_scale; + if(is_bad_data(rmw_scale)) range_data[i] = i * grid.range_delta_km(); + else range_data[i] = i * rmw_scale; } for (int j = 0; j < grid.azimuth_n(); j++) { 
azimuth_data[j] = j * grid.azimuth_delta_deg(); @@ -271,39 +349,146 @@ void def_tc_range_azimuth(NcFile* nc_out, //////////////////////////////////////////////////////////////////////// -void def_tc_time_lat_lon(NcFile* nc_out, +void def_tc_init_time(NcFile* nc_out, + NcVar& var_str, NcVar& var_ut) { + + // Initialization time, as a formatted string + var_str = nc_out->addVar("init_time", ncString); + add_att(&var_str, "long_name", "Initialization Time"); + add_att(&var_str, "units", "YYYYMMDD_HHMMSS"); + add_att(&var_str, "standard_name", "init_time"); + + // Initialization time, as a unixtime string + var_ut = nc_out->addVar("init_time_ut", ncString); + add_att(&var_ut, "long_name", "Init Time"); + add_att(&var_ut, "units", "unixtime"); + add_att(&var_ut, "standard_name", "init_time"); +} + +//////////////////////////////////////////////////////////////////////// + +void def_tc_valid_time(NcFile* nc_out, + const NcDim& track_point_dim, + NcVar& var_str, NcVar& var_ut) { + + // Valid time, as a formatted string + var_str = nc_out->addVar("valid_time", ncString, + track_point_dim); + add_att(&var_str, "long_name", "Valid Time"); + add_att(&var_str, "units", "YYYYMMDD_HHMMSS"); + add_att(&var_str, "standard_name", "valid_time"); + + // Valid time, as a unixtime string + var_ut = nc_out->addVar("valid_time_ut", ncString, + track_point_dim); + add_att(&var_ut, "long_name", "Valid Time"); + add_att(&var_ut, "units", "unixtime"); + add_att(&var_ut, "standard_name", "valid_time"); +} + +//////////////////////////////////////////////////////////////////////// + +void def_tc_lead_time(NcFile* nc_out, + const NcDim& track_point_dim, + NcVar& var_str, NcVar& var_sec) { + + // Lead time, as a formatted string + var_str = nc_out->addVar("lead_time", ncString, + track_point_dim); + add_att(&var_str, "long_name", "Lead Time"); + add_att(&var_str, "units", "HHMMSS"); + add_att(&var_str, "standard_name", "lead_time"); + + // Lead time, as an integer number of seconds + var_sec = 
nc_out->addVar("lead_time_sec", ncInt, + track_point_dim); + add_att(&var_sec, "long_name", "Lead Time"); + add_att(&var_sec, "units", "seconds"); + add_att(&var_sec, "standard_name", "lead_time"); +} + +//////////////////////////////////////////////////////////////////////// + +void def_tc_lat_lon(NcFile* nc_out, const NcDim& track_point_dim, const NcDim& range_dim, const NcDim& azimuth_dim, - NcVar& valid_time_var, NcVar& lat_var, NcVar& lon_var) { + NcVar& lat_var, NcVar& lon_var) { vector dims; dims.push_back(track_point_dim); dims.push_back(range_dim); dims.push_back(azimuth_dim); - valid_time_var = nc_out->addVar("valid_time", ncUint64, - track_point_dim); lat_var = nc_out->addVar("lat", ncDouble, dims); lon_var = nc_out->addVar("lon", ncDouble, dims); - // Set attributes - add_att(&valid_time_var, "long_name", "valid_time"); - add_att(&valid_time_var, "units", "yyyymmddhh"); - add_att(&valid_time_var, "standard_name", "valid_time"); - - add_att(&lat_var, "long_name", "latitude"); + // Add attributes + add_att(&lat_var, "long_name", "Latitude"); add_att(&lat_var, "units", "degrees_north"); add_att(&lat_var, "standard_name", "latitude"); - add_att(&lon_var, "long_name", "longitude"); + add_att(&lon_var, "long_name", "Longitude"); add_att(&lon_var, "units", "degrees_east"); add_att(&lon_var, "standard_name", "longitude"); } //////////////////////////////////////////////////////////////////////// -void write_tc_valid_time(NcFile* nc_out, - const int& i_point, const NcVar& var, - const long& valid_time) { +void write_tc_init_time(NcFile* nc_out, + const NcVar& var_str, const NcVar& var_ut, + const unixtime& ut) { + + ConcatString cs; + const char* str; + + // Initialization time, as a formatted string + unix_to_yyyymmdd_hhmmss(ut, cs); + str = cs.c_str(); + var_str.putVar(&str); + + // Initialization time, as a unixtime string + cs << cs_erase << ut; + str = cs.c_str(); + var_ut.putVar(&str); +} + 
+//////////////////////////////////////////////////////////////////////// + +void write_tc_valid_time(NcFile* nc_out, const int& i_point, + const NcVar& var_str, const NcVar& var_ut, + const unixtime& ut) { + + ConcatString cs; + const char* str; + + vector offsets; + vector counts; + + offsets.clear(); + offsets.push_back(i_point); + + counts.clear(); + counts.push_back(1); + + // Valid time, as a formatted string + unix_to_yyyymmdd_hhmmss(ut, cs); + str = cs.c_str(); + var_str.putVar(offsets, counts, &str); + + + // Valid time, as a unixtime string + cs << cs_erase << ut; + str = cs.c_str(); + var_ut.putVar(offsets, counts, &str); +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_lead_time(NcFile* nc_out, const int& i_point, + const NcVar& var_str, const NcVar& var_sec, + const int& sec) { + + ConcatString cs; + const char* str; vector offsets; vector counts; @@ -314,7 +499,13 @@ void write_tc_valid_time(NcFile* nc_out, counts.clear(); counts.push_back(1); - var.putVar(offsets, counts, &valid_time); + // Lead time, as a formatted string + sec_to_hhmmss(sec, cs); + str = cs.c_str(); + var_str.putVar(offsets, counts, &str); + + // Lead time, as an integer number of seconds + var_sec.putVar(offsets, counts, &sec); } //////////////////////////////////////////////////////////////////////// @@ -383,7 +574,7 @@ void def_tc_data(NcFile* nc_out, data_var = nc_out->addVar( var_name, ncDouble, dims); - // Set attributes + // Add attributes add_att(&data_var, "long_name", data_info->long_name_attr()); add_att(&data_var, "units", data_info->units_attr()); add_att(&data_var, "_FillValue", bad_data_double); @@ -405,7 +596,7 @@ void def_tc_data_3d(NcFile* nc_out, data_var = nc_out->addVar( data_info->name_attr(), ncDouble, dims); - // Set attributes + // Add attributes add_att(&data_var, "long_name", data_info->long_name_attr()); add_att(&data_var, "units", data_info->units_attr()); add_att(&data_var, "_FillValue", bad_data_double); 
@@ -429,7 +620,7 @@ void def_tc_azi_mean_data(NcFile* nc_out, data_var = nc_out->addVar(var_name, ncDouble, dims); - // Set attributes + // Add attributes add_att(&data_var, "long_name", data_info->long_name_attr()); add_att(&data_var, "units", data_info->units_attr()); add_att(&data_var, "_FillValue", bad_data_double); @@ -542,32 +733,28 @@ extern void write_tc_pressure_level_data( map pressure_level_indices, const string& level_str, const int& i_point, const NcVar& var, const double* data) { - vector offsets; - vector counts; + write_tc_pressure_level_data(nc_out, grid, i_point, + pressure_level_indices[level_str], var, data); +} + +//////////////////////////////////////////////////////////////////////// + +extern void write_tc_pressure_level_data( + NcFile* nc_out, const TcrmwGrid& grid, + const int& i_point, const int& i_level, + const NcVar& var, const double* data) { vector offsets_3d; vector counts_3d; double* data_rev; - int i_level = pressure_level_indices[level_str]; - - offsets.clear(); - offsets.push_back(i_point); - offsets.push_back(0); - offsets.push_back(0); - offsets_3d.clear(); offsets_3d.push_back(i_point); offsets_3d.push_back(i_level); offsets_3d.push_back(0); offsets_3d.push_back(0); - counts.clear(); - counts.push_back(1); - counts.push_back(grid.range_n()); - counts.push_back(grid.azimuth_n()); - counts_3d.clear(); counts_3d.push_back(1); counts_3d.push_back(1); diff --git a/src/libcode/vx_tc_util/vx_tc_nc_util.h b/src/libcode/vx_tc_util/vx_tc_nc_util.h index d909cffe83..981d55e08e 100644 --- a/src/libcode/vx_tc_util/vx_tc_nc_util.h +++ b/src/libcode/vx_tc_util/vx_tc_nc_util.h @@ -23,8 +23,17 @@ //////////////////////////////////////////////////////////////////////// -extern void write_tc_tracks(netCDF::NcFile*, - const netCDF::NcDim&, const TrackInfoArray&); +extern void write_tc_track_lines(netCDF::NcFile*, + const TrackInfo&); + +extern void write_tc_track_lat_lon(netCDF::NcFile*, + const netCDF::NcDim&, const TrackInfo&); + +extern void 
write_tc_track_point(netCDF::NcFile*, + const netCDF::NcDim&, const TrackPoint&); + +extern void write_tc_rmw(netCDF::NcFile*, + const netCDF::NcDim&, const TrackInfo&); extern std::set get_pressure_level_strings( std::map >); @@ -47,9 +56,18 @@ extern void def_tc_pressure(netCDF::NcFile*, extern void def_tc_range_azimuth(netCDF::NcFile*, const netCDF::NcDim&, const netCDF::NcDim&, const TcrmwGrid&, double); -extern void def_tc_time_lat_lon(netCDF::NcFile*, +extern void def_tc_init_time(netCDF::NcFile*, + netCDF::NcVar&, netCDF::NcVar&); + +extern void def_tc_valid_time(netCDF::NcFile*, + const netCDF::NcDim&, netCDF::NcVar&, netCDF::NcVar&); + +extern void def_tc_lead_time(netCDF::NcFile*, + const netCDF::NcDim&, netCDF::NcVar&, netCDF::NcVar&); + +extern void def_tc_lat_lon(netCDF::NcFile*, const netCDF::NcDim&, const netCDF::NcDim&, const netCDF::NcDim&, - netCDF::NcVar&, netCDF::NcVar&, netCDF::NcVar&); + netCDF::NcVar&, netCDF::NcVar&); extern void def_tc_variables(netCDF::NcFile*, std::map >, @@ -69,8 +87,19 @@ extern void def_tc_azi_mean_data(netCDF::NcFile*, const netCDF::NcDim&, const netCDF::NcDim&, netCDF::NcVar&, VarInfo*); +extern void write_tc_init_time(netCDF::NcFile*, + const netCDF::NcVar&, const netCDF::NcVar&, + const unixtime&); + extern void write_tc_valid_time(netCDF::NcFile*, - const int&, const netCDF::NcVar&, const long&); + const int&, + const netCDF::NcVar&, const netCDF::NcVar&, + const unixtime&); + +extern void write_tc_lead_time(netCDF::NcFile*, + const int&, + const netCDF::NcVar&, const netCDF::NcVar&, + const int&); extern void write_tc_data(netCDF::NcFile*, const TcrmwGrid&, const int&, const netCDF::NcVar&, const double*); @@ -85,6 +114,9 @@ extern void write_tc_pressure_level_data(netCDF::NcFile*, const TcrmwGrid&, std::map, const std::string&, const int&, const netCDF::NcVar&, const double*); +extern void write_tc_pressure_level_data(netCDF::NcFile*, const TcrmwGrid&, + const int&, const int&, const netCDF::NcVar&, const 
double*); + //////////////////////////////////////////////////////////////////////// #endif // __VX_TC_NC_UTIL_H__ diff --git a/src/tools/tc_utils/Makefile.am b/src/tools/tc_utils/Makefile.am index 31ba943541..bbb74c967a 100644 --- a/src/tools/tc_utils/Makefile.am +++ b/src/tools/tc_utils/Makefile.am @@ -24,4 +24,8 @@ SUBDIRS = tc_dland \ rmw_analysis \ tc_stat +if ENABLE_PYTHON + SUBDIRS += tc_diag +endif + MAINTAINERCLEANFILES = Makefile.in diff --git a/src/tools/tc_utils/Makefile.in b/src/tools/tc_utils/Makefile.in index 97a28db892..63280f6d5e 100644 --- a/src/tools/tc_utils/Makefile.in +++ b/src/tools/tc_utils/Makefile.in @@ -87,6 +87,7 @@ PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ +@ENABLE_PYTHON_TRUE@am__append_1 = tc_diag subdir = src/tools/tc_utils ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.ac @@ -151,7 +152,8 @@ am__define_uniq_tagged_files = \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags -DIST_SUBDIRS = $(SUBDIRS) +DIST_SUBDIRS = tc_dland tc_pairs tc_stat tc_gen tc_rmw rmw_analysis \ + tc_diag am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ @@ -324,14 +326,8 @@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ -SUBDIRS = tc_dland \ - tc_pairs \ - tc_stat \ - tc_gen \ - tc_rmw \ - rmw_analysis \ - tc_stat - +SUBDIRS = tc_dland tc_pairs tc_stat tc_gen tc_rmw rmw_analysis tc_stat \ + $(am__append_1) MAINTAINERCLEANFILES = Makefile.in all: all-recursive diff --git a/src/tools/tc_utils/tc_diag/.gitignore b/src/tools/tc_utils/tc_diag/.gitignore new file mode 100644 index 0000000000..7087a92ed6 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/.gitignore @@ -0,0 +1,6 @@ +tc_diag +*.o +*.a +.deps +Makefile +*.dSYM diff --git a/src/tools/tc_utils/tc_diag/Makefile.am b/src/tools/tc_utils/tc_diag/Makefile.am new file mode 
100644 index 0000000000..c10b2a9752 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/Makefile.am @@ -0,0 +1,49 @@ +## @start 1 +## Makefile.am -- Process this file with automake to produce Makefile.in +## @end 1 + +MAINTAINERCLEANFILES = Makefile.in + +# Include the project definitions + +include ${top_srcdir}/Make-include + +# The program + +bin_PROGRAMS = tc_diag +tc_diag_SOURCES = tc_diag.cc \ + python_tc_diag.cc \ + tc_diag_conf_info.cc +tc_diag_CPPFLAGS = ${MET_CPPFLAGS} +tc_diag_LDFLAGS = ${MET_LDFLAGS} +tc_diag_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_analysis_util \ + -lvx_series_data \ + -lvx_tc_util \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_grib $(GRIB2_LIBS) \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_statistics \ + -lvx_data2d \ + -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ + -lvx_config \ + -lvx_gsl_prob \ + -lvx_cal \ + -lvx_nav \ + -lvx_util_math \ + -lvx_util \ + -lvx_math \ + -lvx_color \ + -lvx_log \ + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas \ + $(FLIBS) + +EXTRA_DIST = tc_diag_conf_info.h \ + python_tc_diag.h \ + tc_diag.h diff --git a/src/tools/tc_utils/tc_diag/Makefile.in b/src/tools/tc_utils/tc_diag/Makefile.in new file mode 100644 index 0000000000..7ab9727490 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/Makefile.in @@ -0,0 +1,734 @@ +# Makefile.in generated by automake 1.16.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2018 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. 
+ +@SET_MAKE@ + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = 
: +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +bin_PROGRAMS = tc_diag$(EXEEXT) +subdir = src/tools/tc_utils/tc_diag +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +am__installdirs = "$(DESTDIR)$(bindir)" +PROGRAMS = $(bin_PROGRAMS) +am_tc_diag_OBJECTS = tc_diag-tc_diag.$(OBJEXT) \ + tc_diag-python_tc_diag.$(OBJEXT) \ + tc_diag-tc_diag_conf_info.$(OBJEXT) +tc_diag_OBJECTS = $(am_tc_diag_OBJECTS) +am__DEPENDENCIES_1 = +tc_diag_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ + $(am__DEPENDENCIES_1) +tc_diag_LINK = $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(tc_diag_LDFLAGS) \ + $(LDFLAGS) -o $@ +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) +depcomp = $(SHELL) $(top_srcdir)/depcomp +am__maybe_remake_depfiles = depfiles +am__depfiles_remade = ./$(DEPDIR)/tc_diag-python_tc_diag.Po \ + ./$(DEPDIR)/tc_diag-tc_diag.Po \ + ./$(DEPDIR)/tc_diag-tc_diag_conf_info.Po +am__mv = mv -f +AM_V_lt = $(am__v_lt_@AM_V@) +am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) +am__v_lt_0 = --silent +am__v_lt_1 = +CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ + $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) +AM_V_CXX = $(am__v_CXX_@AM_V@) +am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) +am__v_CXX_0 = @echo " CXX " $@; +am__v_CXX_1 = +CXXLD = $(CXX) +CXXLINK = $(CXXLD) $(AM_CXXFLAGS) 
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) \ + -o $@ +AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) +am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) +am__v_CXXLD_0 = @echo " CXXLD " $@; +am__v_CXXLD_1 = +SOURCES = $(tc_diag_SOURCES) +DIST_SOURCES = $(tc_diag_SOURCES) +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUFRLIB_NAME = @BUFRLIB_NAME@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FC_LIBS = @FC_LIBS@ +FFLAGS = @FFLAGS@ +FLIBS = @FLIBS@ +GREP = @GREP@ +GRIB2CLIB_NAME = @GRIB2CLIB_NAME@ +GRIB2_LIBS = @GRIB2_LIBS@ +INSTALL = @INSTALL@ +INSTALL_DATA = 
@INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LEX = @LEX@ +LEXLIB = @LEXLIB@ +LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MET_BUFR = @MET_BUFR@ +MET_BUFRLIB = @MET_BUFRLIB@ +MET_CAIRO = @MET_CAIRO@ +MET_CAIROINC = @MET_CAIROINC@ +MET_CAIROLIB = @MET_CAIROLIB@ +MET_FREETYPE = @MET_FREETYPE@ +MET_FREETYPEINC = @MET_FREETYPEINC@ +MET_FREETYPELIB = @MET_FREETYPELIB@ +MET_GRIB2C = @MET_GRIB2C@ +MET_GRIB2CINC = @MET_GRIB2CINC@ +MET_GRIB2CLIB = @MET_GRIB2CLIB@ +MET_GSL = @MET_GSL@ +MET_GSLINC = @MET_GSLINC@ +MET_GSLLIB = @MET_GSLLIB@ +MET_HDF = @MET_HDF@ +MET_HDF5 = @MET_HDF5@ +MET_HDF5INC = @MET_HDF5INC@ +MET_HDF5LIB = @MET_HDF5LIB@ +MET_HDFEOS = @MET_HDFEOS@ +MET_HDFEOSINC = @MET_HDFEOSINC@ +MET_HDFEOSLIB = @MET_HDFEOSLIB@ +MET_HDFINC = @MET_HDFINC@ +MET_HDFLIB = @MET_HDFLIB@ +MET_NETCDF = @MET_NETCDF@ +MET_NETCDFINC = @MET_NETCDFINC@ +MET_NETCDFLIB = @MET_NETCDFLIB@ +MET_PYTHON_BIN_EXE = @MET_PYTHON_BIN_EXE@ +MET_PYTHON_CC = @MET_PYTHON_CC@ +MET_PYTHON_LD = @MET_PYTHON_LD@ +MKDIR_P = @MKDIR_P@ +OBJEXT = @OBJEXT@ +OPENMP_CFLAGS = @OPENMP_CFLAGS@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PYTHON_LIBS = @PYTHON_LIBS@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +YACC = @YACC@ +YFLAGS = @YFLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar 
= @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +MAINTAINERCLEANFILES = Makefile.in +tc_diag_SOURCES = tc_diag.cc \ + python_tc_diag.cc \ + tc_diag_conf_info.cc + +tc_diag_CPPFLAGS = ${MET_CPPFLAGS} +tc_diag_LDFLAGS = ${MET_LDFLAGS} +tc_diag_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_analysis_util \ + -lvx_series_data \ + -lvx_tc_util \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_grib $(GRIB2_LIBS) \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_statistics \ + -lvx_data2d \ + -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ + -lvx_config \ + -lvx_gsl_prob \ + -lvx_cal \ + -lvx_nav \ + -lvx_util_math \ + -lvx_util \ + -lvx_math \ + -lvx_color \ + -lvx_log \ + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas \ + $(FLIBS) + +EXTRA_DIST = tc_diag_conf_info.h \ + python_tc_diag.h \ + tc_diag.h + +all: all-am + +.SUFFIXES: +.SUFFIXES: .cc .o .obj +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case 
'$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/tools/tc_utils/tc_diag/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign src/tools/tc_utils/tc_diag/Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +install-binPROGRAMS: $(bin_PROGRAMS) + @$(NORMAL_INSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ + for p in $$list; do echo "$$p $$p"; done | \ + sed 's/$(EXEEXT)$$//' | \ + while read p p1; do if test -f $$p \ + ; then echo "$$p"; echo "$$p"; else :; fi; \ + done | \ + sed -e 'p;s,.*/,,;n;h' \ + -e 's|.*|.|' \ + -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ + sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ + { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ + if ($$2 == $$4) files[d] = files[d] " " $$1; \ + else { print "f", $$3 "/" $$4, $$1; } } \ + END { for (d in files) print "f", d, files[d] }' | \ + while read type dir files; do \ + 
if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ + test -z "$$files" || { \ + echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ + $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ + } \ + ; done + +uninstall-binPROGRAMS: + @$(NORMAL_UNINSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ + -e 's/$$/$(EXEEXT)/' \ + `; \ + test -n "$$list" || exit 0; \ + echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ + cd "$(DESTDIR)$(bindir)" && rm -f $$files + +clean-binPROGRAMS: + -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS) + +tc_diag$(EXEEXT): $(tc_diag_OBJECTS) $(tc_diag_DEPENDENCIES) $(EXTRA_tc_diag_DEPENDENCIES) + @rm -f tc_diag$(EXEEXT) + $(AM_V_CXXLD)$(tc_diag_LINK) $(tc_diag_OBJECTS) $(tc_diag_LDADD) $(LIBS) + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tc_diag-python_tc_diag.Po@am__quote@ # am--include-marker +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tc_diag-tc_diag.Po@am__quote@ # am--include-marker +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tc_diag-tc_diag_conf_info.Po@am__quote@ # am--include-marker + +$(am__depfiles_remade): + @$(MKDIR_P) $(@D) + @echo '# dummy' >$@-t && $(am__mv) $@-t $@ + +am--depfiles: $(am__depfiles_remade) + +.cc.o: +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< + +.cc.obj: +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF 
$(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` + +tc_diag-tc_diag.o: tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-tc_diag.o -MD -MP -MF $(DEPDIR)/tc_diag-tc_diag.Tpo -c -o tc_diag-tc_diag.o `test -f 'tc_diag.cc' || echo '$(srcdir)/'`tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-tc_diag.Tpo $(DEPDIR)/tc_diag-tc_diag.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='tc_diag.cc' object='tc_diag-tc_diag.o' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-tc_diag.o `test -f 'tc_diag.cc' || echo '$(srcdir)/'`tc_diag.cc + +tc_diag-tc_diag.obj: tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-tc_diag.obj -MD -MP -MF $(DEPDIR)/tc_diag-tc_diag.Tpo -c -o tc_diag-tc_diag.obj `if test -f 'tc_diag.cc'; then $(CYGPATH_W) 'tc_diag.cc'; else $(CYGPATH_W) '$(srcdir)/tc_diag.cc'; fi` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-tc_diag.Tpo $(DEPDIR)/tc_diag-tc_diag.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='tc_diag.cc' object='tc_diag-tc_diag.obj' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ 
$(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-tc_diag.obj `if test -f 'tc_diag.cc'; then $(CYGPATH_W) 'tc_diag.cc'; else $(CYGPATH_W) '$(srcdir)/tc_diag.cc'; fi` + +tc_diag-python_tc_diag.o: python_tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-python_tc_diag.o -MD -MP -MF $(DEPDIR)/tc_diag-python_tc_diag.Tpo -c -o tc_diag-python_tc_diag.o `test -f 'python_tc_diag.cc' || echo '$(srcdir)/'`python_tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-python_tc_diag.Tpo $(DEPDIR)/tc_diag-python_tc_diag.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='python_tc_diag.cc' object='tc_diag-python_tc_diag.o' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-python_tc_diag.o `test -f 'python_tc_diag.cc' || echo '$(srcdir)/'`python_tc_diag.cc + +tc_diag-python_tc_diag.obj: python_tc_diag.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-python_tc_diag.obj -MD -MP -MF $(DEPDIR)/tc_diag-python_tc_diag.Tpo -c -o tc_diag-python_tc_diag.obj `if test -f 'python_tc_diag.cc'; then $(CYGPATH_W) 'python_tc_diag.cc'; else $(CYGPATH_W) '$(srcdir)/python_tc_diag.cc'; fi` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-python_tc_diag.Tpo $(DEPDIR)/tc_diag-python_tc_diag.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='python_tc_diag.cc' object='tc_diag-python_tc_diag.obj' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ 
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-python_tc_diag.obj `if test -f 'python_tc_diag.cc'; then $(CYGPATH_W) 'python_tc_diag.cc'; else $(CYGPATH_W) '$(srcdir)/python_tc_diag.cc'; fi` + +tc_diag-tc_diag_conf_info.o: tc_diag_conf_info.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-tc_diag_conf_info.o -MD -MP -MF $(DEPDIR)/tc_diag-tc_diag_conf_info.Tpo -c -o tc_diag-tc_diag_conf_info.o `test -f 'tc_diag_conf_info.cc' || echo '$(srcdir)/'`tc_diag_conf_info.cc +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-tc_diag_conf_info.Tpo $(DEPDIR)/tc_diag-tc_diag_conf_info.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='tc_diag_conf_info.cc' object='tc_diag-tc_diag_conf_info.o' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-tc_diag_conf_info.o `test -f 'tc_diag_conf_info.cc' || echo '$(srcdir)/'`tc_diag_conf_info.cc + +tc_diag-tc_diag_conf_info.obj: tc_diag_conf_info.cc +@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tc_diag-tc_diag_conf_info.obj -MD -MP -MF $(DEPDIR)/tc_diag-tc_diag_conf_info.Tpo -c -o tc_diag-tc_diag_conf_info.obj `if test -f 'tc_diag_conf_info.cc'; then $(CYGPATH_W) 'tc_diag_conf_info.cc'; else $(CYGPATH_W) '$(srcdir)/tc_diag_conf_info.cc'; fi` +@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/tc_diag-tc_diag_conf_info.Tpo $(DEPDIR)/tc_diag-tc_diag_conf_info.Po +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='tc_diag_conf_info.cc' 
object='tc_diag-tc_diag_conf_info.obj' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(tc_diag_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tc_diag-tc_diag_conf_info.obj `if test -f 'tc_diag_conf_info.cc'; then $(CYGPATH_W) 'tc_diag_conf_info.cc'; else $(CYGPATH_W) '$(srcdir)/tc_diag_conf_info.cc'; fi` + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-am +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-am + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-am + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +distdir: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) distdir-am + +distdir-am: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo 
"$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(PROGRAMS) +installdirs: + for dir in "$(DESTDIR)$(bindir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." + -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) +clean: clean-am + +clean-am: clean-binPROGRAMS clean-generic mostlyclean-am + +distclean: distclean-am + -rm -f ./$(DEPDIR)/tc_diag-python_tc_diag.Po + -rm -f ./$(DEPDIR)/tc_diag-tc_diag.Po + -rm -f ./$(DEPDIR)/tc_diag-tc_diag_conf_info.Po + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: install-binPROGRAMS + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f ./$(DEPDIR)/tc_diag-python_tc_diag.Po + -rm -f ./$(DEPDIR)/tc_diag-tc_diag.Po + -rm -f ./$(DEPDIR)/tc_diag-tc_diag_conf_info.Po + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-compile mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-binPROGRAMS + +.MAKE: install-am install-strip + +.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles check check-am clean \ + clean-binPROGRAMS clean-generic cscopelist-am ctags ctags-am \ + distclean distclean-compile distclean-generic distclean-tags \ + distdir dvi dvi-am html html-am info info-am install \ + install-am install-binPROGRAMS install-data install-data-am \ + install-dvi install-dvi-am install-exec install-exec-am \ + install-html install-html-am install-info 
install-info-am \ + install-man install-pdf install-pdf-am install-ps \ + install-ps-am install-strip installcheck installcheck-am \ + installdirs maintainer-clean maintainer-clean-generic \ + mostlyclean mostlyclean-compile mostlyclean-generic pdf pdf-am \ + ps ps-am tags tags-am uninstall uninstall-am \ + uninstall-binPROGRAMS + +.PRECIOUS: Makefile + + +# Include the project definitions + +include ${top_srcdir}/Make-include + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/src/tools/tc_utils/tc_diag/python_tc_diag.cc b/src/tools/tc_utils/tc_diag/python_tc_diag.cc new file mode 100644 index 0000000000..997f5c6de6 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/python_tc_diag.cc @@ -0,0 +1,334 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + +using namespace std; + +#include + +#include "vx_config.h" +#include "vx_python3_utils.h" +#include "vx_log.h" + +#include "global_python.h" +#include "wchar_argv.h" + +//////////////////////////////////////////////////////////////////////// + +extern GlobalPython GP; // this needs external linkage + +//////////////////////////////////////////////////////////////////////// + +static const char * user_ppath = 0; +static const char write_tmp_diag [] = "MET_BASE/python/pyembed/write_tmp_tc_diag.py"; +static const char read_tmp_diag [] = "pyembed.read_tmp_tc_diag"; // NO ".py" suffix +static const char tc_diag_dict_name [] = "tc_diag"; + +//////////////////////////////////////////////////////////////////////// + +static 
bool straight_python_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + map &diag_map); + +static bool tmp_nc_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + map &diag_map); + +static void diag_map_from_python_dict( + PyObject *diag_dict, + map &diag_map); + +//////////////////////////////////////////////////////////////////////// + +bool python_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + map &diag_map) { + bool status = false; + + // Check for MET_PYTHON_EXE + if ((user_ppath = getenv(user_python_path_env)) != 0 ) { + status = tmp_nc_tc_diag(script_name, + tmp_file_name, diag_map); + } + // Use compiled python instance + else { + status = straight_python_tc_diag(script_name, + tmp_file_name, diag_map); + } + + return(status); +} + +//////////////////////////////////////////////////////////////////////// + +bool straight_python_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + map &diag_map) { + const char *method_name = "straight_python_tc_diag()"; + + mlog << Debug(3) << "Running Python diagnostics script (" + << script_name << " " << tmp_file_name << ").\n"; + + // Prepare arguments + StringArray arg_sa = script_name.split(" "); + arg_sa.add(tmp_file_name); + Wchar_Argv wa; + wa.set(arg_sa); + + // Reload the module if GP has already been initialized + bool do_reload = GP.is_initialized; + + GP.initialize(); + + if(PyErr_Occurred()) { + PyErr_Print(); + mlog << Warning << "\n" << method_name << " -> " + << "an error occurred initializing python\n\n"; + return(false); + } + + // Set the arguments + run_python_string("import os"); + run_python_string("import sys"); + + ConcatString command; + command << cs_erase + << "sys.path.append(\"" + << script_name.dirname() + << "\")"; + + run_python_string(command.text()); + + if(arg_sa.n() > 0) { + PySys_SetArgv(wa.wargc(), wa.wargv()); + } + + // Import the python script as a module + 
ConcatString script_base = script_name.basename(); + script_base.chomp(".py"); + + PyObject *module_obj = PyImport_ImportModule(script_base.c_str()); + + // Reload the module, if needed + if(do_reload) { + module_obj = PyImport_ReloadModule(module_obj); + } + + if(PyErr_Occurred()) { + PyErr_Print(); + mlog << Warning << "\n" << method_name << " -> " + << "an error occurred importing module \"" + << script_name << "\"\n\n"; + return(false); + } + + if(!module_obj) { + mlog << Warning << "\n" << method_name << " -> " + << "error running Python script \"" + << script_name << "\"\n\n"; + return(false); + } + + // Get the namespace for the module (as a dictionary) + PyObject *module_dict_obj = PyModule_GetDict(module_obj); + PyObject *key_obj = PyUnicode_FromString(tc_diag_dict_name); + PyObject *data_obj = PyDict_GetItem (module_dict_obj, key_obj); + + if(!data_obj || !PyDict_Check(data_obj)) { + mlog << Warning << "\n" << method_name << " -> " + << "trouble reading data from \"" + << script_name << "\"\n\n"; + return(false); + } + + // Populate the diagnostics map + diag_map_from_python_dict(data_obj, diag_map); + + return(true); +} + +//////////////////////////////////////////////////////////////////////// + +bool tmp_nc_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + map &diag_map) { + const char *method_name = "tmp_nc_tc_diag()"; + int i, status; + ConcatString command; + ConcatString path; + ConcatString tmp_nc_path; + const char * tmp_dir = 0; + Wchar_Argv wa; + + // TODO: Implement read/write temp tc_diag python functionality + mlog << Error << "\n" << method_name << " -> " + << "not yet fully implemented ... 
exiting!\n\n"; + exit(1); + + mlog << Debug(3) << "Calling " << user_ppath + << " to run Python diagnostics script (" + << script_name << " " << tmp_file_name << ").\n"; + + // Create a temp file + tmp_dir = getenv ("MET_TMP_DIR"); + if(!tmp_dir) tmp_dir = default_tmp_dir; + + path << cs_erase + << tmp_dir << '/' + << tmp_nc_base_name; + + tmp_nc_path = make_temp_file_name(path.text(), 0); + + // Construct the system command + command << cs_erase + << user_ppath << ' ' // user's path to python + << replace_path(write_tmp_diag) << ' ' // write_tmp_diag.py + << tmp_nc_path << ' ' // tmp_nc output filename + << script_name << ' ' // python script name + << tmp_file_name; // input temp NetCDF file + + mlog << Debug(4) << "Writing temporary Python dataplane file:\n\t" + << command << "\n"; + + status = system(command.text()); + + if(status) { + mlog << Error << "\n" << method_name << " -> " + << "command \"" << command.text() << "\" failed ... status = " + << status << "\n\n"; + exit(1); + } + + // Reload the module if GP has already been initialized + bool do_reload = GP.is_initialized; + + GP.initialize(); + + if(PyErr_Occurred()) { + PyErr_Print(); + mlog << Warning << "\n" << method_name << " -> " + << "an error occurred initializing python\n\n"; + return(false); + } + + // Prepare arguments to read input + StringArray arg_sa; + arg_sa.add(read_tmp_diag); + arg_sa.add(tmp_nc_path); + wa.set(arg_sa); + + PySys_SetArgv (wa.wargc(), wa.wargv()); + + mlog << Debug(4) << "Reading temporary Python diagnostics file: " + << tmp_nc_path << "\n"; + + // Import the python wrapper script as a module + path = read_tmp_diag; + path = path.basename(); + path.chomp(".py"); + + PyObject * module_obj = PyImport_ImportModule(path.c_str()); + + // Reload the module, if needed + if(do_reload) { + module_obj = PyImport_ReloadModule (module_obj); + } + + if(PyErr_Occurred()) { + PyErr_Print(); + mlog << Warning << "\n" << method_name << " -> " + << "an error occurred importing module " + 
<< '\"' << path << "\"\n\n"; + return(false); + } + + if(!module_obj) { + mlog << Warning << "\n" << method_name << " -> " + << "error running Python script\n\n"; + return(false); + } + + // Get the namespace for the module (as a dictionary) + PyObject *module_dict_obj = PyModule_GetDict(module_obj); + PyObject *key_obj = PyUnicode_FromString(tc_diag_dict_name); + PyObject *data_obj = PyDict_GetItem(module_dict_obj, key_obj); + + if(!data_obj || !PyDict_Check(data_obj)) { + mlog << Warning << "\n" << method_name << " -> " + << "trouble reading data from \"" + << script_name << "\"\n\n"; + exit(1); + } + + // Populate the diagnostics map + diag_map_from_python_dict(data_obj, diag_map); + + // Cleanup + remove_temp_file(tmp_nc_path); + + return(true); +} + +//////////////////////////////////////////////////////////////////////// + +void diag_map_from_python_dict(PyObject *diag_dict, + map &diag_map) { + const char *method_name = "diag_map_from_python_dict()"; + PyObject *key_obj = 0; + PyObject *val_obj = 0; + int status; + double val; + long pos; + + // Initialize + pos = 0; + + // Loop through the dictionary entries + while((status = PyDict_Next(diag_dict, &pos, &key_obj, &val_obj)) != 0) { + + // All keys must be strings + if(!PyUnicode_Check(key_obj)) { + mlog << Error << "\n" << method_name << " -> " + << "key is not a string!\n\n"; + exit(1); + } + + // Parse key as a string and value as a number + string key_str = PyUnicode_AsUTF8(key_obj); + if(PyLong_Check(val_obj)) { + val = (double) PyLong_AsLong(val_obj); + } + else if(PyFloat_Check(val_obj)) { + val = PyFloat_AsDouble(val_obj); + } + else { + mlog << Error << "\n" << method_name << " -> " + << "TC diagnostic \"" << key_str + << "\" not specified as a numeric Python data type!\n\n"; + exit(1); + } + + // Check for duplicates + if(diag_map.count(key_str) > 0) { + mlog << Warning << "\n" << method_name << " -> " + << "ignoring duplicate entry for TC diagnostic \"" + << key_str << "\" = " << val << "!\n\n"; 
+ } + // Store key/value pair in the dictionary + else { + mlog << Debug(5) << "Storing TC diagnostic \"" + << key_str << "\" = " << val << "\n"; + diag_map[key_str] = val; + } + } // end while + + return; +} + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_diag/python_tc_diag.h b/src/tools/tc_utils/tc_diag/python_tc_diag.h new file mode 100644 index 0000000000..dc1383267b --- /dev/null +++ b/src/tools/tc_utils/tc_diag/python_tc_diag.h @@ -0,0 +1,32 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + +#ifndef __PYTHON_TC_DIAG__ +#define __PYTHON_TC_DIAG__ + +//////////////////////////////////////////////////////////////////////// + +extern "C" { + +#include "Python.h" + +} + +//////////////////////////////////////////////////////////////////////// + +extern bool python_tc_diag(const ConcatString &script_name, + const ConcatString &tmp_file_name, + std::map &diag_map); + +//////////////////////////////////////////////////////////////////////// + +#endif /* __PYTHON_TC_DIAG__ */ + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_diag/tc_diag.cc b/src/tools/tc_utils/tc_diag/tc_diag.cc new file mode 100644 index 0000000000..ae459a5f81 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/tc_diag.cc @@ -0,0 +1,1698 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications 
Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// +// +// Filename: tc_diag.cc +// +// Description: +// +// Mod# Date Name Description +// ---- ---- ---- ----------- +// 000 09/27/22 Halley Gotway New +// +//////////////////////////////////////////////////////////////////////// + +using namespace std; + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +using namespace netCDF; + +#ifdef _OPENMP + #include "omp.h" +#endif + +#include "main.h" +#include "tc_diag.h" +#include "python_tc_diag.h" + +#include "series_data.h" + +#include "vx_grid.h" +#include "vx_regrid.h" +#include "vx_tc_util.h" +#include "vx_nc_util.h" +#include "vx_tc_nc_util.h" +#include "vx_data2d_nc_met.h" +#include "vx_util.h" +#include "vx_log.h" +#include "vx_math.h" + +#include "met_file.h" + +//////////////////////////////////////////////////////////////////////// + +static void usage(); +static void process_command_line(int, char**); +static void get_file_type(); + +static void process_tracks(TrackInfoArray&); +static void get_atcf_files(const StringArray&, + const StringArray&, + StringArray&, + StringArray&); +static void process_track_files(const StringArray&, + const StringArray&, + TrackInfoArray&); +static void process_track_points(const TrackInfoArray &); +static void process_fields(const TrackInfoArray &, + const unixtime, int, + const string &, + const DomainInfo &); +static void process_out_files(const TrackInfoArray &); + +static void merge_tmp_files(const vector); +static void copy_coord_vars(NcFile *to_nc, NcFile *from_nc); +static void copy_time_vars(NcFile *to_nc, NcFile *from_nc, int); + +static bool is_keeper(const ATCFLineBase *); +static void set_deck(const StringArray&); +static void set_atcf_source(const StringArray&, + StringArray&, + StringArray&); +static void 
set_data(const StringArray&); +static void set_config(const StringArray&); +static void set_outdir(const StringArray&); + +static void setup_out_files(const TrackInfoArray &); +static ConcatString get_out_key(const TrackInfo &); +static ConcatString get_tmp_key(const TrackInfo &, + const TrackPoint &, + const string &); + +static ConcatString build_tmp_file_name(const TrackInfo *, + const TrackPoint *, + const string &); +static ConcatString build_out_file_name(const TrackInfo *, + const char *); + +static void write_tc_storm(NcFile *, const char *, + const char *, const char *); + +static void write_tc_times(NcFile *, const NcDim &, + const TrackInfo *, + const TrackPoint *); + +static void compute_lat_lon(TcrmwGrid&, double *, double *); + +//////////////////////////////////////////////////////////////////////// + +// +// TODO after the MET version 11.1.0 release: +// - Python diagnostics: +// - Incorporate CIRA python diagnostics scripts. +// - Read resulting Xarray dataset in memory. +// - Write CIRA ASCII and NetCDF diagnostics output files. +// - Add support for $MET_PYTHON_EXE. +// - Input data: +// - Instead of reading DataPlanes one at a time, +// read them all at once or perhaps in groups +// (e.g. all pressure levels). +// - Parellelize the processing of valid times. +// - Add support for vortex removal. Print a WARNING if +// the Diag Track differs from the Tech Id for the data +// files and vortex removal has not been requested. +// - NetCDF cylindrical coordinates output: +// - get_var_names() returns a multimap that is sorted by +// the order of the variable names. This reorders the vars +// in the NetCDF cyl coord output. Would prefer that reordering +// not happen. +// - Consider adding support for the "regrid" dictionary to +// control cyl coord regridding step is done. 
+// + +int met_main(int argc, char *argv[]) { + + // Print beta status warning + print_beta_warning("The TC-Diag tool"); + + // Process command line arguments + process_command_line(argc, argv); + + // Process the track data + TrackInfoArray tracks; + process_tracks(tracks); + + // Setup output files for each track + setup_out_files(tracks); + + // Process the gridded data + process_track_points(tracks); + + // Process the output files + process_out_files(tracks); + + return(0); +} + +//////////////////////////////////////////////////////////////////////// + +const string get_tool_name() { + return "tc_diag"; +} + +//////////////////////////////////////////////////////////////////////// + +void usage() { + + cout << "\n*** Model Evaluation Tools (MET" << met_version + << ") ***\n\n" + << "Usage: " << program_name << "\n" + << "\t-data domain tech_id_list [ file_1 ... file_n | data_file_list ]\n" + << "\t-deck file\n" + << "\t-config file\n" + << "\t[-outdir path]\n" + << "\t[-log file]\n" + << "\t[-v level]\n\n" + + << "\twhere\t\"-data domain tech_id_list [ file_1 ... 
file_n | data_file_list ]\"\n" + + << "\t\t\tSpecifies a domain name, a comma-separated list of ATCF tech ID's, " + << "\t\t\tand a list of gridded data files or an ASCII file containing " + << "\t\t\ta list of files to be used.\n" + << "\t\t\tSpecify \"-data\" once for each data source (required).\n" + + << "\t\t\"-deck source\" is the ATCF format data source " + << "(required).\n" + + << "\t\t\"-config file\" is a TCDiagConfig file to be used " + << "(required).\n" + + << "\t\t\"-outdir path\" overrides the default output directory " + << "(" << out_dir << ") (optional).\n" + + << "\t\t\"-log file\" outputs log messages to the specified " + << "file (optional).\n" + + << "\t\t\"-v level\" overrides the default level of logging (" + << mlog.verbosity_level() << ") (optional).\n\n" << flush; + + exit(1); +} + +//////////////////////////////////////////////////////////////////////// + +void process_command_line(int argc, char **argv) { + CommandLine cline; + ConcatString default_config_file; + + // Default output directory + out_dir = replace_path(default_out_dir); + + // Default output prefix + out_prefix = replace_path(default_out_prefix); + + // Print usage statement for no arguments + if(argc <= 1) usage(); + + // Parse command line into tokens + cline.set(argc, argv); + + // Set usage function + cline.set_usage(usage); + + // Add function calls for arguments + cline.add(set_data, "-data", -1); + cline.add(set_deck, "-deck", -1); + cline.add(set_config, "-config", 1); + cline.add(set_outdir, "-outdir", 1); + + // Parse command line + cline.parse(); + + // Check for required arguments + if(data_opt_map.size() == 0 || + deck_source.n() == 0 || + config_file.empty()) { + mlog << Error << "\nThe \"-data\", \"-deck\", and \"-config\" " + << "command line arguments are required!\n\n"; + usage(); + } + + // Create default config file name + default_config_file = replace_path(default_config_filename); + + // List config files + mlog << Debug(1) + << "Config File 
Default: " << default_config_file << "\n" + << "Config File User: " << config_file << "\n"; + + // Read config files + conf_info.read_config(default_config_file.c_str(), config_file.c_str()); + + // Get data file type from input files + get_file_type(); + + // Process the configuration + conf_info.process_config(file_type, data_opt_map); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void get_file_type() { + Met2dDataFileFactory mtddf_factory; + Met2dDataFile *mtddf = (Met2dDataFile *) 0; + int i; + + // Build one long list of input data files + StringArray file_list; + map::iterator it; + for(it = data_opt_map.begin(); it != data_opt_map.end(); it++) { + file_list.add(it->second.data_files); + } + + // Get data file type from config + GrdFileType conf_file_type = + parse_conf_file_type(conf_info.conf.lookup_dictionary(conf_key_data)); + + // Find the first file that actually exists + for(i=0; i " + << "No valid data files found.\n\n"; + exit(1); + } + + // Read first valid file + if(!(mtddf = mtddf_factory.new_met_2d_data_file( + file_list[i].c_str(), conf_file_type))) { + mlog << Error << "\nget_file_type() -> " + << "Trouble reading data file \"" + << file_list[i] << "\"\n\n"; + exit(1); + } + + // Store the actual file type + file_type = mtddf->file_type(); + + // Clean up + if(mtddf) { delete mtddf; mtddf = (Met2dDataFile *) 0; } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void process_tracks(TrackInfoArray& tracks) { + StringArray files, files_model_suffix; + TimeArray init_ta; + int i; + + // Get list of track files + get_atcf_files(deck_source, deck_model_suffix, + files, files_model_suffix); + + mlog << Debug(2) << "Processing " << files.n() + << " track data file(s).\n"; + + process_track_files(files, files_model_suffix, tracks); + + // Get list of unique track initialization times + for(i=0; i 1) { + mlog << Error << "\nprocess_tracks() -> " + << "set the 
\"init_inc\" config option to select one of the " + << init_ta.n() << " track initialization times between " + << unix_to_yyyymmddhh(init_ta.min()) << " and " + << unix_to_yyyymmddhh(init_ta.max()) << ".\n\n"; + exit(1); + } + + return; +} + +//////////////////////////////////////////////////////////////////////// +// +// Automated Tropical cyclone Forecasting System +// https://www.nrlmry.navy.mil/atcf_web/docs/ATCF-FAQ.html +// +//////////////////////////////////////////////////////////////////////// + +void get_atcf_files(const StringArray& source, + const StringArray& model_suffix, + StringArray& files, + StringArray& files_model_suffix) { + + StringArray cur_source, cur_files; + + if(source.n() != model_suffix.n()) { + mlog << Error << "\nget_atcf_files() -> " + << "the source and suffix arrays must be equal length!\n\n"; + exit(1); + } + + // Initialize + files.clear(); + files_model_suffix.clear(); + + // Build list of files from all sources + for(int i = 0; i < source.n(); i++) { + cur_source.clear(); + cur_source.add(source[i]); + cur_files = get_filenames(cur_source, NULL, atcf_suffix); + + for(int j = 0; j < cur_files.n(); j++) { + files.add(cur_files[j]); + files_model_suffix.add(model_suffix[i]); + } + } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void process_track_files(const StringArray& files, + const StringArray& model_suffix, + TrackInfoArray& tracks) { + int i, cur_read, cur_add, tot_read, tot_add; + LineDataFile f; + ConcatString cs; + ATCFTrackLine line; + + // Initialize + tracks.clear(); + + // Initialize counts + tot_read = tot_add = 0; + + // Process input ATCF files + for(i=0; i " + << "unable to open file \"" << files[i] << "\"\n\n"; + exit(1); + } + + // Initialize counts + cur_read = cur_add = 0; + + // Read each line + while(f >> line) { + + // Increment line counts + cur_read++; + tot_read++; + + // Add model suffix, if specified + if(model_suffix[i].length() > 0) { + cs << cs_erase 
<< line.technique() << model_suffix[i]; + line.set_technique(cs); + } + + // Check the keep status + if(!is_keeper(&line)) continue; + + // Attempt to add current line to TrackInfoArray + if(tracks.add(line, true, false)) { + cur_add++; + tot_add++; + } + } + + // Dump out current number of lines + mlog << Debug(4) + << "[File " << i + 1 << " of " << files.n() + << "] Used " << cur_add << " of " << cur_read + << " lines read from file \n\"" << files[i] << "\"\n"; + + // Close current file + f.close(); + + } // End loop over files + + // Check for no matching tracks + if(tracks.n() == 0) { + mlog << Error << "\nprocess_track_files() -> " + << "no tracks retained! Adjust the config file " + << "filtering options to select a single track.\n\n"; + exit(1); + } + + return; +} + +//////////////////////////////////////////////////////////////////////// +// +// Check if the ATCFLineBase should be kept +// +//////////////////////////////////////////////////////////////////////// + +bool is_keeper(const ATCFLineBase * line) { + bool keep = true; + ConcatString cs; + + // Check model + if(conf_info.model.n() > 0 && + !conf_info.model.has(line->technique())) { + cs << "model " << line->technique() << " not in " << write_css(conf_info.model); + keep = false; + } + + // Check storm id + else if(conf_info.storm_id.nonempty() && + conf_info.storm_id != line->storm_id()) { + cs << "storm_id " << line->storm_id() << " != " << conf_info.storm_id; + keep = false; + } + + // Check basin + else if(conf_info.basin.nonempty() && + conf_info.basin != line->basin()) { + cs << "basin " << line->basin() << " != " << conf_info.basin; + keep = false; + } + + // Check cyclone + else if(conf_info.cyclone.nonempty() && + conf_info.cyclone != line->cyclone_number()) { + cs << "cyclone " << line->cyclone_number() << " != " << conf_info.cyclone; + keep = false; + } + + // Check initialization time + else if(conf_info.init_inc != (unixtime) 0 && + conf_info.init_inc != line->warning_time()) { + cs << 
"init_inc " << unix_to_yyyymmddhh(line->warning_time()) + << " != " << unix_to_yyyymmddhh(conf_info.init_inc); + keep = false; + } + + // Check valid time + else if((conf_info.valid_beg > 0 && + conf_info.valid_beg > line->valid()) || + (conf_info.valid_end > 0 && + conf_info.valid_end < line->valid()) || + (conf_info.valid_inc.n() > 0 && + !conf_info.valid_inc.has(line->valid())) || + (conf_info.valid_exc.n() > 0 && + conf_info.valid_exc.has(line->valid()))) { + cs << "valid_time " << unix_to_yyyymmddhh(line->valid()); + keep = false; + } + + // Check valid hour + else if(conf_info.valid_hour.n() > 0 && + !conf_info.valid_hour.has(line->valid_hour())) { + cs << "valid_hour " << line->valid_hour(); + keep = false; + } + + // Check lead time + else if(conf_info.lead_time.n() > 0 && + !conf_info.lead_time.has(line->lead())){ + cs << "lead_time " << sec_to_hhmmss(line->lead()); + keep = false; + } + + if(!keep) { + mlog << Debug(4) << "Skipping track line for " << cs << ":\n" + << line->get_line() << "\n"; + } + + // Return the keep status + return(keep); +} + +//////////////////////////////////////////////////////////////////////// + +void set_deck(const StringArray& a) { + set_atcf_source(a, deck_source, deck_model_suffix); +} + +//////////////////////////////////////////////////////////////////////// + +void set_atcf_source(const StringArray& a, + StringArray& source, + StringArray& model_suffix) { + StringArray sa; + ConcatString cs, suffix; + + // Check for optional suffix sub-argument + for(int i = 0; i < a.n(); i++) { + if(a[i] == "suffix") { + cs = a[i]; + sa = cs.split("="); + if(sa.n() != 2) { + mlog << Error << "\nset_atcf_source() -> " + << "the model suffix must be specified as " + << "\"suffix=string\".\n\n"; + } + else { + suffix = sa[1]; + } + } + } + + // Parse remaining sources + for(int i = 0; i < a.n(); i++) { + if( a[i] == "suffix" ) continue; + source.add(a[i]); + model_suffix.add(suffix); + } + + return; +} + 
+//////////////////////////////////////////////////////////////////////// + +void set_data(const StringArray& a) { + + // Check for enough arguments + // e.g. -data parent GFSO,AEMN gfs_file_list + if(a.n() < 3) { + mlog << Error << "\nset_data() -> " + << "each \"-data\" command line option must specify a domain name, " + << "a comma-separated list of ATCF tech ID's, and the corresponding " + << "gridded data files.\n\n"; + usage(); + } + + // Store current -data options + DataOptInfo info; + + // First argument is the domain name + string domain = a[0]; + + // Second argument is a comma-separated list of tech ID's + info.tech_ids.parse_css(a[1]); + + // Remaining arguments are gridded data files or file lists + StringArray sa; + for(int i=2; i 0) { + mlog << Error << "\nsetup_out_files()-> " + << "found multiple tracks for key \"" + << key << "\"!\n\n"; + exit(1); + } + + // Add new map entry + out_file_map[key] = out_info; + + mlog << Debug(3) << "Preparing output files for " + << key << " track.\n"; + + // Store the track + out_file_map[key].trk_ptr = &tracks[i]; + + // NetCDF diagnostics output + if(conf_info.nc_diag_flag) { + out_file_map[key].nc_diag_file = + build_out_file_name(out_file_map[key].trk_ptr, "_diag.nc"); + out_file_map[key].nc_diag_out = + out_file_map[key].setup_nc_file(out_file_map[key].nc_diag_file); + } + + // CIRA diagnostics output + if(conf_info.cira_diag_flag) { + out_file_map[key].cira_diag_file = + build_out_file_name(out_file_map[key].trk_ptr, "_diag.txt"); + out_file_map[key].cira_diag_out = new ofstream; + out_file_map[key].cira_diag_out->open(out_file_map[key].cira_diag_file); + + if(!(*out_file_map[key].cira_diag_out)) { + mlog << Error << "\nsetup_out_files()-> " + << "can't open the output file \"" + << out_file_map[key].cira_diag_file + << "\" for writing!\n\n"; + exit(1); + } + + // Fixed width + out_file_map[key].cira_diag_out->setf(ios::fixed); + } + } // end for i + + return; +} + 
+//////////////////////////////////////////////////////////////////////// + +ConcatString get_out_key(const TrackInfo &track) { + ConcatString cs; + + cs << track.storm_id() << "_" + << track.technique() << "_" + << unix_to_yyyymmddhh(track.init()); + + return(cs); +} + +//////////////////////////////////////////////////////////////////////// + +ConcatString get_tmp_key(const TrackInfo &track, + const TrackPoint &point, + const string &domain) { + ConcatString cs; + + cs << track.storm_id() << "_" + << track.technique() << "_" + << unix_to_yyyymmddhh(track.init()) << "_f" + << point.lead() /sec_per_hour << "_" + << domain; + + return(cs); +} + +//////////////////////////////////////////////////////////////////////// + +ConcatString build_tmp_file_name(const TrackInfo *trk_ptr, + const TrackPoint *pnt_ptr, + const string &domain) { + ConcatString cs; + + // Build the temp file name with the program name, + // track/timing information, and domain name + + cs << conf_info.tmp_dir + << "/tmp_" << program_name << "_" + << get_tmp_key(*trk_ptr, *pnt_ptr, domain); + + return(make_temp_file_name(cs.text(), ".nc")); +} + +//////////////////////////////////////////////////////////////////////// + +ConcatString build_out_file_name(const TrackInfo *trk_ptr, + const char *suffix) { + ConcatString cs; + + // Build the output file name + cs << out_dir << "/" << program_name; + + // Append the output prefix, if defined + if(conf_info.output_prefix.nonempty()) { + cs << "_" << conf_info.output_prefix; + } + + // Append the track information + cs << "_" << get_out_key(*trk_ptr); + + // Append the suffix + cs << suffix; + + return(cs); +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_storm(NcFile *nc_out, const char *storm_id, + const char *model, const char *domain) { + + // Add the storm id + if(storm_id) { + NcVar sid_var = nc_out->addVar("storm_id", ncString); + sid_var.putVar(&storm_id); + } + + // Add the model + if(model) { + 
NcVar mdl_var = nc_out->addVar("model", ncString); + mdl_var.putVar(&model); + } + + // Add the domain name + if(domain) { + NcVar dmn_var = nc_out->addVar("domain", ncString); + dmn_var.putVar(&domain); + } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void write_tc_times(NcFile *nc_out, const NcDim &vld_dim, + const TrackInfo *trk_ptr, + const TrackPoint *pnt_ptr) { + + // Check pointer + if(!trk_ptr) { + mlog << Error << "\nwrite_tc_times() -> " + << "null track pointer!\n\n"; + exit(1); + } + + NcVar init_str_var, init_ut_var; + NcVar vld_str_var, vld_ut_var; + NcVar lead_str_var, lead_sec_var; + + // Define and write the track initialization time + def_tc_init_time(nc_out, init_str_var, init_ut_var); + write_tc_init_time(nc_out, init_str_var, init_ut_var, + trk_ptr->init()); + + // Define valid and lead times + def_tc_valid_time(nc_out, vld_dim, vld_str_var, vld_ut_var); + def_tc_lead_time(nc_out, vld_dim, lead_str_var, lead_sec_var); + + // Write valid and lead times for a single point + if(pnt_ptr) { + write_tc_valid_time(nc_out, 0, vld_str_var, vld_ut_var, + pnt_ptr->valid()); + write_tc_lead_time(nc_out, 0, lead_str_var, lead_sec_var, + pnt_ptr->lead()); + } + // Write valid and lead times for all track points + else { + for(int i_pnt=0; i_pntn_points(); i_pnt++) { + write_tc_valid_time(nc_out, i_pnt, vld_str_var, vld_ut_var, + (*trk_ptr)[i_pnt].valid()); + write_tc_lead_time(nc_out, i_pnt, lead_str_var, lead_sec_var, + (*trk_ptr)[i_pnt].lead()); + } + } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void compute_lat_lon(TcrmwGrid& grid, + double *lat_arr, double *lon_arr) { + + // Compute lat and lon coordinate arrays + for(int ir=0; ir::iterator dom_it; + + // Build list of unique valid times + TimeArray valid_ta; + for(i=0,n_pts=0; i 0) { + mlog << Error << "\nprocess_track_points()-> " + << "found multiple temp file entries for key \"" + << tmp_key << 
"\"!\n\n"; + exit(1); + } + + // Add new map entry + tmp_file_map[tmp_key] = tmp_info; + + // Setup a temp file for the current point + tmp_file_map[tmp_key].open(&tracks[k], + &tracks[k][i_pnt], + conf_info.domain_info[j], + conf_info.pressure_levels); + + } // end for k + } // end for j + } // end for i + +// TODO: Work on this parallel code + +//#pragma omp parallel default(none) \ +// shared(mlog, conf_info, tracks, valid_ta) \ +// private(i, dom_it) +// { + + // Parallel: Loop over the unique valid times +//#pragma omp for schedule (static) +//#pragma omp parallel for + for(i=0; iset_valid(vld_ut); + + // Find data for this track point + get_series_entry(i_vld, vi, + di.data_files, file_type, + data_dp, grid_dp); + + // Do coordinate transformation for each track point + for(j=0; j domain_tmp_file_list; + + // Loop over tracks + for(int i_trk=0; i_trk " + << "no output file map entry found for key \"" + << out_key << "\"!\n\n"; + exit(1); + } + + // Loop over domains + for(int i_dom=0; i_dom " + << "no temporary file map entry found for key \"" + << tmp_key << "\"!\n\n"; + exit(1); + } + + // Update list of domain-specific temp files + domain_tmp_file_list.push_back(&tmp_file_map[tmp_key]); + + // Store the diagnostics for each track point + out_file_map[out_key].add_diag_map(tmp_file_map[tmp_key].diag_map, i_pnt); + + } // end for i_pnt + + // Write NetCDF range-azimuth output + if(conf_info.nc_rng_azi_flag) { + merge_tmp_files(domain_tmp_file_list); + } + + } // end for i_dom + + // Write NetCDF diagnostics output + if(conf_info.nc_diag_flag) { + out_file_map[out_key].write_nc_diag(); + } + + // Finish the output for this track + out_file_map[out_key].clear(); + + } // end for i_trk + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void merge_tmp_files(const vector tmp_files) { + NcFile *nc_out = (NcFile *) 0; + + // Loop over temp files + for(int i_tmp=0; i_tmpdomain << ".nc"; + file_name = build_out_file_name( + 
tmp_files[i_tmp]->trk_ptr, + suffix_cs.c_str()); + + mlog << Debug(1) << "Writing output file: " + << file_name << "\n"; + + nc_out = open_ncfile(file_name.c_str(), true); + + if(IS_INVALID_NC_P(nc_out)) { + mlog << Error << "\nmerge_tmp_files() -> " + << "trouble opening output NetCDF file " + << file_name << "\n\n"; + exit(1); + } + + // Add global attributes + write_netcdf_global(nc_out, file_name.c_str(), program_name); + + // Write track info + write_tc_storm(nc_out, + tmp_files[i_tmp]->trk_ptr->storm_id().c_str(), + tmp_files[i_tmp]->trk_ptr->technique().c_str(), + nullptr); + + // Write the track lines + write_tc_track_lines(nc_out, + *(tmp_files[i_tmp]->trk_ptr)); + + // Define the time dimension + NcDim vld_dim = add_dim(nc_out, "time", + tmp_files[i_tmp]->trk_ptr->n_points()); + + // Write timing info for the entire track + write_tc_times(nc_out, vld_dim, + tmp_files[i_tmp]->trk_ptr, nullptr); + + // Copy coordinate variables + copy_coord_vars(nc_out, tmp_files[i_tmp]->tmp_out); + + } // end if !nc_out + + // Copy time variables + copy_time_vars(nc_out, tmp_files[i_tmp]->tmp_out, i_tmp); + + } // end for i_tmp + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void copy_coord_vars(NcFile *to_nc, NcFile *from_nc) { + + // Get the input variable names + StringArray var_names; + get_var_names(from_nc, &var_names); + + // Loop over the variables + for(int i=0; i dims; + for(j=0; jaddVar(var_names[i], ncDouble, dims); + copy_nc_atts(&from_var, &new_var); + } + + // Write data for the current time + NcVar to_var = get_var(to_nc, var_names[i].c_str()); + + vector offsets; + vector counts; + + int buf_size = 1; + + for(j=0; jclose(); + delete nc_diag_out; + nc_diag_out = (NcFile *) 0; + } + nc_diag_file.clear(); + + // Write CIRA diagnostics file + if(cira_diag_out) { + + mlog << Debug(1) << "Writing output file: " + << cira_diag_file << "\n"; + + // Write the output + *cira_diag_out << cira_diag_at; + + // Close the 
output file + cira_diag_out->close(); + delete cira_diag_out; + cira_diag_out = (ofstream *) 0; + } + cira_diag_file.clear(); + cira_diag_at.clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +NcFile *OutFileInfo::setup_nc_file(const string &out_file) { + + if(!trk_ptr) return(nullptr); + + // Open the output NetCDF file + NcFile *nc_out = open_ncfile(out_file.c_str(), true); + + if(IS_INVALID_NC_P(nc_out)) { + mlog << Error << "\nOutFileInfo::setup_nc_file() -> " + << "trouble opening output NetCDF file " + << out_file << "\n\n"; + exit(1); + } + + // Add global attributes + write_netcdf_global(nc_out, out_file.c_str(), program_name); + + // Define dimension + vld_dim = add_dim(nc_out, "time", + trk_ptr->n_points()); + + // Write track info + write_tc_storm(nc_out, + trk_ptr->storm_id().c_str(), + trk_ptr->technique().c_str(), + nullptr); + + // Write timing info for the entire track + write_tc_times(nc_out, vld_dim, + trk_ptr, nullptr); + + return(nc_out); +} + +//////////////////////////////////////////////////////////////////////// + +void OutFileInfo::add_diag_map(const map &tmp_diag_map, + int i_pnt) { + + // Track pointer must be set + if(!trk_ptr) { + mlog << Error << "\nOutFileInfo::add_diag_map() -> " + << "track pointer not set!\n\n"; + exit(1); + } + + // Check the range + if(i_pnt < 0 || i_pnt >= trk_ptr->n_points()) { + mlog << Error << "\nOutFileInfo::add_diag_map() -> " + << "track point index (" << i_pnt + << ") range check error!\n\n"; + exit(1); + } + + // Loop over the input diagnostics map + map::const_iterator it; + for(it = tmp_diag_map.begin(); it != tmp_diag_map.end(); it++) { + + // Add new diagnostics array entry, if needed + if(diag_map.count(it->first) == 0) { + NumArray empty_na; + empty_na.set_const(bad_data_double, + trk_ptr->n_points()); + diag_map[it->first] = empty_na; + } + + // Store the diagnostic value for the track point + diag_map[it->first].set(i_pnt, it->second); + } + + 
return; +} + +//////////////////////////////////////////////////////////////////////// + +void OutFileInfo::write_nc_diag() { + + // Setup dimensions + vector dims; + dims.push_back(vld_dim); + + vector offsets; + offsets.push_back(0); + + vector counts; + counts.push_back(get_dim_size(&vld_dim)); + + // Write the diagnostics for each lead time + map::iterator it; + for(it = diag_map.begin(); it != diag_map.end(); it++) { + NcVar diag_var = nc_diag_out->addVar(it->first, ncDouble, dims); + add_att(&diag_var, fill_value_att_name, bad_data_double); + diag_var.putVar(offsets, counts, it->second.buf()); + } + + return; +} + +//////////////////////////////////////////////////////////////////////// +// +// Code for class TmpFileInfo +// +//////////////////////////////////////////////////////////////////////// + +TmpFileInfo::TmpFileInfo() { + init_from_scratch(); +} + +//////////////////////////////////////////////////////////////////////// + +TmpFileInfo::~TmpFileInfo() { + clear(); +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::init_from_scratch() { + + // Initialize pointers + trk_ptr = (TrackInfo *) 0; + pnt_ptr = (TrackPoint *) 0; + tmp_out = (NcFile *) 0; + + clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::open(const TrackInfo *t_ptr, + const TrackPoint *p_ptr, + const DomainInfo &di, + const set &prs_lev) { + + // Set pointers + trk_ptr = t_ptr; + pnt_ptr = p_ptr; + domain = di.domain; + + // Open the temp file + tmp_file = build_tmp_file_name(trk_ptr, pnt_ptr, domain); + + mlog << Debug(3) << "Creating temp file: " << tmp_file << "\n"; + + setup_nc_file(di, prs_lev); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::close() { + + // Write NetCDF temp file + if(tmp_out) { + + mlog << Debug(3) << "Writing temp file: " + << tmp_file << "\n"; + + delete tmp_out; + tmp_out = (NcFile 
*) 0; + } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::clear() { + + trk_ptr = (TrackInfo *) 0; + pnt_ptr = (TrackPoint *) 0; + + diag_map.clear(); + + grid_out.clear(); + ra_grid.clear(); + + pressure_levels.clear(); + + domain.clear(); + + // Delete the temp file + if(tmp_out) { + + remove_temp_file(tmp_file); + + tmp_out = (NcFile *) 0; + } + tmp_file.clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::setup_nc_file(const DomainInfo &di, + const set &prs_lev) { + + // Open the output NetCDF file + tmp_out = open_ncfile(tmp_file.c_str(), true); + + if(IS_INVALID_NC_P(tmp_out)) { + mlog << Error << "\nTmpFileInfo::setup_nc_file() -> " + << "trouble opening output NetCDF file " + << tmp_file << "\n\n"; + exit(1); + } + + // Add global attributes + write_netcdf_global(tmp_out, tmp_file.c_str(), program_name); + + // Define latitude and longitude arrays + TcrmwData d = di.data; + int nra = d.range_n * d.azimuth_n; + double *lat_arr = new double[nra]; + double *lon_arr = new double[nra]; + + // Set grid center + d.lat_center = pnt_ptr->lat(); + d.lon_center = -1.0*pnt_ptr->lon(); // degrees east to west + d.range_max_km = di.delta_range_km * d.range_n; + + // Instantiate the grid + grid_out.set(d); + ra_grid.set_from_data(d); + + mlog << Debug(3) + << "Defining cylindrical coordinates for (Lat, Lon) = (" + << pnt_ptr->lat() << ", " << pnt_ptr->lon() << "), Range = " + << ra_grid.range_n() << " every " << ra_grid.range_delta_km() + << ra_grid.range_n() << " every " << ra_grid.range_delta_km() + << "km, Azimuth = " << ra_grid.azimuth_n() << "\n"; + + // Write track info + write_tc_storm(tmp_out, + trk_ptr->storm_id().c_str(), + trk_ptr->technique().c_str(), + di.domain.c_str()); + + // Write the track lines + write_tc_track_lines(tmp_out, *trk_ptr); + + // Define dimensions + trk_dim = add_dim(tmp_out, "track_point", + 
trk_ptr->n_points()); + vld_dim = add_dim(tmp_out, "time", 1); + rng_dim = add_dim(tmp_out, "range", + (long) ra_grid.range_n()); + azi_dim = add_dim(tmp_out, "azimuth", + (long) ra_grid.azimuth_n()); + + // Write the track locations + write_tc_track_lat_lon(tmp_out, trk_dim, *trk_ptr); + + // Write timing info for this TrackPoint + write_tc_times(tmp_out, vld_dim, trk_ptr, pnt_ptr); + + // Define range and azimuth coordinate variables + def_tc_range_azimuth(tmp_out, + rng_dim, azi_dim, + ra_grid, bad_data_double); + + // Pressure dimension and values (same for all temp files) + pressure_levels = prs_lev; + if(pressure_levels.size() > 0) { + prs_dim = add_dim(tmp_out, "pressure", + (long) pressure_levels.size()); + def_tc_pressure(tmp_out, prs_dim, pressure_levels); + } + + // Define latitude and longitude + NcVar lat_var, lon_var; + def_tc_lat_lon(tmp_out, vld_dim, rng_dim, azi_dim, + lat_var, lon_var); + + // Compute lat and lon coordinate arrays + compute_lat_lon(ra_grid, lat_arr, lon_arr); + + // Write coordinate arrays + write_tc_data(tmp_out, ra_grid, 0, lat_var, lat_arr); + write_tc_data(tmp_out, ra_grid, 0, lon_var, lon_arr); + + // Write track point values + write_tc_track_point(tmp_out, vld_dim, *pnt_ptr); + + // Clean up + if(lat_arr) { delete[] lat_arr; lat_arr = (double *) 0; } + if(lon_arr) { delete[] lon_arr; lon_arr = (double *) 0; } + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TmpFileInfo::write_nc_data(const VarInfo *vi, const DataPlane &dp_in, + const Grid &grid_in) { + DataPlane dp_out; + RegridInfo ri; + + // Use default regridding options + ri.method = InterpMthd_Nearest; + ri.width = 1; + ri.vld_thresh = 1.0; + ri.shape = GridTemplateFactory::GridTemplate_Square; + + // Do the cylindrical coordinate transformation + dp_out = met_regrid(dp_in, grid_in, grid_out, ri); + + // Logic for pressure level data + bool is_prs = (vi->level().type() == LevelType_Pres); + + // Setup dimensions + vector 
dims; + dims.push_back(vld_dim); + if(is_prs) dims.push_back(prs_dim); + dims.push_back(rng_dim); + dims.push_back(azi_dim); + + // Create the output variable name + ConcatString var_name; + var_name << vi->name_attr(); + if(!is_prs) { + var_name << "_" << vi->level_attr(); + } + + // Add new variable, if needed + if(!has_var(tmp_out, var_name.c_str())) { + NcVar new_var = tmp_out->addVar(var_name, ncDouble, dims); + add_att(&new_var, long_name_att_name, vi->long_name_attr()); + add_att(&new_var, units_att_name, vi->units_attr()); + add_att(&new_var, fill_value_att_name, bad_data_double); + } + + // Get the current variable + NcVar cur_var = get_var(tmp_out, var_name.c_str()); + + // Write pressure level data + if(is_prs) { + + // Find pressure level index + int i_level = pressure_levels.size() - 1; + for(set::iterator it = pressure_levels.begin(); + it != pressure_levels.end(); ++it, --i_level) { + if(is_eq(vi->level().lower(), *it)) break; + } + + write_tc_pressure_level_data(tmp_out, ra_grid, + 0, i_level, cur_var, dp_out.data()); + } + // Write single level data + else { + write_tc_data_rev(tmp_out, ra_grid, + 0, cur_var, dp_out.data()); + } + + return; +} + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_diag/tc_diag.h b/src/tools/tc_utils/tc_diag/tc_diag.h new file mode 100644 index 0000000000..2c79d5f8b2 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/tc_diag.h @@ -0,0 +1,201 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// +// +// Filename: tc_diag.h +// +// Description: +// +// Mod# Date Name 
Description +// ---- ---- ---- ----------- +// 000 09/27/22 Halley Gotway New +// +//////////////////////////////////////////////////////////////////////// + +#ifndef __TC_DIAG_H__ +#define __TC_DIAG_H__ + +//////////////////////////////////////////////////////////////////////// + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "tc_diag_conf_info.h" + +#include "vx_data2d_factory.h" +#include "vx_tc_util.h" +#include "vx_grid.h" +#include "vx_util.h" + +//////////////////////////////////////////////////////////////////////// +// +// Constants +// +//////////////////////////////////////////////////////////////////////// + +// Program name +static const char* program_name = "tc_diag"; + +// ATCF file suffix +static const char* atcf_suffix = ".dat"; + +// Default configuration file name +static const char* default_config_filename = + "MET_BASE/config/TCDiagConfig_default"; + +// Default output directory +static const char* default_out_dir = "."; + +// Default output prefix +static const char* default_out_prefix = ""; + +//////////////////////////////////////////////////////////////////////// +// +// Variables for Command Line Arguments +// +//////////////////////////////////////////////////////////////////////// + +// Input files +static std::map data_opt_map; +static StringArray deck_source, deck_model_suffix; +static ConcatString config_file; +static TCDiagConfInfo conf_info; +static GrdFileType file_type = FileType_None; + +// Optional arguments +static ConcatString out_dir; +static ConcatString out_prefix; + +//////////////////////////////////////////////////////////////////////// +// +// Variables for Output Files +// +//////////////////////////////////////////////////////////////////////// + +class OutFileInfo { + + private: + + void init_from_scratch(); + + public: + + OutFileInfo(); + ~OutFileInfo(); + + 
////////////////////////////////////////////////////////////////// + + // Track information + const TrackInfo *trk_ptr; // not allocated + + // Mapping of diagnostic names to values for each track point + std::map diag_map; + + // NetCDF Diagnostics output + ConcatString nc_diag_file; + netCDF::NcFile *nc_diag_out; + + // NetCDF Dimensions + netCDF::NcDim vld_dim; + + // CIRA Diagnostics output + ConcatString cira_diag_file; + std::ofstream *cira_diag_out; + AsciiTable cira_diag_at; + + void clear(); + + netCDF::NcFile *setup_nc_file(const string &); + void add_diag_map(const std::map &, int); + void write_nc_diag(); +}; + +static std::map out_file_map; + +//////////////////////////////////////////////////////////////////////// +// +// Variables for Temp Files +// +//////////////////////////////////////////////////////////////////////// + +class TmpFileInfo { + + private: + + void init_from_scratch(); + + public: + + TmpFileInfo(); + ~TmpFileInfo(); + + ////////////////////////////////////////////////////////////////// + + // Track information + const TrackInfo *trk_ptr; // not allocated + const TrackPoint *pnt_ptr; // not allocated + + // Mapping of diagnostic names to values + std::map diag_map; + + // Range azimuth grid + Grid grid_out; + TcrmwGrid ra_grid; + + // Pressure levels + std::set pressure_levels; + + // Domain name + std::string domain; + + // NetCDF Cylindrical Coordinates output + ConcatString tmp_file; + netCDF::NcFile *tmp_out; + + // NetCDF Dimensions + netCDF::NcDim trk_dim; + netCDF::NcDim vld_dim; + netCDF::NcDim rng_dim; + netCDF::NcDim azi_dim; + netCDF::NcDim prs_dim; + + void open(const TrackInfo *, const TrackPoint *, + const DomainInfo &, + const std::set &); + void close(); + + void clear(); + + void setup_nc_file(const DomainInfo &, + const std::set &); + + void write_nc_data(const VarInfo *, const DataPlane &, + const Grid &); +}; + +static std::map tmp_file_map; + 
+//////////////////////////////////////////////////////////////////////// + +#endif // __TC_DIAG_H__ + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_diag/tc_diag_conf_info.cc b/src/tools/tc_utils/tc_diag/tc_diag_conf_info.cc new file mode 100644 index 0000000000..62c8f374c8 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/tc_diag_conf_info.cc @@ -0,0 +1,452 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + +using namespace std; + +#include +#include +#include +#include +#include +#include +#include + +#include "tc_diag_conf_info.h" + +#include "vx_log.h" + +//////////////////////////////////////////////////////////////////////// +// +// Code for struct DataOptInfo +// +//////////////////////////////////////////////////////////////////////// + +void DataOptInfo::clear() { + + tech_ids.clear(); + data_files.clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +DataOptInfo & DataOptInfo::operator+=(const DataOptInfo &info) { + + tech_ids.add(info.tech_ids); + data_files.add(info.data_files); + + return(*this); +} + +//////////////////////////////////////////////////////////////////////// +// +// Code for class DomainInfo +// +//////////////////////////////////////////////////////////////////////// + +DomainInfo::DomainInfo() { + + init_from_scratch(); +} + +//////////////////////////////////////////////////////////////////////// + +DomainInfo::~DomainInfo() { + + clear(); +} + +//////////////////////////////////////////////////////////////////////// + +void 
DomainInfo::init_from_scratch() { + + clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void DomainInfo::clear() { + + tech_ids.clear(); + data_files.clear(); + domain.clear(); + + data.name = (const char *) 0; + data.range_n = bad_data_int; + data.azimuth_n = bad_data_int; + data.range_max_km = bad_data_double; + data.lat_center = bad_data_double; + data.lon_center = bad_data_double; + + delta_range_km = bad_data_double; + + var_info_ptr.clear(); + diag_script.clear(); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void DomainInfo::parse_domain_info(Dictionary &dict) { + + // Initialize + clear(); + + // Note: tech_ids and data_files are specified on the + // command line rather than in the config file + + // Conf: domain + domain = dict.lookup_string(conf_key_domain); + + // Hard-code the name + data.name = "TCDIAG"; + + // Conf: n_range + data.range_n = dict.lookup_int(conf_key_n_range); + + // Conf: azimuth_n + data.azimuth_n = dict.lookup_int(conf_key_n_azimuth); + + // Conf: delta_range + delta_range_km = dict.lookup_double(conf_key_delta_range); + + // Conf: diag_script + diag_script = dict.lookup_string_array(conf_key_diag_script); + + // Expand MET_BASE + for(int i=0; i dmap) { + int i, j; + StringArray sa; + Dictionary *dict = (Dictionary *) 0; + VarInfoFactory vi_factory; + + // Conf: version + check_met_version(conf.lookup_string(conf_key_version).c_str()); + + // Conf: model + model = conf.lookup_string_array(conf_key_model); + + // Conf: storm_id + storm_id = conf.lookup_string(conf_key_storm_id); + + // Conf: basin + basin = conf.lookup_string(conf_key_basin); + + // Conf: cyclone + cyclone = conf.lookup_string(conf_key_cyclone); + + // Conf: init_inc + init_inc = conf.lookup_unixtime(conf_key_init_inc); + + // Conf: valid_beg, valid_end + valid_beg = conf.lookup_unixtime(conf_key_valid_beg); + valid_end = conf.lookup_unixtime(conf_key_valid_end); + + 
// Conf: valid_inc + sa = conf.lookup_string_array(conf_key_valid_inc); + for(i=0; i " + << "the \"" << conf_key_data_field + << "\" config file entry cannot be empty!\n\n"; + exit(1); + } + + // Process each field + for(i=0; iset_dict(i_dict); + var_info.push_back(vi); + + // Unique list of requested pressure levels + if(vi->level().type() == LevelType_Pres) { + if(vi->level().lower() != vi->level().upper()) { + mlog << Error << "\nTCDiagConfInfo::process_config() -> " + << "only individual pressure levels are supported, " + << "not ranges (" << vi->level().req_name() + << ").\n\n"; + exit(1); + } + pressure_levels.insert(vi->level().lower()); + } + + // Conf: field.domain + sa = i_dict.lookup_string_array(conf_key_domain); + + // Store domain-specific VarInfo pointers + for(j=0; j " + << "the \"" << conf_key_nc_diag_flag << "\" and \"" + << conf_key_cira_diag_flag + << "\" config entries cannot both be false.\n\n"; + exit(1); + } + */ + + // TODO: Remove this check for MET version 12.0.0 + if(nc_diag_flag || cira_diag_flag) { + mlog << Warning << "\nResetting the \"" + << conf_key_nc_diag_flag << "\" and \"" << conf_key_cira_diag_flag + << "\" configuration options to false since they are not supported" + << " for MET " << met_version << ".\n" + << "Additional outputs will be added in future MET versions.\n\n"; + nc_diag_flag = cira_diag_flag = false; + } + + // Conf: tmp_dir + tmp_dir = parse_conf_tmp_dir(&conf); + + // Conf: output_prefix + output_prefix = conf.lookup_string(conf_key_output_prefix); + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void TCDiagConfInfo::parse_domain_info(map dmap) { + Dictionary *dict = (Dictionary *) 0; + int i, j; + bool found; + + // Conf: domain_info + dict = conf.lookup_array(conf_key_domain_info); + + if(!dict) { + mlog << Error << "\nTCDiagConfInfo::parse_domain_info() -> " + << "array lookup failed for key \"" << conf_key_domain_info + << "\"\n\n"; + exit(1); + } + + // Parse 
each grid info object + for(i=0; in_entries(); i++) { + DomainInfo di; + + // Parse the current domain info + di.parse_domain_info(*((*dict)[i]->dict_value())); + + // Store the domain-specifc data files + if(dmap.count(di.domain) > 0) { + di.tech_ids = dmap[di.domain].tech_ids; + di.data_files = dmap[di.domain].data_files; + } + else { + mlog << Error << "\nTCDiagConfInfo::parse_domain_info() -> " + << "no \"-data " << di.domain << "\" command line option provided for the \"" + << conf_key_domain_info << "." << conf_key_domain << "\" = \"" << di.domain + << "\" config file entry!\n\n"; + exit(1); + } + + // Check for duplicate entries + for(j=0; i " + << "multiple \"" << conf_key_domain_info + << "\" entries found for domain \"" << di.domain << "\"!\n\n"; + exit(1); + } + } + + // Store new entry + domain_info.push_back(di); + } + + // Make sure all -data domains appear in the config file + map::iterator it; + for(it = dmap.begin(); it != dmap.end(); it++) { + + for(i=0, found=false; ifirst == domain_info[i].domain) { + found = true; + break; + } + } // end for i + + if(!found) { + mlog << Error << "\nTCDiagConfInfo::parse_domain_info() -> " + << "no \"" << conf_key_domain_info << "." 
<< conf_key_domain << "\" = \"" + << it->first << "\" config file entry provided for the \"-data " + << it->first << "\" command line option!\n\n"; + exit(1); + } + } + + return; +} + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_diag/tc_diag_conf_info.h b/src/tools/tc_utils/tc_diag/tc_diag_conf_info.h new file mode 100644 index 0000000000..3c2d0b5132 --- /dev/null +++ b/src/tools/tc_utils/tc_diag/tc_diag_conf_info.h @@ -0,0 +1,157 @@ +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* +// ** Copyright UCAR (c) 1992 - 2023 +// ** University Corporation for Atmospheric Research (UCAR) +// ** National Center for Atmospheric Research (NCAR) +// ** Research Applications Lab (RAL) +// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA +// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* + +//////////////////////////////////////////////////////////////////////// + +#ifndef __TC_DIAG_CONF_INFO_H__ +#define __TC_DIAG_CONF_INFO_H__ + +//////////////////////////////////////////////////////////////////////// + +#include +#include +#include + +#include "vx_config.h" +#include "vx_data2d_factory.h" +#include "vx_data2d.h" +#include "vx_util.h" + +//////////////////////////////////////////////////////////////////////// + +// Struct for the -data command line options +struct DataOptInfo { + + StringArray tech_ids; // ATCF Tech ID(s) corresponding to this data source + StringArray data_files; // Gridded data file(s) + + void clear(); + + DataOptInfo & operator+=(const DataOptInfo &); +}; + +//////////////////////////////////////////////////////////////////////// + +class DomainInfo { + + private: + + void init_from_scratch(); + + public: + + DomainInfo(); + ~DomainInfo(); + + // ATCF Tech ID's + StringArray tech_ids; + + // Domain data files + StringArray data_files; + + // Domain name + string domain; + + // TcrmwData struct for creating a TcrmwGrid object + TcrmwData data; + double delta_range_km; 
+ + // Vector of VarInfo pointers (not allocated) + std::vector var_info_ptr; + + // Diagnostic scripts to be run + StringArray diag_script; + + ////////////////////////////////////////////////////////////////// + + void clear(); + + void parse_domain_info(Dictionary &); + void set_data_files(const StringArray &); + + int get_n_data() const; +}; + +//////////////////////////////////////////////////////////////////////// + +inline int DomainInfo::get_n_data() const { return var_info_ptr.size(); } + +//////////////////////////////////////////////////////////////////////// + +class TCDiagConfInfo { + + private: + + void init_from_scratch(); + + public: + + TCDiagConfInfo(); + ~TCDiagConfInfo(); + + ////////////////////////////////////////////////////////////////// + + // TCDiag configuration object + MetConfig conf; + + // Track line filtering criteria + StringArray model; + ConcatString storm_id; + ConcatString basin; + ConcatString cyclone; + unixtime init_inc; + unixtime valid_beg, valid_end; + TimeArray valid_inc, valid_exc; + NumArray valid_hour; + NumArray lead_time; + + // Vector of VarInfo objects from data.field (allocated) + std::vector var_info; + + // Pressure level values from the config file + std::set pressure_levels; + + // Vector of DomainInfo + std::vector domain_info; + + // Vortext removal settings + bool vortex_removal_flag; + + // Directory for temporary files + ConcatString tmp_dir; + + // String to customize output file name + ConcatString output_prefix; + + // Output file options + bool nc_rng_azi_flag; + bool nc_diag_flag; + bool cira_diag_flag; + + ////////////////////////////////////////////////////////////////// + + void clear(); + + void read_config(const char *, const char *); + void process_config(GrdFileType, + std::map); + + void parse_domain_info(std::map); + + int get_n_domain() const; +}; + +//////////////////////////////////////////////////////////////////////// + +inline int TCDiagConfInfo::get_n_domain() const { return 
domain_info.size(); } + +//////////////////////////////////////////////////////////////////////// + +#endif /* __TC_DIAG_CONF_INFO_H__ */ + +//////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_gen/tc_gen.cc b/src/tools/tc_utils/tc_gen/tc_gen.cc index a1d6e0a4a9..f666ebab7b 100644 --- a/src/tools/tc_utils/tc_gen/tc_gen.cc +++ b/src/tools/tc_utils/tc_gen/tc_gen.cc @@ -25,7 +25,7 @@ // 008 05/02/22 Halley Gotway MET #2148 Fix init_hour and lead misses // 009 07/06/22 Howard Soh METplus-Internal #19 Rename main to met_main // 010 09/28/22 Prestopnik MET #2227 Remove using namespace std and netCDF from header files -// +// 011 05/25/23 Halley Gotway MET #2552 Update parsing of gtwo probability lead times // //////////////////////////////////////////////////////////////////////// @@ -86,7 +86,8 @@ static void process_edecks (const StringArray &, const StringArray &, ProbInfoArray &); static void process_shapes (const StringArray &, - GenShapeInfoArray &); + GenShapeInfoArray &, + int &); static void get_genesis_pairs (const TCGenVxOpt &, const ConcatString &, @@ -516,7 +517,7 @@ void score_genesis_prob(const GenesisInfoArray &best_ga, //////////////////////////////////////////////////////////////////////// void score_genesis_shape(const GenesisInfoArray &best_ga) { - int i, j, total_probs; + int i, j, total_probs, max_n_prob; StringArray shape_files; GenShapeInfoArray shapes_all, shapes_subset; ProbGenPCTInfo probgen_pct; @@ -530,11 +531,11 @@ void score_genesis_shape(const GenesisInfoArray &best_ga) { << "Processing " << shape_files.n() << " shapefile(s) matching the \"" << gen_shp_reg_exp << "\" regular expression.\n"; - process_shapes(shape_files, shapes_all); + process_shapes(shape_files, shapes_all, max_n_prob); // Setup output files based on the maximum number of filters // and lead times possible - setup_txt_files(conf_info.n_vx(), max_n_shape_prob, 0); + setup_txt_files(conf_info.n_vx(), max_n_prob, 0); // 
Process each verification filter for(i=0; in_records; + // Get the subrecord names, ignoring case + rec_names = dbf_file.subrecord_names(); + rec_names.set_ignore_case(true); + // Check expected shape types const ShapeType shape_type = (ShapeType) (shp_file.header()->shape_type); if(shape_type != shape_type_polygon) { @@ -1650,7 +1658,7 @@ void process_shapes(const StringArray &files, // Check for end-of-file if(shp_file.at_eof()) { - mlog << Error << "\nrocess_shapes() -> " + mlog << Error << "\nprocess_shapes() -> " << "hit shp file EOF before reading all records!\n\n"; exit(1); } @@ -1658,32 +1666,37 @@ void process_shapes(const StringArray &files, // Read the current shape and metadata shp_file >> poly_rec; poly_rec.toggle_longitudes(); - sa = dbf_file.subrecord_values(j); + rec_values = dbf_file.subrecord_values(j); // Initialize GenShapeInfo gsi.clear(); gsi.set_time(file_ut); - gsi.set_basin(string_to_basin_abbr(sa[0]).c_str()); gsi.set_poly(poly_rec); - // Parse probabilities from the subrecord values - for(k=0; k " + << "the first field name (" << rec_names[0] + << ") is not \"BASIN\", as expected.\n\n"; + exit(1); + } + gsi.set_basin(string_to_basin_abbr(rec_values[0]).c_str()); + + // Parse probabilities from the subrecord names and values + for(k=0; k= max_n_shape_prob) { - mlog << Warning << "\nprocess_shapes() -> " - << "unexpected number of shapefile probabilities (" - << gsi.n_prob() << ") in record " << j+1 - << " of file \"" << dbf_file_name - << "\"!\n\n"; - continue; - } + // Parse the lead day from the field name + lead_day = stoi(rec_names[k].substr(4)); + + mlog << Debug(5) << "Parsed " << rec_values[0] << " basin " + << lead_day << " day " << rec_values[k] + << " genesis probability shape.\n"; // Store the probability info - gsi.add_prob(shape_prob_lead_hr[gsi.n_prob()]*sec_per_hour, - atoi(sa[k].c_str())/100.0); + gsi.add_prob(lead_day*sec_per_day, + atoi(rec_values[k].c_str())/100.0); } } // end for k @@ -1691,6 +1704,9 @@ void 
process_shapes(const StringArray &files, if(shapes.add(gsi, true)) { mlog << Debug(5) << "Add new " << gsi.serialize() << "\n"; total_probs += gsi.n_prob(); + if(gsi.n_prob() > max_n_prob) { + max_n_prob = gsi.n_prob(); + } } } // end for j diff --git a/src/tools/tc_utils/tc_gen/tc_gen.h b/src/tools/tc_utils/tc_gen/tc_gen.h index 745956ac0d..5f1c4cc80c 100644 --- a/src/tools/tc_utils/tc_gen/tc_gen.h +++ b/src/tools/tc_utils/tc_gen/tc_gen.h @@ -81,11 +81,7 @@ static const char *txt_file_abbr[n_txt] = { // Cyclone numbers > 50 are for testing or invests static const int max_best_cyclone_number = 50; -// 2, 5, and 7 days shapefile probabilities -static const int max_n_shape_prob = 3; -static const int shape_prob_lead_hr[max_n_shape_prob] = { - 48, 120, 168 -}; +// Maximum search time for genesis shapefile matches static const int shape_prob_search_sec = 168*sec_per_hour; //////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_pairs/tc_pairs.cc b/src/tools/tc_utils/tc_pairs/tc_pairs.cc index df144e8172..03ff8616c0 100644 --- a/src/tools/tc_utils/tc_pairs/tc_pairs.cc +++ b/src/tools/tc_utils/tc_pairs/tc_pairs.cc @@ -37,6 +37,7 @@ // 013 09/28/22 Prestopnik MET #2227 Remove namespace std from header files // 014 10/06/22 Halley Gotway MET #392 Incorporate diagnostics // 015 02/20/23 Seth Linden MET #2429 Added option to prevent output of consensus track members +// 016 06/08/23 Halley Gotway MET #2532 Full circle winds are the mean of the non-zero quadrants // //////////////////////////////////////////////////////////////////////// diff --git a/src/tools/tc_utils/tc_rmw/tc_rmw.cc b/src/tools/tc_utils/tc_rmw/tc_rmw.cc index 4dd6a5f2dc..ffadd8277a 100644 --- a/src/tools/tc_utils/tc_rmw/tc_rmw.cc +++ b/src/tools/tc_utils/tc_rmw/tc_rmw.cc @@ -283,7 +283,9 @@ void process_tracks(TrackInfoArray& tracks) { process_track_files(files, files_model_suffix, tracks); - write_tc_tracks(nc_out, track_point_dim, tracks); + 
write_tc_track_lines (nc_out, tracks[0]); + write_tc_track_lat_lon(nc_out, track_point_dim, tracks[0]); + write_tc_rmw (nc_out, track_point_dim, tracks[0]); } //////////////////////////////////////////////////////////////////////// @@ -580,14 +582,22 @@ void setup_nc_file() { range_dim = add_dim(nc_out, "range", (long) tcrmw_grid.range_n()); azimuth_dim = add_dim(nc_out, "azimuth", (long) tcrmw_grid.azimuth_n()); + // Define init, lead, and valid time variables + def_tc_init_time(nc_out, + init_time_str_var, init_time_ut_var); + def_tc_valid_time(nc_out, track_point_dim, + valid_time_str_var, valid_time_ut_var); + def_tc_lead_time(nc_out, track_point_dim, + lead_time_str_var, lead_time_sec_var); + // Define range and azimuth dimensions def_tc_range_azimuth(nc_out, range_dim, azimuth_dim, tcrmw_grid, conf_info.rmw_scale); // Define latitude and longitude arrays - def_tc_time_lat_lon(nc_out, + def_tc_lat_lon(nc_out, track_point_dim, range_dim, azimuth_dim, - valid_time_var, lat_arr_var, lon_arr_var); + lat_arr_var, lon_arr_var); // Find all variable levels, long names, and units for(int i_var = 0; i_var < conf_info.get_n_data(); i_var++) { @@ -655,12 +665,16 @@ void process_fields(const TrackInfoArray& tracks) { mlog << Debug(2) << "Processing 1 track consisting of " << track.n_points() << " points.\n"; + // Write the track initialization time + write_tc_init_time(nc_out, + init_time_str_var, init_time_ut_var, + track.init()); + // Loop over track points for (int i_point = 0; i_point < track.n_points(); i_point++) { TrackPoint point = track[i_point]; unixtime valid_time = point.valid(); - long valid_yyyymmddhh = unix_to_long_yyyymmddhh(valid_time); mlog << Debug(3) << "[" << i_point+1 << " of " << track.n_points() << "] Processing track point valid at " @@ -687,9 +701,13 @@ void process_fields(const TrackInfoArray& tracks) { write_tc_data(nc_out, tcrmw_grid, i_point, lat_arr_var, lat_arr); write_tc_data(nc_out, tcrmw_grid, i_point, lon_arr_var, lon_arr); - // Write 
valid time + // Write valid and lead times write_tc_valid_time(nc_out, i_point, - valid_time_var, valid_yyyymmddhh); + valid_time_str_var, valid_time_ut_var, + valid_time); + write_tc_lead_time(nc_out, i_point, + lead_time_str_var, lead_time_sec_var, + point.lead()); for(int i_var = 0; i_var < conf_info.get_n_data(); i_var++) { diff --git a/src/tools/tc_utils/tc_rmw/tc_rmw.h b/src/tools/tc_utils/tc_rmw/tc_rmw.h index 125a6978cb..87e37b74be 100644 --- a/src/tools/tc_utils/tc_rmw/tc_rmw.h +++ b/src/tools/tc_utils/tc_rmw/tc_rmw.h @@ -102,9 +102,14 @@ static netCDF::NcDim range_dim; static netCDF::NcDim azimuth_dim; static netCDF::NcDim pressure_dim; static netCDF::NcDim track_point_dim; +static netCDF::NcVar init_time_str_var; +static netCDF::NcVar init_time_ut_var; +static netCDF::NcVar valid_time_str_var; +static netCDF::NcVar valid_time_ut_var; +static netCDF::NcVar lead_time_str_var; +static netCDF::NcVar lead_time_sec_var; static netCDF::NcVar lat_arr_var; static netCDF::NcVar lon_arr_var; -static netCDF::NcVar valid_time_var; static netCDF::NcVar data_var; static netCDF::NcVar wind_r_var; static netCDF::NcVar wind_a_var; diff --git a/src/tools/tc_utils/tc_rmw/tc_rmw_wind_converter.cc b/src/tools/tc_utils/tc_rmw/tc_rmw_wind_converter.cc index 0755bcdeb5..f94994b314 100644 --- a/src/tools/tc_utils/tc_rmw/tc_rmw_wind_converter.cc +++ b/src/tools/tc_utils/tc_rmw/tc_rmw_wind_converter.cc @@ -226,12 +226,12 @@ void wind_ne_to_ra(const TcrmwGrid& tcrmw_grid, double wind_r; double wind_t; if(is_bad_data(u) || is_bad_data(v)) { - mlog << Debug(3) << "wind_ne_to_ra: latlon:" << lat << "," << lon << " winds are missing\n"; + mlog << Debug(4) << "wind_ne_to_ra: latlon:" << lat << "," << lon << " winds are missing\n"; wind_r = bad_data_double; wind_t = bad_data_double; } else { tcrmw_grid.wind_ne_to_ra(lat, lon, u, v, wind_r, wind_t); - mlog << Debug(3) << "wind_ne_to_ra: latlon:" << lat << "," << lon << " uv:" << u << "," + mlog << Debug(4) << "wind_ne_to_ra: latlon:" << 
lat << "," << lon << " uv:" << u << "," << v << ", rt:" << wind_r << "," << wind_t <<"\n"; } wind_r_arr[i] = wind_r;