From ebddb56d1acc0d25a10505fa30b5e5beff4ed77c Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Tue, 26 Jan 2021 09:32:54 -0700 Subject: [PATCH 01/86] Start on write netcdf pickle alternative. --- met/data/wrappers/write_pickle_dataplane.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index 079557538b..bacf617ceb 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -13,12 +13,16 @@ import pickle import importlib.util import xarray as xr +import netCDF4 as nc print('Python Script:\t', sys.argv[0]) print('User Command:\t', sys.argv[2:]) print('Write Pickle:\t', sys.argv[1]) pickle_filename = sys.argv[1] +netcdf_filename = sys.argv[1] + '.nc4' + +print('Write NetCDF:\t', netcdf_filename) pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] @@ -37,6 +41,21 @@ else: met_info = { 'attrs': met_in.attrs, 'met_data': met_in.met_data } +print('write_pickle_dataplane') print(met_info) pickle.dump( met_info, open( pickle_filename, "wb" ) ) + +# write NetCDF file +ds = nc.Dataset(netcdf_filename, 'w') + +nx, ny = met_in.met_data.shape +print(nx, ny) +ds.createDimension('x', nx) +ds.createDimension('y', ny) +ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) + +for attr in met_in.attrs: + attr_val = met_in.attrs[attr] + print(attr, attr_val, type(attr_val)) +ds.close() From 0fdbfdd617bca2b36fee468dc04e9dcb5a96b8dc Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Tue, 26 Jan 2021 09:38:43 -0700 Subject: [PATCH 02/86] Write dataplane array. --- met/data/wrappers/write_pickle_dataplane.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index bacf617ceb..c9ba3a57eb 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -53,7 +53,8 @@ print(nx, ny) ds.createDimension('x', nx) ds.createDimension('y', ny) -ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) +dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) +dp[:] = met_in.met_data for attr in met_in.attrs: attr_val = met_in.attrs[attr] From 6d46603cad053d426e22cf3927b7358892ec06aa Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Tue, 26 Jan 2021 09:46:16 -0700 Subject: [PATCH 03/86] Start on read of netcdf as pickle alternative. --- met/data/wrappers/read_pickle_dataplane.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py index f97f153df7..58badccdd2 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -9,7 +9,16 @@ import sys import numpy as np import pickle +import netCDF4 as nc print('Python Script:\t', sys.argv[0]) print('Load Pickle:\t', sys.argv[1]) met_info = pickle.load(open(sys.argv[1], "rb")) + +netcdf_filename = sys.argv[1] + '.nc4' +print('Read NetCDF:\t', netcdf_filename) + +# read NetCDF file +ds = nc.Dataset(netcdf_filename, 'r') +met_data = ds['met_data'][:] +met_info['met_data'] = met_data From 6fe424551448285776909c07dd1304515fff1912 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Tue, 2 Feb 2021 11:15:04 -0700 Subject: [PATCH 04/86] Create attribute variables. 
--- met/data/wrappers/write_pickle_dataplane.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index c9ba3a57eb..a6e5b0a42a 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -59,4 +59,6 @@ for attr in met_in.attrs: attr_val = met_in.attrs[attr] print(attr, attr_val, type(attr_val)) + if type(attr_val) == str: + a = ds.createVariable(attr, 'str') ds.close() From 644db219b3a6518d51569b4bfe6bb626a510935f Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 3 Feb 2021 16:25:46 -0700 Subject: [PATCH 05/86] Use global attributes for met_info attrs. --- met/data/wrappers/write_pickle_dataplane.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index a6e5b0a42a..1764bbbd9e 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -53,6 +53,7 @@ print(nx, ny) ds.createDimension('x', nx) ds.createDimension('y', ny) +ds.createDimension('str_dim', 1) dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) dp[:] = met_in.met_data @@ -60,5 +61,5 @@ attr_val = met_in.attrs[attr] print(attr, attr_val, type(attr_val)) if type(attr_val) == str: - a = ds.createVariable(attr, 'str') + setattr(ds, attr, attr_val) ds.close() From 659406273e3cd211578ddda0e8201ca48fdd151b Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 3 Feb 2021 17:45:08 -0700 Subject: [PATCH 06/86] Add grid structure. --- met/data/wrappers/write_pickle_dataplane.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index 1764bbbd9e..ec234a5c40 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -53,7 +53,6 @@ print(nx, ny) ds.createDimension('x', nx) ds.createDimension('y', ny) -ds.createDimension('str_dim', 1) dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) dp[:] = met_in.met_data @@ -62,4 +61,7 @@ print(attr, attr_val, type(attr_val)) if type(attr_val) == str: setattr(ds, attr, attr_val) + if type(attr_val) == dict: + for key in attr_val: + setattr(ds, attr + '.' + key, attr_val[key]) ds.close() From c6667e38e598716282e8b22ec43d6002beb0024c Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 3 Feb 2021 18:54:31 -0700 Subject: [PATCH 07/86] Read metadata back into met_info.attrs. --- met/data/wrappers/read_pickle_dataplane.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py index 58badccdd2..ddbcddce5c 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -21,4 +21,11 @@ # read NetCDF file ds = nc.Dataset(netcdf_filename, 'r') met_data = ds['met_data'][:] +grid = {} +for attr, attr_val in ds.__dict__.items(): + print(attr, attr_val) + if 'grid' in attr: + grid_attr = attr.split('.')[1] + grid[grid_attr] = attr_val +print(grid) met_info['met_data'] = met_data From 1e6eb9ee260658c2e2d0098ce93d91f193698f7f Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 3 Feb 2021 19:20:17 -0700 Subject: [PATCH 08/86] Convert grid.nx and grid.ny to int. 
--- met/data/wrappers/read_pickle_dataplane.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py index ddbcddce5c..e3b1b03768 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -13,7 +13,8 @@ print('Python Script:\t', sys.argv[0]) print('Load Pickle:\t', sys.argv[1]) -met_info = pickle.load(open(sys.argv[1], "rb")) +# met_info = pickle.load(open(sys.argv[1], "rb")) +met_info = {} netcdf_filename = sys.argv[1] + '.nc4' print('Read NetCDF:\t', netcdf_filename) @@ -21,11 +22,16 @@ # read NetCDF file ds = nc.Dataset(netcdf_filename, 'r') met_data = ds['met_data'][:] +met_attrs = {} grid = {} for attr, attr_val in ds.__dict__.items(): - print(attr, attr_val) if 'grid' in attr: grid_attr = attr.split('.')[1] grid[grid_attr] = attr_val -print(grid) + else: + met_attrs[attr] = attr_val +grid['nx'], grid['ny'] = int(grid['nx']), int(grid['ny']) +met_attrs['grid'] = grid met_info['met_data'] = met_data +met_info['attrs'] = met_attrs +print(met_info) From e0055854e870f4d6a7c4c0ad3bd0e3220b69434c Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 3 Feb 2021 19:27:04 -0700 Subject: [PATCH 09/86] Rename _name key to name. --- met/data/wrappers/read_pickle_dataplane.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py index e3b1b03768..330ed740a0 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -32,6 +32,8 @@ met_attrs[attr] = attr_val grid['nx'], grid['ny'] = int(grid['nx']), int(grid['ny']) met_attrs['grid'] = grid +met_attrs['name'] = met_attrs['_name'] +del met_attrs['_name'] met_info['met_data'] = met_data met_info['attrs'] = met_attrs print(met_info) From ab986caf6564b54067b0caa91c0ae78c2d9bb5c9 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 4 Feb 2021 10:56:14 -0700 Subject: [PATCH 10/86] Removed pickle write. 
--- met/data/wrappers/read_pickle_dataplane.py | 5 +++-- met/data/wrappers/write_pickle_dataplane.py | 11 +++++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py index 330ed740a0..dabc2f51e3 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -12,11 +12,12 @@ import netCDF4 as nc print('Python Script:\t', sys.argv[0]) -print('Load Pickle:\t', sys.argv[1]) +# print('Load Pickle:\t', sys.argv[1]) # met_info = pickle.load(open(sys.argv[1], "rb")) met_info = {} -netcdf_filename = sys.argv[1] + '.nc4' +# netcdf_filename = sys.argv[1] + '.nc4' +netcdf_filename = sys.argv[1] print('Read NetCDF:\t', netcdf_filename) # read NetCDF file diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index ec234a5c40..5794466064 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -19,8 +19,9 @@ print('User Command:\t', sys.argv[2:]) print('Write Pickle:\t', sys.argv[1]) -pickle_filename = sys.argv[1] -netcdf_filename = sys.argv[1] + '.nc4' +# pickle_filename = sys.argv[1] +# netcdf_filename = sys.argv[1] + '.nc4' +netcdf_filename = sys.argv[1] print('Write NetCDF:\t', netcdf_filename) @@ -44,13 +45,13 @@ print('write_pickle_dataplane') print(met_info) -pickle.dump( met_info, open( pickle_filename, "wb" ) ) +# pickle.dump( met_info, open( pickle_filename, "wb" ) ) # write NetCDF file ds = nc.Dataset(netcdf_filename, 'w') nx, ny = met_in.met_data.shape -print(nx, ny) +# print(nx, ny) ds.createDimension('x', nx) ds.createDimension('y', ny) dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) @@ -59,6 +60,8 @@ for attr in met_in.attrs: attr_val = met_in.attrs[attr] print(attr, attr_val, type(attr_val)) + if attr == 'name': + setattr(ds, '_name', attr_val) if type(attr_val) == str: setattr(ds, attr, attr_val) if type(attr_val) == dict: From 760b6904f7fc7d6cc8da776dc89023280d9fa639 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 5 Feb 2021 08:42:44 -0700 Subject: [PATCH 11/86] Fixed write_pickle_dataplane to work for both numpy and xarray. --- met/data/wrappers/write_pickle_dataplane.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index 5794466064..fdb06cbf3f 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -57,8 +57,8 @@ dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) dp[:] = met_in.met_data -for attr in met_in.attrs: - attr_val = met_in.attrs[attr] +for attr in met_info['attrs']: + attr_val = met_info['attrs'][attr] print(attr, attr_val, type(attr_val)) if attr == 'name': setattr(ds, '_name', attr_val) From 791ebf05559e4d14235a7f1c439884276e60b088 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 5 Feb 2021 08:49:25 -0700 Subject: [PATCH 12/86] Use items() to iterate of key, value attrs. 
--- met/data/wrappers/write_pickle_dataplane.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_pickle_dataplane.py index fdb06cbf3f..44b57fea92 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -17,10 +17,7 @@ print('Python Script:\t', sys.argv[0]) print('User Command:\t', sys.argv[2:]) -print('Write Pickle:\t', sys.argv[1]) -# pickle_filename = sys.argv[1] -# netcdf_filename = sys.argv[1] + '.nc4' netcdf_filename = sys.argv[1] print('Write NetCDF:\t', netcdf_filename) @@ -57,8 +54,7 @@ dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) dp[:] = met_in.met_data -for attr in met_info['attrs']: - attr_val = met_info['attrs'][attr] +for attr, attr_val in met_info['attrs'].items(): print(attr, attr_val, type(attr_val)) if attr == 'name': setattr(ds, '_name', attr_val) From c5f17e8d50e931c28895cf57005cadfa8d7dd0ba Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sat, 13 Feb 2021 10:50:12 -0700 Subject: [PATCH 13/86] Write temporary text file. --- met/data/wrappers/write_pickle_point.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/met/data/wrappers/write_pickle_point.py b/met/data/wrappers/write_pickle_point.py index 1f5ee35bdb..907c0e005d 100644 --- a/met/data/wrappers/write_pickle_point.py +++ b/met/data/wrappers/write_pickle_point.py @@ -18,6 +18,7 @@ print('Write Pickle:\t', sys.argv[1]) pickle_filename = sys.argv[1] +tmp_filename = pickle_filename + '.txt' pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] @@ -28,4 +29,8 @@ met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) +f = open(tmp_filename, 'w') +for line in met_in.point_data: + f.write(str(line) + '\n') + pickle.dump( met_in.point_data, open( pickle_filename, "wb" ) ) From d6142e8836d79ab13330b56e8a87ff30eaf0e24c Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 17 Feb 2021 08:53:38 -0700 Subject: [PATCH 14/86] Renamed scripts. --- ...{read_pickle_dataplane.py => read_tmp_dataplane.py} | 8 ++------ ...rite_pickle_dataplane.py => write_tmp_dataplane.py} | 10 +++------- 2 files changed, 5 insertions(+), 13 deletions(-) rename met/data/wrappers/{read_pickle_dataplane.py => read_tmp_dataplane.py} (77%) rename met/data/wrappers/{write_pickle_dataplane.py => write_tmp_dataplane.py} (87%) diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_tmp_dataplane.py similarity index 77% rename from met/data/wrappers/read_pickle_dataplane.py rename to met/data/wrappers/read_tmp_dataplane.py index dabc2f51e3..e5fb0d6cb0 100644 --- a/met/data/wrappers/read_pickle_dataplane.py +++ b/met/data/wrappers/read_tmp_dataplane.py @@ -1,22 +1,18 @@ ######################################################################## # -# Reads temporary pickle file into memory. +# Reads temporary file into memory. 
# -# usage: /path/to/python read_pickle_dataplane.py pickle.tmp +# usage: /path/to/python read_tmp_dataplane.py dataplane.tmp # ######################################################################## import sys import numpy as np -import pickle import netCDF4 as nc print('Python Script:\t', sys.argv[0]) -# print('Load Pickle:\t', sys.argv[1]) -# met_info = pickle.load(open(sys.argv[1], "rb")) met_info = {} -# netcdf_filename = sys.argv[1] + '.nc4' netcdf_filename = sys.argv[1] print('Read NetCDF:\t', netcdf_filename) diff --git a/met/data/wrappers/write_pickle_dataplane.py b/met/data/wrappers/write_tmp_dataplane.py similarity index 87% rename from met/data/wrappers/write_pickle_dataplane.py rename to met/data/wrappers/write_tmp_dataplane.py index 44b57fea92..985535da5f 100644 --- a/met/data/wrappers/write_pickle_dataplane.py +++ b/met/data/wrappers/write_tmp_dataplane.py @@ -3,14 +3,13 @@ # Adapted from a script provided by George McCabe # Adapted by Randy Bullock # -# usage: /path/to/python write_pickle_dataplane.py \ -# pickle_output_filename .py +# usage: /path/to/python write_tmp_dataplane.py \ +# tmp_output_filename .py # ######################################################################## import os import sys -import pickle import importlib.util import xarray as xr import netCDF4 as nc @@ -39,16 +38,13 @@ else: met_info = { 'attrs': met_in.attrs, 'met_data': met_in.met_data } -print('write_pickle_dataplane') +print('write_tmp_dataplane') print(met_info) -# pickle.dump( met_info, open( pickle_filename, "wb" ) ) - # write NetCDF file ds = nc.Dataset(netcdf_filename, 'w') nx, ny = met_in.met_data.shape -# print(nx, ny) ds.createDimension('x', nx) ds.createDimension('y', ny) dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) From b39ca2888eafdd227e2757f73bd3d22263b0b382 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 17 Feb 2021 08:57:06 -0700 Subject: [PATCH 15/86] Changed script names in Makefile.am. --- met/data/wrappers/Makefile.am | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/met/data/wrappers/Makefile.am b/met/data/wrappers/Makefile.am index d8a6d5a026..821987b273 100644 --- a/met/data/wrappers/Makefile.am +++ b/met/data/wrappers/Makefile.am @@ -23,8 +23,8 @@ wrappersdir = $(pkgdatadir)/wrappers wrappers_DATA = \ generic_python.py \ generic_pickle.py \ - read_pickle_dataplane.py \ - write_pickle_dataplane.py \ + read_tmp_dataplane.py \ + write_tmp_dataplane.py \ write_pickle_mpr.py \ write_pickle_point.py From 7cc2d7779306db1cac55d03c11a67c15b836f7f5 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 17 Feb 2021 09:09:57 -0700 Subject: [PATCH 16/86] Replaced pickle with tmp_nc. 
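For reference, the renamed read/write tmp dataplane wrappers above hand the data plane and its metadata between Python and MET through a temporary NetCDF file: string attributes become global attributes, and the nested 'grid' dictionary is flattened into 'grid.<key>' global attributes that are rebuilt into a dictionary on read. A minimal standalone sketch of that round trip, assuming netCDF4 and numpy are installed; the file name and attribute values below are illustrative only, not taken from MET:

import numpy as np
import netCDF4 as nc

# illustrative data plane and attributes
met_data = np.zeros((2, 3), dtype=np.float64)
attrs = {'units': 'K', 'valid': '20210101_120000',
         'grid': {'type': 'LatLon', 'nx': '2', 'ny': '3'}}

# write side: flatten dict-valued attributes into dotted global attributes
ds = nc.Dataset('tmp_met_nc_example.nc', 'w')
nx, ny = met_data.shape
ds.createDimension('x', nx)
ds.createDimension('y', ny)
dp = ds.createVariable('met_data', met_data.dtype, ('x', 'y'))
dp[:] = met_data
for attr, attr_val in attrs.items():
    if isinstance(attr_val, str):
        setattr(ds, attr, attr_val)
    elif isinstance(attr_val, dict):
        for key in attr_val:
            setattr(ds, attr + '.' + key, attr_val[key])
ds.close()

# read side: rebuild the 'grid' dictionary from the dotted global attributes
ds = nc.Dataset('tmp_met_nc_example.nc', 'r')
met_data_in = ds['met_data'][:]
grid, met_attrs = {}, {}
for attr, attr_val in ds.__dict__.items():
    if 'grid' in attr:
        grid[attr.split('.')[1]] = attr_val
    else:
        met_attrs[attr] = attr_val
met_attrs['grid'] = grid
ds.close()
print(met_attrs)

Keeping everything as plain NetCDF attributes is what lets the later commits in this series drop the pickle file entirely.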
--- .../vx_data2d_python/python_dataplane.cc | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/met/src/libcode/vx_data2d_python/python_dataplane.cc b/met/src/libcode/vx_data2d_python/python_dataplane.cc index d5ace046d0..c1a5a3a163 100644 --- a/met/src/libcode/vx_data2d_python/python_dataplane.cc +++ b/met/src/libcode/vx_data2d_python/python_dataplane.cc @@ -31,15 +31,15 @@ GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_pickle [] = "MET_BASE/wrappers/write_pickle_dataplane.py"; +static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_nc_dataplane.py"; -static const char read_pickle [] = "read_pickle_dataplane"; // NO ".py" suffix +static const char read_tmp_nc [] = "read_tmp_nc_dataplane"; // NO ".py" suffix -static const char pickle_base_name [] = "tmp_met_pickle"; +static const char tmp_nc_base_name [] = "tmp_met_nc"; -static const char pickle_var_name [] = "met_info"; +static const char tmp_nc_var_name [] = "met_info"; -static const char pickle_file_var_name [] = "pickle_filename"; +static const char tmp_nc_file_var_name [] = "tmp_nc_filename"; //////////////////////////////////////////////////////////////////////// @@ -51,7 +51,7 @@ static bool straight_python_dataplane(const char * script_name, Grid & met_grid_out, VarInfoPython &vinfo); -static bool pickle_dataplane(const char * script_name, +static bool tmp_nc_dataplane(const char * script_name, int script_argc, char ** script_argv, const bool use_xarray, DataPlane & met_dp_out, Grid & met_grid_out, VarInfoPython &vinfo); @@ -69,9 +69,9 @@ bool python_dataplane(const char * user_script_name, bool status = false; -if ( (user_ppath = getenv(user_python_path_env)) != 0 ) { // do_pickle = true; +if ( (user_ppath = getenv(user_python_path_env)) != 0 ) { // do_tmp_nc = true; - status = pickle_dataplane(user_script_name, + status = tmp_nc_dataplane(user_script_name, user_script_argc, user_script_argv, use_xarray, met_dp_out, met_grid_out, vinfo); @@ -276,7 +276,7 @@ return ( true ); //////////////////////////////////////////////////////////////////////// -bool pickle_dataplane(const char * user_script_name, +bool tmp_nc_dataplane(const char * user_script_name, int user_script_argc, char ** user_script_argv, const bool use_xarray, DataPlane & met_dp_out, Grid & met_grid_out, VarInfoPython &vinfo) @@ -287,7 +287,7 @@ int j; int status; ConcatString command; ConcatString path; -ConcatString pickle_path; +ConcatString tmp_nc_path; const char * tmp_dir = 0; Wchar_Argv wa; @@ -301,14 +301,14 @@ if ( ! tmp_dir ) tmp_dir = default_tmp_dir; path << cs_erase << tmp_dir << '/' - << pickle_base_name; + << tmp_nc_base_name; -pickle_path = make_temp_file_name(path.text(), 0); +tmp_nc_path = make_temp_file_name(path.text(), 0); command << cs_erase << user_ppath << ' ' // user's path to python - << replace_path(write_pickle) << ' ' // write_pickle.py - << pickle_path << ' ' // pickle output filename + << replace_path(write_tmp_nc) << ' ' // write_tmp_nc.py + << tmp_nc_path << ' ' // tmp_nc output filename << user_script_name; // user's script name for (j=1; j " + mlog << Error << "\ntmp_nc_dataplane() -> " << "command \"" << command.text() << "\" failed ... 
status = " << status << "\n\n"; @@ -346,15 +346,15 @@ if ( PyErr_Occurred() ) { PyErr_Print(); - mlog << Warning << "\npickle_dataplane() -> " + mlog << Warning << "\ntmp_nc_dataplane() -> " << "an error occurred initializing python\n\n"; return ( false ); } -mlog << Debug(3) << "Reading temporary pickle file: " - << pickle_path << "\n"; +mlog << Debug(3) << "Reading temporary tmp_nc file: " + << tmp_nc_path << "\n"; // // set the arguments @@ -362,9 +362,9 @@ mlog << Debug(3) << "Reading temporary pickle file: " StringArray a; -a.add(read_pickle); +a.add(read_tmp_nc); -a.add(pickle_path); +a.add(tmp_nc_path); wa.set(a); @@ -374,7 +374,7 @@ PySys_SetArgv (wa.wargc(), wa.wargv()); // import the python wrapper script as a module // -path = get_short_name(read_pickle); +path = get_short_name(read_tmp_nc); PyObject * module_obj = PyImport_ImportModule (path.text()); @@ -392,7 +392,7 @@ if ( PyErr_Occurred() ) { PyErr_Print(); - mlog << Warning << "\npickle_dataplane() -> " + mlog << Warning << "\ntmp_nc_dataplane() -> " << "an error occurred importing module " << '\"' << path << "\"\n\n"; @@ -402,7 +402,7 @@ if ( PyErr_Occurred() ) { if ( ! module_obj ) { - mlog << Warning << "\npickle_dataplane() -> " + mlog << Warning << "\ntmp_nc_dataplane() -> " << "error running python script\n\n"; return ( false ); @@ -410,7 +410,7 @@ if ( ! module_obj ) { } // - // read the pickle file + // read the tmp_nc file // // @@ -419,13 +419,13 @@ if ( ! module_obj ) { PyObject * module_dict_obj = PyModule_GetDict (module_obj); -PyObject * key_obj = PyUnicode_FromString (pickle_var_name); +PyObject * key_obj = PyUnicode_FromString (tmp_nc_var_name); PyObject * data_obj = PyDict_GetItem (module_dict_obj, key_obj); if ( ! data_obj || ! PyDict_Check(data_obj) ) { - mlog << Error << "\npickle_dataplane() -> " + mlog << Error << "\ntmp_nc_dataplane() -> " << "bad dict object\n\n"; exit ( 1 ); @@ -450,7 +450,7 @@ dataplane_from_numpy_array(np, attrs_dict_obj, met_dp_out, met_grid_out, vinfo); // cleanup // -remove_temp_file(pickle_path); +remove_temp_file(tmp_nc_path); // // done From df0db18e04a59280688781533b5f3092f4171091 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 17 Feb 2021 09:28:59 -0700 Subject: [PATCH 17/86] Fixed wrapper script names. --- met/src/libcode/vx_data2d_python/python_dataplane.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/met/src/libcode/vx_data2d_python/python_dataplane.cc b/met/src/libcode/vx_data2d_python/python_dataplane.cc index c1a5a3a163..8f70af5109 100644 --- a/met/src/libcode/vx_data2d_python/python_dataplane.cc +++ b/met/src/libcode/vx_data2d_python/python_dataplane.cc @@ -31,9 +31,9 @@ GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_nc_dataplane.py"; +static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_dataplane.py"; -static const char read_tmp_nc [] = "read_tmp_nc_dataplane"; // NO ".py" suffix +static const char read_tmp_nc [] = "read_tmp_dataplane"; // NO ".py" suffix static const char tmp_nc_base_name [] = "tmp_met_nc"; From 044c704a2df6a953366d61a48bdccba22aae906a Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Wed, 17 Feb 2021 10:28:33 -0700 Subject: [PATCH 18/86] Test for attrs in met_in.met_data. 
--- met/data/wrappers/write_tmp_dataplane.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/met/data/wrappers/write_tmp_dataplane.py b/met/data/wrappers/write_tmp_dataplane.py index 985535da5f..f7ff2d7559 100644 --- a/met/data/wrappers/write_tmp_dataplane.py +++ b/met/data/wrappers/write_tmp_dataplane.py @@ -11,7 +11,6 @@ import os import sys import importlib.util -import xarray as xr import netCDF4 as nc print('Python Script:\t', sys.argv[0]) @@ -33,10 +32,12 @@ met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) -if isinstance(met_in.met_data, xr.DataArray): - met_info = { 'attrs': met_in.met_data.attrs, 'met_data': met_in.met_data } +met_info = {'met_data': met_in.met_data} +if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: + attrs = met_in.met_data.attrs else: - met_info = { 'attrs': met_in.attrs, 'met_data': met_in.met_data } + attrs = met_in.attrs +met_info['attrs'] = attrs print('write_tmp_dataplane') print(met_info) From d798e9dc0f098e481580dc1ce60e92a4d8179c0f Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 11:48:11 -0700 Subject: [PATCH 19/86] Initial version of read_tmp_point module. --- met/data/wrappers/read_tmp_point.py | 43 +++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 met/data/wrappers/read_tmp_point.py diff --git a/met/data/wrappers/read_tmp_point.py b/met/data/wrappers/read_tmp_point.py new file mode 100644 index 0000000000..0b3214de14 --- /dev/null +++ b/met/data/wrappers/read_tmp_point.py @@ -0,0 +1,43 @@ +""" +Module Name: read_tmp_point.py + +Read MET Point Observations from a text file created by write_tmp_point.py script. + + Message_Type, Station_ID, Valid_Time, Lat, Lon, Elevation, + GRIB_Code or Variable_Name, Level, Height, QC_String, Observation_Value + +Version Date +1.0.0 2021/02/18 David Fillmore Initial version +""" + +__author__ = 'David Fillmore' +__version__ = '1.0.0' +__email__ = 'met_help@ucar.edu' + +import argparse + +def read_tmp_point(filename): + """ + Arguments: + filename (string): temporary file created by write_tmp_point.py + + Returns: + (list of lists): point data + """ + f = open(filename, 'r') + lines = f.readlines() + f.close() + + point_data = [eval(line.strip('\n')) for line in lines] + + return point_data + +if __name__ == '__main__': + """ + Parse command line arguments + """ + parser = argparse.ArgumentParser() + parser.add_argument('--filename', type=str) + args = parser.parse_args() + + point_data = read_tmp_point(args.filename) From 8116e751090c824591fb8528f4ddd7123df43674 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 11:55:05 -0700 Subject: [PATCH 20/86] Added read_tmp_point.py to install list. --- met/data/wrappers/Makefile.am | 1 + 1 file changed, 1 insertion(+) diff --git a/met/data/wrappers/Makefile.am b/met/data/wrappers/Makefile.am index 821987b273..cb35df1dae 100644 --- a/met/data/wrappers/Makefile.am +++ b/met/data/wrappers/Makefile.am @@ -26,6 +26,7 @@ wrappers_DATA = \ read_tmp_dataplane.py \ write_tmp_dataplane.py \ write_pickle_mpr.py \ + read_tmp_point.py \ write_pickle_point.py EXTRA_DIST = ${wrappers_DATA} From 7b5771574f7b2f3c414ecbe27842b8405c280369 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 12:12:54 -0700 Subject: [PATCH 21/86] Start on Python3_Script::read_tmp_point. 
--- met/src/libcode/vx_python3_utils/python3_script.cc | 12 ++++++++++++ met/src/libcode/vx_python3_utils/python3_script.h | 1 + 2 files changed, 13 insertions(+) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 56837b65d0..71c994e40f 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -234,6 +234,18 @@ return; } +//////////////////////////////////////////////////////////////////////// + +void Python3_Script::read_tmp_point(const char * tmp_filename) const + +{ + +mlog << Debug(3) << "Reading temporary point ascii file: " + << tmp_filename << "\n"; + +ConcatString command; + +} //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 20069762f9..312e5e0fb1 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -77,6 +77,7 @@ class Python3_Script { void read_pickle (const char * variable_name, const char * pickle_filename) const; + void read_tmp_point (const char * tmp_filename) const; }; From 5502da9fea63afbe147ca8944be723514667c7fc Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 13:07:51 -0700 Subject: [PATCH 22/86] Write MPR tmp ascii file. --- met/data/wrappers/write_pickle_mpr.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/met/data/wrappers/write_pickle_mpr.py b/met/data/wrappers/write_pickle_mpr.py index 2e3f2d0d04..efde687bf7 100644 --- a/met/data/wrappers/write_pickle_mpr.py +++ b/met/data/wrappers/write_pickle_mpr.py @@ -18,6 +18,7 @@ print('Write Pickle:\t', sys.argv[1]) pickle_filename = sys.argv[1] +tmp_filename = pickle_filename + '.txt' pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] @@ -28,6 +29,8 @@ met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) -print(met_in) +f = open(tmp_filename, 'w') +for line in met_in.mpr_data: + f.write(str(line) + '\n') pickle.dump( met_in.mpr_data, open( pickle_filename, "wb" ) ) From 961b4fc1bab42bc8456129cf3e7c732ea68e8b79 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 13:13:32 -0700 Subject: [PATCH 23/86] Renamed to read_tmp_ascii to use for point point and MPR. --- .../{read_tmp_point.py => read_tmp_ascii.py} | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) rename met/data/wrappers/{read_tmp_point.py => read_tmp_ascii.py} (73%) diff --git a/met/data/wrappers/read_tmp_point.py b/met/data/wrappers/read_tmp_ascii.py similarity index 73% rename from met/data/wrappers/read_tmp_point.py rename to met/data/wrappers/read_tmp_ascii.py index 0b3214de14..126150b168 100644 --- a/met/data/wrappers/read_tmp_point.py +++ b/met/data/wrappers/read_tmp_ascii.py @@ -1,8 +1,10 @@ """ -Module Name: read_tmp_point.py +Module Name: read_tmp_ascii.py -Read MET Point Observations from a text file created by write_tmp_point.py script. 
+Read MET Point Observations from a text file created by write_tmp_point.py script + or MET Matched Pairs from a text file created by write_tmp_mpr.py script +Point observation format: Message_Type, Station_ID, Valid_Time, Lat, Lon, Elevation, GRIB_Code or Variable_Name, Level, Height, QC_String, Observation_Value @@ -16,7 +18,7 @@ import argparse -def read_tmp_point(filename): +def read_tmp_ascii(filename): """ Arguments: filename (string): temporary file created by write_tmp_point.py @@ -28,9 +30,9 @@ def read_tmp_point(filename): lines = f.readlines() f.close() - point_data = [eval(line.strip('\n')) for line in lines] + data = [eval(line.strip('\n')) for line in lines] - return point_data + return data if __name__ == '__main__': """ @@ -40,4 +42,4 @@ def read_tmp_point(filename): parser.add_argument('--filename', type=str) args = parser.parse_args() - point_data = read_tmp_point(args.filename) + data = read_tmp_ascii(args.filename) From 4c0963ddf1efc9e4bc7927f585e59fa7458f871d Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Thu, 18 Feb 2021 13:17:39 -0700 Subject: [PATCH 24/86] Renamed to read_tmp_ascii to use for point point and MPR. --- met/data/wrappers/Makefile.am | 2 +- met/src/libcode/vx_python3_utils/python3_script.cc | 4 ++-- met/src/libcode/vx_python3_utils/python3_script.h | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/met/data/wrappers/Makefile.am b/met/data/wrappers/Makefile.am index cb35df1dae..a8f464313f 100644 --- a/met/data/wrappers/Makefile.am +++ b/met/data/wrappers/Makefile.am @@ -26,7 +26,7 @@ wrappers_DATA = \ read_tmp_dataplane.py \ write_tmp_dataplane.py \ write_pickle_mpr.py \ - read_tmp_point.py \ + read_tmp_ascii.py \ write_pickle_point.py EXTRA_DIST = ${wrappers_DATA} diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 71c994e40f..fdef49a066 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -236,11 +236,11 @@ return; //////////////////////////////////////////////////////////////////////// -void Python3_Script::read_tmp_point(const char * tmp_filename) const +void Python3_Script::read_tmp_ascii(const char * tmp_filename) const { -mlog << Debug(3) << "Reading temporary point ascii file: " +mlog << Debug(3) << "Reading temporary ascii file: " << tmp_filename << "\n"; ConcatString command; diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 312e5e0fb1..fe199058a5 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -77,7 +77,7 @@ class Python3_Script { void read_pickle (const char * variable_name, const char * pickle_filename) const; - void read_tmp_point (const char * tmp_filename) const; + void read_tmp_ascii (const char * tmp_filename) const; }; From 91122be96e435b0690dae5592ca9dd489c052a99 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 19 Feb 2021 13:07:23 -0700 Subject: [PATCH 25/86] Define Python3_Script::import_read_tmp_ascii_py. 
--- met/src/libcode/vx_python3_utils/python3_script.cc | 10 ++++++++++ met/src/libcode/vx_python3_utils/python3_script.h | 2 ++ 2 files changed, 12 insertions(+) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index fdef49a066..0dd3464016 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -27,6 +27,8 @@ using namespace std; static const char sq = '\''; // single quote +static const char read_tmp_ascii_py [] = "MET_BASE/wrappers/read_tmp_ascii.py"; + //////////////////////////////////////////////////////////////////////// @@ -236,6 +238,14 @@ return; //////////////////////////////////////////////////////////////////////// +void Python3_Script::import_read_tmp_ascii_py(void) const + +{ + +} + +//////////////////////////////////////////////////////////////////////// + void Python3_Script::read_tmp_ascii(const char * tmp_filename) const { diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index fe199058a5..7a8aec210e 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -77,6 +77,8 @@ class Python3_Script { void read_pickle (const char * variable_name, const char * pickle_filename) const; + void import_read_tmp_ascii_py (void) const; + void read_tmp_ascii (const char * tmp_filename) const; }; From fef8484cfb23c63af53e869667e12a82d76c6f73 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 19 Feb 2021 13:20:31 -0700 Subject: [PATCH 26/86] Call Python3_Script::import_read_tmp_ascii_py. --- met/src/libcode/vx_python3_utils/python3_script.cc | 7 +++++++ met/src/tools/other/ascii2nc/python_handler.cc | 2 ++ 2 files changed, 9 insertions(+) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 0dd3464016..99c45b3a89 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -242,6 +242,13 @@ void Python3_Script::import_read_tmp_ascii_py(void) const { +ConcatString module; + +module << cs_erase + << replace_path(read_tmp_ascii_py); + +mlog << Debug(3) << "Importing " << module << "\n"; + } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index e2733a605e..76e9dce677 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -379,6 +379,8 @@ wrapper = generic_pickle_wrapper; Python3_Script script(wrapper.text()); +script.import_read_tmp_ascii_py(); + script.read_pickle(list_name, pickle_path.text()); PyObject * obj = script.lookup(list_name); From 93e97624ed098724c0a1c3ab87d000d3e742353a Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 19 Feb 2021 18:04:57 -0700 Subject: [PATCH 27/86] Append MET_BASE/wrappers to sys.path. 
--- met/src/libcode/vx_python3_utils/python3_script.cc | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 99c45b3a89..1364dc7e3c 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -247,6 +247,19 @@ ConcatString module; module << cs_erase << replace_path(read_tmp_ascii_py); +ConcatString command; + +run_python_string("import sys"); + +command << cs_erase + << "sys.path.append(\"" + << module.dirname().c_str() + << "\")"; + +mlog << Debug(3) << command << "\n"; + +// run_python_string(command.text()); + mlog << Debug(3) << "Importing " << module << "\n"; } From 44d832872133db2a77dd1ed2d3e7ed3e690e3add Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 19 Feb 2021 18:26:10 -0700 Subject: [PATCH 28/86] Finished implementation of Python3_Script::import_read_tmp_ascii_py. --- met/src/libcode/vx_python3_utils/python3_script.cc | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 1364dc7e3c..36cade0317 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -258,9 +258,15 @@ command << cs_erase mlog << Debug(3) << command << "\n"; -// run_python_string(command.text()); +run_python_string(command.text()); + +mlog << Debug(2) << "Importing " << module << "\n"; + +command << cs_erase << "import read_tmp_ascii"; -mlog << Debug(3) << "Importing " << module << "\n"; +mlog << Debug(3) << command << "\n"; + +run_python_string(command.text()); } From 3953aba4190f0d50cf45512931d7ac4dc10eef7b Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 19 Feb 2021 19:30:56 -0700 Subject: [PATCH 29/86] Call Python3_Script::read_tmp_ascii in python_handler. 
--- met/src/libcode/vx_python3_utils/python3_script.cc | 10 +++++++++- met/src/tools/other/ascii2nc/python_handler.cc | 5 +++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 36cade0317..bb8c40f44e 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -276,11 +276,19 @@ void Python3_Script::read_tmp_ascii(const char * tmp_filename) const { -mlog << Debug(3) << "Reading temporary ascii file: " +mlog << Debug(2) << "Reading temporary ascii file: " << tmp_filename << "\n"; ConcatString command; +command << "read_tmp_ascii.read_tmp_ascii(\"" + << tmp_filename + << "\")"; + +mlog << Debug(3) << command << "\n"; + +run_python_string(command.text()); + } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 76e9dce677..35e697045e 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -332,6 +332,7 @@ const int N = user_script_args.n(); ConcatString command; ConcatString path; ConcatString pickle_path; +ConcatString tmp_ascii_path; const char * tmp_dir = 0; int status; @@ -348,6 +349,8 @@ path << cs_erase << pickle_base_name; pickle_path = make_temp_file_name(path.text(), 0); +tmp_ascii_path = make_temp_file_name(path.text(), 0); +tmp_ascii_path << ".txt"; command << cs_erase << user_path_to_python << ' ' // user's path to python @@ -383,6 +386,8 @@ script.import_read_tmp_ascii_py(); script.read_pickle(list_name, pickle_path.text()); +script.read_tmp_ascii(tmp_ascii_path.text()); + PyObject * obj = script.lookup(list_name); if ( ! PyList_Check(obj) ) { From 25961d6981f08b8ffd36e9819c4c15f3b9fdf854 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 21 Feb 2021 18:40:30 -0700 Subject: [PATCH 30/86] Revised python3_script::read_tmp_ascii with call to run, PyRun_String. --- met/src/libcode/vx_python3_utils/python3_script.cc | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index bb8c40f44e..9afac9a596 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -287,7 +287,17 @@ command << "read_tmp_ascii.read_tmp_ascii(\"" mlog << Debug(3) << command << "\n"; -run_python_string(command.text()); +PyErr_Clear(); + +run(command.text()); + +if ( PyErr_Occurred() ) { + + mlog << Error << "\nPython3_Script::read_tmp_ascii() -> " + << "command \"" << command << "\" failed!\n\n"; + + exit ( 1 ); +} } From 794e8fb6173a9514cf388b7a75b09a037aa7a8d3 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 21 Feb 2021 18:48:45 -0700 Subject: [PATCH 31/86] Return PyObject* from Python3_Script::run. 
--- met/src/libcode/vx_python3_utils/python3_script.cc | 10 +++++++--- met/src/libcode/vx_python3_utils/python3_script.h | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 9afac9a596..b143619068 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -165,10 +165,12 @@ return ( var ); //////////////////////////////////////////////////////////////////////// -void Python3_Script::run(const char * command) const +PyObject * Python3_Script::run(const char * command) const { +PyObject * pobj; + if ( empty(command) ) { mlog << Error << "\nPython3_Script::run(const char *) -> " @@ -178,7 +180,9 @@ if ( empty(command) ) { } -if ( ! PyRun_String(command, Py_file_input, Dict, Dict) ) { +pobj = PyRun_String(command, Py_file_input, Dict, Dict); + +if ( ! pobj ) { mlog << Error << "\nPython3_Script::run(const char *) -> " << "command \"" << command << "\" failed!\n\n"; @@ -190,7 +194,7 @@ if ( ! PyRun_String(command, Py_file_input, Dict, Dict) ) { fflush(stdout); fflush(stderr); -return; +return pobj; } diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 7a8aec210e..5a765aeabb 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -73,7 +73,7 @@ class Python3_Script { PyObject * lookup(const char * name) const; - void run(const char * command) const; // runs a command in the namespace of the script + PyObject * run(const char * command) const; // runs a command in the namespace of the script void read_pickle (const char * variable_name, const char * pickle_filename) const; From d569cfba0739b4849b57bbead4f2e7b96fe42b95 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 21 Feb 2021 18:58:06 -0700 Subject: [PATCH 32/86] Restored call to run_python_string for now. --- met/src/libcode/vx_python3_utils/python3_script.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index b143619068..9a3c2ccaf9 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -293,7 +293,10 @@ mlog << Debug(3) << command << "\n"; PyErr_Clear(); -run(command.text()); +PyObject * pobj; + +// pobj = run(command.text()); +run_python_string(command.text()); if ( PyErr_Occurred() ) { From ab0f2c6893996b6ac9a2d069fe7b09ca8394b915 Mon Sep 17 00:00:00 2001 From: johnhg Date: Fri, 26 Feb 2021 09:26:18 -0700 Subject: [PATCH 33/86] Per #1429, enhance error message from DataLine::get_item(). (#1682) --- met/src/basic/vx_util/data_line.cc | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/met/src/basic/vx_util/data_line.cc b/met/src/basic/vx_util/data_line.cc index ab16353db1..1853449b65 100644 --- a/met/src/basic/vx_util/data_line.cc +++ b/met/src/basic/vx_util/data_line.cc @@ -253,7 +253,11 @@ const char * DataLine::get_item(int k) const if ( (k < 0) || (k >= N_items) ) { - mlog << Error << "\nDataLine::get_item(int) -> range check error\n\n"; + ConcatString filename = (get_file() ? 
get_file()->filename() : ""); + + mlog << Error << "\nDataLine::get_item(int) -> " + << "range check error while reading item number " << k+1 + << " from file \"" << filename << "\"\n\n"; exit ( 1 ); @@ -640,7 +644,8 @@ LineDataFile::LineDataFile(const LineDataFile &) { -mlog << Error << "\nLineDataFile::LineDataFile(const LineDataFile &) -> should never be called!\n\n"; +mlog << Error << "\nLineDataFile::LineDataFile(const LineDataFile &) -> " + << "should never be called!\n\n"; exit ( 1 ); @@ -654,7 +659,8 @@ LineDataFile & LineDataFile::operator=(const LineDataFile &) { -mlog << Error << "\nLineDataFile::operator=(const LineDataFile &) -> should never be called!\n\n"; +mlog << Error << "\nLineDataFile::operator=(const LineDataFile &) -> " + << "should never be called!\n\n"; exit ( 1 ); @@ -698,7 +704,8 @@ in = new ifstream; if ( !in ) { - mlog << Error << "\nLineDataFile::open(const char *) -> can't allocate input stream\n\n"; + mlog << Error << "\nLineDataFile::open(const char *) -> " + << "can't allocate input stream\n\n"; exit ( 1 ); From b2754b405d8f2212fd24f29245faed5be4f873d9 Mon Sep 17 00:00:00 2001 From: johnhg Date: Fri, 26 Feb 2021 10:17:01 -0700 Subject: [PATCH 34/86] Feature 1429 tc_log second try (#1686) * Per #1429, enhance error message from DataLine::get_item(). * Per #1429, I realize that the line number actually is readily available in the DataLine class... so include it in the error message. --- met/src/basic/vx_util/data_line.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/met/src/basic/vx_util/data_line.cc b/met/src/basic/vx_util/data_line.cc index 1853449b65..da012b9a0a 100644 --- a/met/src/basic/vx_util/data_line.cc +++ b/met/src/basic/vx_util/data_line.cc @@ -253,11 +253,12 @@ const char * DataLine::get_item(int k) const if ( (k < 0) || (k >= N_items) ) { - ConcatString filename = (get_file() ? get_file()->filename() : ""); + ConcatString cs = (File ? File->filename() : ""); mlog << Error << "\nDataLine::get_item(int) -> " - << "range check error while reading item number " << k+1 - << " from file \"" << filename << "\"\n\n"; + << "range check error reading line number " << LineNumber + << ", item number " << k+1 << " of " << N_items + << " from file \"" << cs << "\"\n\n"; exit ( 1 ); From a1aead4ebcb2edc6a6deb4f225bc9f0a042aa477 Mon Sep 17 00:00:00 2001 From: johnhg Date: Fri, 26 Feb 2021 15:44:33 -0700 Subject: [PATCH 35/86] Feature 1588 ps_log (#1687) * Per #1588, updated pair_data_point.h/.cc to add detailed Debug(4) log messages, as specified in the GitHub issue. Do still need to test each of these cases to confirm that the log messages look good. * Per #1588, switch very detailed interpolation details from debug level 4 to 5. * Per #1588, remove the Debug(4) log message about duplicate obs since it's been moved up to a higher level. * Per #1588, add/update detailed log messages when processing point observations for bad data, off the grid, bad topo, big topo diffs, bad fcst value, and duplicate obs. 
--- met/src/basic/vx_util/interp_util.cc | 2 +- met/src/libcode/vx_statistics/pair_base.cc | 8 +- .../libcode/vx_statistics/pair_data_point.cc | 113 ++++++++++++++---- .../libcode/vx_statistics/pair_data_point.h | 7 ++ 4 files changed, 98 insertions(+), 32 deletions(-) diff --git a/met/src/basic/vx_util/interp_util.cc b/met/src/basic/vx_util/interp_util.cc index 90f3ae8c3f..9cb8a3d552 100644 --- a/met/src/basic/vx_util/interp_util.cc +++ b/met/src/basic/vx_util/interp_util.cc @@ -686,7 +686,7 @@ double interp_geog_match(const DataPlane &dp, const GridTemplate >, } if(!is_bad_data(interp_v)) { - mlog << Debug(4) + mlog << Debug(5) << "For observation value " << obs_v << " at grid (x, y) = (" << obs_x << ", " << obs_y << ") found forecast value " << interp_v << " at nearest matching geography point (" diff --git a/met/src/libcode/vx_statistics/pair_base.cc b/met/src/libcode/vx_statistics/pair_base.cc index bfcebaad0c..490037c78b 100644 --- a/met/src/libcode/vx_statistics/pair_base.cc +++ b/met/src/libcode/vx_statistics/pair_base.cc @@ -424,13 +424,7 @@ bool PairBase::add_point_obs(const char *sid, if(check_unique) { vector::iterator o_it = (*it).second.obs.begin(); for(;o_it != (*it).second.obs.end(); o_it++) { - if( (*o_it).ut == ut) { - mlog << Debug(4) - << "Skipping duplicate observation for [lat:lon:level:elevation] = [" - << obs_key << "] valid at " << unix_to_yyyymmdd_hhmmss(ut) - << " with value = " << o << "\n"; - return false; - } + if((*o_it).ut == ut) return false; } } diff --git a/met/src/libcode/vx_statistics/pair_data_point.cc b/met/src/libcode/vx_statistics/pair_data_point.cc index f5f1bc94e8..813ab5f815 100644 --- a/met/src/libcode/vx_statistics/pair_data_point.cc +++ b/met/src/libcode/vx_statistics/pair_data_point.cc @@ -603,12 +603,12 @@ void VxPairDataPoint::set_pd_size(int types, int masks, int interps) { rej_dup[i][j] = new int [n_interp]; for(k=0; kname() ) { + if(var_name != obs_info->name()) { rej_var++; return; } @@ -805,10 +805,10 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, // Check if the observation quality flag is included in the list if(obs_qty_filt.n() && strcmp(obs_qty, "")) { bool qty_match = false; - for(i=0; imagic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation with bad data value:\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; rej_obs++; return; } @@ -845,6 +852,15 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, // Check if the observation's lat/lon is on the grid if(x < 0 || x >= gr.nx() || y < 0 || y >= gr.ny()) { + mlog << Debug(4) + << "For " << fcst_info->magic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation off the grid where (x, y) = (" + << x << ", " << y << ") and grid (nx, ny) = (" << gr.nx() + << ", " << gr.ny() << "):\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; rej_grd++; return; } @@ -861,12 +877,14 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, // Skip bad topography values if(is_bad_data(hdr_elv) || is_bad_data(topo)) { mlog << Debug(4) - << "Skipping observation due to missing topography values for " - << "[msg_typ:sid:lat:lon:elevation] = [" - << hdr_typ_str << ":" << hdr_sid_str << ":" - << hdr_lat << ":" << -1.0*hdr_lon << ":" - << hdr_elv << "] and model topography = " - << topo << ".\n"; + << "For " << fcst_info->magic_str() << " versus " + << 
obs_info->magic_str() + << ", skipping observation due to bad topography values " + << "where observation elevation = " << hdr_elv + << " and model topography = " << topo << ":\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; rej_topo++; return; } @@ -874,14 +892,16 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, // Check the topography difference threshold if(!sfc_info.topo_use_obs_thresh.check(topo - hdr_elv)) { mlog << Debug(4) - << "Skipping observation for topography difference since " + << "For " << fcst_info->magic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation due to topography difference " + << "where observation elevation (" << hdr_elv + << ") minus model topography (" << topo << ") = " << topo - hdr_elv << " is not " - << sfc_info.topo_use_obs_thresh.get_str() << " for " - << "[msg_typ:sid:lat:lon:elevation] = [" - << hdr_typ_str << ":" << hdr_sid_str << ":" - << hdr_lat << ":" << -1.0*hdr_lon << ":" - << hdr_elv << "] and model topography = " - << topo << ".\n"; + << sfc_info.topo_use_obs_thresh.get_str() << ":\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; rej_topo++; return; } @@ -1099,6 +1119,14 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, } if(is_bad_data(fcst_v)) { + mlog << Debug(4) + << "For " << fcst_info->magic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation due to bad data in the interpolated " + << "forecast value:\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; inc_count(rej_fcst, i, j, k); continue; } @@ -1113,6 +1141,13 @@ void VxPairDataPoint::add_point_obs(float *hdr_arr, const char *hdr_typ_str, hdr_lat, hdr_lon, obs_x, obs_y, hdr_ut, obs_lvl, obs_hgt, fcst_v, obs_v, obs_qty, cmn_v, csd_v, wgt_v)) { + mlog << Debug(4) + << "For " << fcst_info->magic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation since it is a duplicate:\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; inc_count(rej_dup, i, j, k); } @@ -1494,6 +1529,36 @@ PairDataPoint subset_climo_cdf_bin(const PairDataPoint &pd, return(out_pd); } +//////////////////////////////////////////////////////////////////////// + +// Write the point observation in the MET point format for logging +ConcatString point_obs_to_string(float *hdr_arr, const char *hdr_typ_str, + const char *hdr_sid_str, unixtime hdr_ut, + const char *obs_qty, float *obs_arr, + const char *var_name) { + ConcatString obs_cs, name; + + if((var_name != 0) && (0 < strlen(var_name))) name = var_name; + else name = obs_arr[1]; + + // + // Write the 11-column MET point format: + // Message_Type Station_ID Valid_Time(YYYYMMDD_HHMMSS) + // Lat(Deg North) Lon(Deg East) Elevation(msl) + // Var_Name(or GRIB_Code) Level Height(msl or agl) + // QC_String Observation_Value + // + obs_cs << " " + << hdr_typ_str << " " << hdr_sid_str << " " + << unix_to_yyyymmdd_hhmmss(hdr_ut) << " " + << hdr_arr[0] << " " << -1.0*hdr_arr[1] << " " + << hdr_arr[2] << " " << name << " " + << obs_arr[2] << " " << obs_arr[3] << " " + << obs_qty << " " << obs_arr[4]; + + return(obs_cs); +} + //////////////////////////////////////////////////////////////////////// // // End miscellaneous functions diff --git a/met/src/libcode/vx_statistics/pair_data_point.h 
b/met/src/libcode/vx_statistics/pair_data_point.h index 4c04625bd6..57ecb54aae 100644 --- a/met/src/libcode/vx_statistics/pair_data_point.h +++ b/met/src/libcode/vx_statistics/pair_data_point.h @@ -254,6 +254,13 @@ extern void subset_wind_pairs(const PairDataPoint &, extern PairDataPoint subset_climo_cdf_bin(const PairDataPoint &, const ThreshArray &, int i_bin); +// Write the point observation in the MET point format for logging +extern ConcatString point_obs_to_string( + float *hdr_arr, const char *hdr_typ_str, + const char *hdr_sid_str, unixtime hdr_ut, + const char *obs_qty, float *obs_arr, + const char *var_name); + //////////////////////////////////////////////////////////////////////// #endif // __PAIR_DATA_POINT_H__ From 2ba6cd99d40a0f276fc47522e41f4b0c061a37ef Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Fri, 26 Feb 2021 16:16:23 -0700 Subject: [PATCH 36/86] #1454 Disabled plot_data_plane_CESM_SSMI_microwave and plot_data_plane_CESM_sea_ice_nc becaues of not evenly spaced --- test/xml/unit_plot_data_plane.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/xml/unit_plot_data_plane.xml b/test/xml/unit_plot_data_plane.xml index 1eb25e614c..67ecba95bf 100644 --- a/test/xml/unit_plot_data_plane.xml +++ b/test/xml/unit_plot_data_plane.xml @@ -343,6 +343,7 @@ + - - &MET_BIN;/plot_data_plane \ From 996197cff8c7272906602de8245ae26cd91549d5 Mon Sep 17 00:00:00 2001 From: johnhg Date: Mon, 1 Mar 2021 18:03:30 -0700 Subject: [PATCH 44/86] Feature 1684 bss and 1685 single reference model (#1689) * Per #1684, move an instance of the ClimoCDFInfo class into PairBase. Also define derive_climo_vals() and derive_climo_prob() utility functions. * Add to VxPairDataPoint and VxPairDataEnsemble functions to set the ClimoCDFInfo class. * Per #1684, update ensemble_stat and point_stat to set the ClimoCDFInfo object based on the contents of the config file. * Per #1684, update the vx_statistics library and stat_analysis to make calls to the new derive_climo_vals() and derive_climo_prob() functions. * Per #1684, since cdf_info is a member of PairBase class, need to handle it in the PairDataPoint and PairDataEnsemble assignment and subsetting logic. * Per #1684, during development, I ran across and then updated this log message. * Per #1684, working on log messages and figured that the regridding climo data should be moved from Debug(1) to at least Debug(2). * Per #1684 and #1685, update the logic for the derive_climo_vals() utility function. If only a single climo bin is requested, just return the climo mean. Otherwise, sample the requested number of values. * Per #1684, just fixing the format of this log message. * Per #1684, add a STATLine::get_offset() member function. * Per #1684, update parse_orank_line() logic. Rather than calling NumArray::clear() call NumArray::erase() to preserve allocated memory. Also, instead of parsing ensemble member values by column name, parse them by offset number. * Per #1684, call EnsemblePairData::extend() when parsing ORANK data to allocate one block of memory instead of bunches of litte ones. * Per #1684 and #1685, add another call to Ensemble-Stat to test computing the CRPSCL_EMP from a single climo mean instead of using the full climo distribution. * Per #1684 and #1685, update ensemble-stat docs about computing CRPSS_EMP relative to a single reference model. * Per #1684, need to update Grid-Stat to store the climo cdf info in the PairDataPoint objects. * Per #1684, remove debug print statements. 
* Per #1684, need to set cdf_info when aggregating MPR lines in Stat-Analysis. * Per #1684 and #1685, update PairDataEnsemble::compute_pair_vals() to print a log message indicating the climo data being used as reference: For a climo distribution defined by mean and stdev: DEBUG 3: Computing ensemble statistics relative to a 9-member climatological ensemble. For a single deterministic reference: DEBUG 3: Computing ensemble statistics relative to the climatological mean. --- met/docs/Users_Guide/ensemble-stat.rst | 6 +- met/src/libcode/vx_analysis_util/stat_line.cc | 16 +- met/src/libcode/vx_analysis_util/stat_line.h | 9 +- .../libcode/vx_statistics/compute_stats.cc | 3 +- met/src/libcode/vx_statistics/ens_stats.cc | 11 +- met/src/libcode/vx_statistics/pair_base.cc | 100 ++++++-- met/src/libcode/vx_statistics/pair_base.h | 48 ++-- .../vx_statistics/pair_data_ensemble.cc | 58 ++--- .../vx_statistics/pair_data_ensemble.h | 8 +- .../libcode/vx_statistics/pair_data_point.cc | 21 ++ .../libcode/vx_statistics/pair_data_point.h | 2 + met/src/libcode/vx_statistics/read_climo.cc | 2 +- .../tools/core/ensemble_stat/ensemble_stat.cc | 2 +- .../ensemble_stat/ensemble_stat_conf_info.cc | 4 +- met/src/tools/core/grid_stat/grid_stat.cc | 73 +++--- met/src/tools/core/point_stat/point_stat.cc | 2 +- .../core/point_stat/point_stat_conf_info.cc | 3 + .../core/stat_analysis/aggr_stat_line.cc | 45 ++-- .../tools/core/stat_analysis/aggr_stat_line.h | 1 + .../core/stat_analysis/parse_stat_line.cc | 9 +- .../tools/core/stat_analysis/stat_analysis.cc | 4 +- test/config/EnsembleStatConfig_one_cdf_bin | 237 ++++++++++++++++++ test/xml/unit_climatology.xml | 29 +++ 23 files changed, 550 insertions(+), 143 deletions(-) create mode 100644 test/config/EnsembleStatConfig_one_cdf_bin diff --git a/met/docs/Users_Guide/ensemble-stat.rst b/met/docs/Users_Guide/ensemble-stat.rst index 9a4b8f476e..cfe920c0e6 100644 --- a/met/docs/Users_Guide/ensemble-stat.rst +++ b/met/docs/Users_Guide/ensemble-stat.rst @@ -36,7 +36,11 @@ The ranked probability score (RPS) is included in the Ranked Probability Score ( Climatology data ~~~~~~~~~~~~~~~~ -The Ensemble-Stat output includes at least three statistics computed relative to external climatology data. The climatology is defined by mean and standard deviation fields, and both are required in the computation of ensemble skill score statistics. MET assumes that the climatology follows a normal distribution, defined by the mean and standard deviation at each point. When computing the CRPS skill score for (:ref:`Gneiting et al., 2004 `) the reference CRPS statistic is computed using the climatological mean and standard deviation directly. When computing the CRPS skill score for (:ref:`Hersbach, 2000 `) the reference CRPS statistic is computed by selecting equal-area-spaced values from the assumed normal climatological distribution. The number of points selected is determined by the *cdf_bins* setting in the *climo_cdf* dictionary. The reference CRPS is computed empirically from this ensemble of climatology values. The climatological distribution is also used for the RPSS. The forecast RPS statistic is computed from a probabilistic contingency table in which the probabilities are derived from the ensemble member values. In a simliar fashion, the climatogical probability for each observed value is derived from the climatological distribution. The area of the distribution to the left of the observed value is interpreted as the climatological probability. 
These climatological probabilities are also evaluated using a probabilistic contingency table from which the reference RPS score is computed. The skill scores are derived by comparing the forecast statistic to the reference climatology statistic. +The Ensemble-Stat output includes at least three statistics computed relative to external climatology data. The climatology is defined by mean and standard deviation fields, and typically both are required in the computation of ensemble skill score statistics. MET assumes that the climatology follows a normal distribution, defined by the mean and standard deviation at each point. + +When computing the CRPS skill score for (:ref:`Gneiting et al., 2004 `) the reference CRPS statistic is computed using the climatological mean and standard deviation directly. When computing the CRPS skill score for (:ref:`Hersbach, 2000 `) the reference CRPS statistic is computed by selecting equal-area-spaced values from the assumed normal climatological distribution. The number of points selected is determined by the *cdf_bins* setting in the *climo_cdf* dictionary. The reference CRPS is computed empirically from this ensemble of climatology values. If the number of bins is set to 1, the climatological CRPS is computed using only the climatological mean value. In this way, the empirical CRPSS may be computed relative to a single model rather than a climatological distribution. + +The climatological distribution is also used for the RPSS. The forecast RPS statistic is computed from a probabilistic contingency table in which the probabilities are derived from the ensemble member values. In a similar fashion, the climatological probability for each observed value is derived from the climatological distribution. The area of the distribution to the left of the observed value is interpreted as the climatological probability. These climatological probabilities are also evaluated using a probabilistic contingency table from which the reference RPS score is computed. The skill scores are derived by comparing the forecast statistic to the reference climatology statistic.
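+
+For example, the following *climo_cdf* setting (a minimal sketch; any other *climo_cdf* options are left at their defaults) requests a single climatological CDF bin, so that the empirical CRPSS described above is computed relative to a single reference model::
+
+  climo_cdf = {
+     cdf_bins = 1;
+  }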
Ensemble observation error ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/met/src/libcode/vx_analysis_util/stat_line.cc b/met/src/libcode/vx_analysis_util/stat_line.cc index b1d66871e0..9c2dadb7e8 100644 --- a/met/src/libcode/vx_analysis_util/stat_line.cc +++ b/met/src/libcode/vx_analysis_util/stat_line.cc @@ -327,6 +327,18 @@ bool STATLine::has(const char *col_str) const { +return ( !is_bad_data(get_offset(col_str)) ); + +} + + +//////////////////////////////////////////////////////////////////////// + + +int STATLine::get_offset(const char *col_str) const + +{ + int offset = bad_data_int; int dim = bad_data_int; @@ -353,10 +365,10 @@ if ( is_bad_data(offset) ) { } // - // Return whether a valid offset value was found + // Return the offset value // -return ( !is_bad_data(offset) ); +return ( offset ); } diff --git a/met/src/libcode/vx_analysis_util/stat_line.h b/met/src/libcode/vx_analysis_util/stat_line.h index ed52829950..6362031d58 100644 --- a/met/src/libcode/vx_analysis_util/stat_line.h +++ b/met/src/libcode/vx_analysis_util/stat_line.h @@ -61,10 +61,11 @@ class STATLine : public DataLine { // retrieve values of the header columns // - bool has (const char *) const; - ConcatString get (const char *, bool check_na = true) const; - const char * get_item (const char *, bool check_na = true) const; - const char * get_item (int, bool check_na = true) const; + bool has (const char *) const; + int get_offset(const char *) const; + ConcatString get (const char *, bool check_na = true) const; + const char * get_item (const char *, bool check_na = true) const; + const char * get_item (int, bool check_na = true) const; const char * version () const; const char * model () const; diff --git a/met/src/libcode/vx_statistics/compute_stats.cc b/met/src/libcode/vx_statistics/compute_stats.cc index 7e6e74f66f..211fe9860e 100644 --- a/met/src/libcode/vx_statistics/compute_stats.cc +++ b/met/src/libcode/vx_statistics/compute_stats.cc @@ -743,7 +743,8 @@ void compute_pctinfo(const PairDataPoint &pd, bool pstd_flag, // Use input climatological probabilities or derive them if(cmn_flag) { if(cprob_in) climo_prob = *cprob_in; - else climo_prob = derive_climo_prob(pd.cmn_na, pd.csd_na, + else climo_prob = derive_climo_prob(pd.cdf_info, + pd.cmn_na, pd.csd_na, pct_info.othresh); } diff --git a/met/src/libcode/vx_statistics/ens_stats.cc b/met/src/libcode/vx_statistics/ens_stats.cc index 4748b186ac..ebfe4b5e28 100644 --- a/met/src/libcode/vx_statistics/ens_stats.cc +++ b/met/src/libcode/vx_statistics/ens_stats.cc @@ -484,10 +484,10 @@ void RPSInfo::set(const PairDataEnsemble &pd) { // Check that thresholds are actually defined if(fthresh.n() == 0) { mlog << Error << "\nRPSInfo::set(const PairDataEnsemble &) -> " - << "no thresholds provided to compute the RPS line type! 
" - << "Specify thresholds using the \"" - << conf_key_prob_cat_thresh - << "\" configuration file option.\n\n"; + << "no thresholds provided to compute the RPS line type!\n" + << "Specify thresholds using the \"" << conf_key_prob_cat_thresh + << "\" configuration file option or by providing climatological " + << "mean and standard deviation data.\n\n"; exit(1); } @@ -522,7 +522,8 @@ void RPSInfo::set(const PairDataEnsemble &pd) { climo_pct.zero_out(); // Derive climatological probabilities - if(cmn_flag) climo_prob = derive_climo_prob(pd.cmn_na, pd.csd_na, + if(cmn_flag) climo_prob = derive_climo_prob(pd.cdf_info, + pd.cmn_na, pd.csd_na, fthresh[i]); // Loop over the observations diff --git a/met/src/libcode/vx_statistics/pair_base.cc b/met/src/libcode/vx_statistics/pair_base.cc index 490037c78b..8066ed262f 100644 --- a/met/src/libcode/vx_statistics/pair_base.cc +++ b/met/src/libcode/vx_statistics/pair_base.cc @@ -74,6 +74,8 @@ void PairBase::clear() { interp_mthd = InterpMthd_None; interp_shape = GridTemplateFactory::GridTemplate_None; + cdf_info.clear(); + o_na.clear(); x_na.clear(); y_na.clear(); @@ -121,6 +123,8 @@ void PairBase::erase() { interp_mthd = InterpMthd_None; interp_shape = GridTemplateFactory::GridTemplate_None; + cdf_info.clear(); + o_na.erase(); x_na.erase(); y_na.erase(); @@ -267,6 +271,15 @@ void PairBase::set_interp_shape(GridTemplateFactory::GridTemplates shape) { //////////////////////////////////////////////////////////////////////// +void PairBase::set_climo_cdf_info(const ClimoCDFInfo &info) { + + cdf_info = info; + + return; +} + +//////////////////////////////////////////////////////////////////////// + void PairBase::set_fcst_ut(unixtime ut){ fcst_ut = ut; @@ -1010,11 +1023,49 @@ bool set_climo_flag(const NumArray &f_na, const NumArray &c_na) { //////////////////////////////////////////////////////////////////////// -NumArray derive_climo_prob(const NumArray &mn_na, const NumArray &sd_na, +void derive_climo_vals(const ClimoCDFInfo &cdf_info, + double m, double s, + NumArray &climo_vals) { + + // Initialize + climo_vals.erase(); + + // cdf_info.cdf_ta starts with >=0.0 and ends with >=1.0. + // The number of bins is the number of thresholds minus 1. 
+ + // Check for bad mean value + if(is_bad_data(m) || cdf_info.cdf_ta.n() < 2) { + return; + } + // Single climo bin + else if(cdf_info.cdf_ta.n() == 2) { + climo_vals.add(m); + } + // Check for bad standard deviation value + else if(is_bad_data(s)) { + return; + } + // Extract climo distribution values + else { + + // Skip the first and last thresholds + for(int i=1; i " + mlog << Error << "\nderive_climo_prob() -> " << "climatological threshold \"" << othresh.get_str() << "\" cannot be converted to a probability!\n\n"; exit(1); @@ -1060,23 +1111,17 @@ NumArray derive_climo_prob(const NumArray &mn_na, const NumArray &sd_na, // threshold else if(n_mn > 0 && n_sd > 0) { - mlog << Debug(2) - << "Deriving normal approximation of climatological " - << "probabilities for threshold " << othresh.get_str() - << ".\n"; + // The first (>=0.0) and last (>=1.0) climo thresholds are omitted + mlog << Debug(4) + << "Deriving climatological probabilities for threshold " + << othresh.get_str() << " by sampling " << cdf_info.cdf_ta.n()-2 + << " values from the normal climatological distribution.\n"; - // Compute probability value for each point + // Compute the probability by sampling from the climo distribution + // and deriving the event frequency for(i=0; i 2) { + mlog << Debug(3) + << "Computing ensemble statistics relative to a " + << cdf_info.cdf_ta.n() - 2 + << "-member climatological ensemble.\n"; + } + else { + mlog << Debug(3) + << "No reference climatology data provided.\n"; + } + // Compute the rank for each observation for(i=0, n_pair=0, n_skip_const=0, n_skip_vld=0; i=0.0) and last (>=1.0) climo CDF thresholds - for(int i=1; igrid() == vx_grid)) { - mlog << Debug(1) + mlog << Debug(2) << "Regridding the " << cur_ut_cs << " \"" << info->magic_str() << "\" climatology field to the verification grid.\n"; diff --git a/met/src/tools/core/ensemble_stat/ensemble_stat.cc b/met/src/tools/core/ensemble_stat/ensemble_stat.cc index 31ab7b8b56..f3affe5895 100644 --- a/met/src/tools/core/ensemble_stat/ensemble_stat.cc +++ b/met/src/tools/core/ensemble_stat/ensemble_stat.cc @@ -1735,7 +1735,7 @@ void process_grid_vx() { // Initialize pd_all.clear(); pd_all.set_ens_size(n_vx_vld[i]); - pd_all.set_climo_cdf(conf_info.vx_opt[i].cdf_info); + pd_all.set_climo_cdf_info(conf_info.vx_opt[i].cdf_info); pd_all.skip_const = conf_info.vx_opt[i].vx_pd.pd[0][0][0].skip_const; // Apply the current mask to the fields and compute the pairs diff --git a/met/src/tools/core/ensemble_stat/ensemble_stat_conf_info.cc b/met/src/tools/core/ensemble_stat/ensemble_stat_conf_info.cc index 0461bc1b7f..e4361e041a 100644 --- a/met/src/tools/core/ensemble_stat/ensemble_stat_conf_info.cc +++ b/met/src/tools/core/ensemble_stat/ensemble_stat_conf_info.cc @@ -877,8 +877,8 @@ void EnsembleStatVxOpt::set_vx_pd(EnsembleStatConfInfo *conf_info) { // Define the dimensions vx_pd.set_pd_size(n_msg_typ, n_mask, n_interp); - // Store climo CDF - vx_pd.set_climo_cdf(cdf_info); + // Store the climo CDF info + vx_pd.set_climo_cdf_info(cdf_info); // Store the list of surface message types vx_pd.set_msg_typ_sfc(conf_info->msg_typ_sfc); diff --git a/met/src/tools/core/grid_stat/grid_stat.cc b/met/src/tools/core/grid_stat/grid_stat.cc index 5a90a6464f..14b09c6d5a 100644 --- a/met/src/tools/core/grid_stat/grid_stat.cc +++ b/met/src/tools/core/grid_stat/grid_stat.cc @@ -145,7 +145,8 @@ static void build_outfile_name(unixtime, int, const char *, static void process_scores(); -static void get_mask_points(const MaskPlane &, const DataPlane *, +static void 
get_mask_points(const GridStatVxOpt &, + const MaskPlane &, const DataPlane *, const DataPlane *, const DataPlane *, const DataPlane *, const DataPlane *, PairDataPoint &); @@ -797,7 +798,8 @@ void process_scores() { } // Apply the current mask to the current fields - get_mask_points(mask_mp, &fcst_dp_smooth, &obs_dp_smooth, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_smooth, &obs_dp_smooth, &cmn_dp, &csd_dp, &wgt_dp, pd); // Set the mask name @@ -981,7 +983,8 @@ void process_scores() { } // Apply the current mask to the U-wind fields - get_mask_points(mask_mp, &fu_dp_smooth, &ou_dp_smooth, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fu_dp_smooth, &ou_dp_smooth, &cmnu_dp, &csdu_dp, &wgt_dp, pd_u); // Compute VL1L2 @@ -1136,9 +1139,11 @@ void process_scores() { mask_bad_data(mask_mp, ogy_dp); // Apply the current mask to the current fields - get_mask_points(mask_mp, &fgx_dp, &ogx_dp, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fgx_dp, &ogx_dp, 0, 0, &wgt_dp, pd_gx); - get_mask_points(mask_mp, &fgy_dp, &ogy_dp, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fgy_dp, &ogy_dp, 0, 0, &wgt_dp, pd_gy); // Set the mask name @@ -1217,7 +1222,8 @@ void process_scores() { conf_info.vx_opt[i].ocat_ta.need_perc()) { // Apply the current mask - get_mask_points(mask_mp, &fcst_dp, &obs_dp, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp, &obs_dp, &cmn_dp, 0, 0, pd); // Process percentile thresholds @@ -1271,9 +1277,11 @@ void process_scores() { // Apply the current mask to the distance map and // thresholded fields - get_mask_points(mask_mp, &fcst_dp_dmap, &obs_dp_dmap, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_dmap, &obs_dp_dmap, 0, 0, 0, pd); - get_mask_points(mask_mp, &fcst_dp_thresh, &obs_dp_thresh, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_thresh, &obs_dp_thresh, 0, 0, 0, pd_thr); dmap_info.set_options( @@ -1346,7 +1354,8 @@ void process_scores() { conf_info.vx_opt[i].ocat_ta.need_perc()) { // Apply the current mask - get_mask_points(mask_mp, &fcst_dp, &obs_dp, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp, &obs_dp, &cmn_dp, 0, 0, pd); // Process percentile thresholds @@ -1445,9 +1454,11 @@ void process_scores() { // Apply the current mask to the fractional coverage // and thresholded fields - get_mask_points(mask_mp, &fcst_dp_smooth, &obs_dp_smooth, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_smooth, &obs_dp_smooth, 0, 0, &wgt_dp, pd); - get_mask_points(mask_mp, &fcst_dp_thresh, &obs_dp_thresh, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_thresh, &obs_dp_thresh, 0, 0, 0, pd_thr); // Store climatology values as bad data @@ -1618,7 +1629,8 @@ void process_scores() { } // Apply the current mask to the current fields - get_mask_points(mask_mp, &fcst_dp_smooth, &obs_dp_smooth, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fcst_dp_smooth, &obs_dp_smooth, &cmn_dp_smooth, &csd_dp, &wgt_dp, pd); // Set the mask name @@ -1706,7 +1718,8 @@ void process_scores() { } // Apply the current mask to the U-wind fields - get_mask_points(mask_mp, &fu_dp_smooth, &ou_dp_smooth, + get_mask_points(conf_info.vx_opt[i], mask_mp, + &fu_dp_smooth, &ou_dp_smooth, &cmnu_dp_smooth, 0, &wgt_dp, pd_u); // Compute VL1L2 @@ -1790,29 +1803,33 @@ void process_scores() { //////////////////////////////////////////////////////////////////////// -void get_mask_points(const MaskPlane &mask_mp, +void get_mask_points(const GridStatVxOpt &vx_opt, + const MaskPlane &mask_mp, const DataPlane *fcst_ptr, const DataPlane 
*obs_ptr, const DataPlane *cmn_ptr, const DataPlane *csd_ptr, const DataPlane *wgt_ptr, PairDataPoint &pd) { - // Initialize - pd.erase(); + // Initialize + pd.erase(); - // Apply the mask the data fields or fill with default values - apply_mask(*fcst_ptr, mask_mp, pd.f_na); - apply_mask(*obs_ptr, mask_mp, pd.o_na); - pd.n_obs = pd.o_na.n(); + // Store the climo CDF info + pd.set_climo_cdf_info(vx_opt.cdf_info); + + // Apply the mask the data fields or fill with default values + apply_mask(*fcst_ptr, mask_mp, pd.f_na); + apply_mask(*obs_ptr, mask_mp, pd.o_na); + pd.n_obs = pd.o_na.n(); - if(cmn_ptr) apply_mask(*cmn_ptr, mask_mp, pd.cmn_na); - else pd.cmn_na.add_const(bad_data_double, pd.n_obs); - if(csd_ptr) apply_mask(*csd_ptr, mask_mp, pd.csd_na); - else pd.csd_na.add_const(bad_data_double, pd.n_obs); - if(wgt_ptr) apply_mask(*wgt_ptr, mask_mp, pd.wgt_na); - else pd.wgt_na.add_const(default_grid_weight, pd.n_obs); + if(cmn_ptr) apply_mask(*cmn_ptr, mask_mp, pd.cmn_na); + else pd.cmn_na.add_const(bad_data_double, pd.n_obs); + if(csd_ptr) apply_mask(*csd_ptr, mask_mp, pd.csd_na); + else pd.csd_na.add_const(bad_data_double, pd.n_obs); + if(wgt_ptr) apply_mask(*wgt_ptr, mask_mp, pd.wgt_na); + else pd.wgt_na.add_const(default_grid_weight, pd.n_obs); - if(cmn_ptr && csd_ptr) pd.add_climo_cdf(); + if(cmn_ptr && csd_ptr) pd.add_climo_cdf(); - return; + return; } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/core/point_stat/point_stat.cc b/met/src/tools/core/point_stat/point_stat.cc index f61583f3ee..d1edfdb935 100644 --- a/met/src/tools/core/point_stat/point_stat.cc +++ b/met/src/tools/core/point_stat/point_stat.cc @@ -1762,7 +1762,7 @@ void do_hira_ens(int i_vx, const PairDataPoint *pd_ptr) { hira_pd.clear(); hira_pd.extend(pd_ptr->n_obs); hira_pd.set_ens_size(gt->size()); - hira_pd.set_climo_cdf(conf_info.vx_opt[i_vx].cdf_info); + hira_pd.set_climo_cdf_info(conf_info.vx_opt[i_vx].cdf_info); f_ens.extend(gt->size()); // Process each observation point diff --git a/met/src/tools/core/point_stat/point_stat_conf_info.cc b/met/src/tools/core/point_stat/point_stat_conf_info.cc index 0d8dd3fa43..ecd6b8b3dc 100644 --- a/met/src/tools/core/point_stat/point_stat_conf_info.cc +++ b/met/src/tools/core/point_stat/point_stat_conf_info.cc @@ -932,6 +932,9 @@ void PointStatVxOpt::set_vx_pd(PointStatConfInfo *conf_info) { // Define the dimensions vx_pd.set_pd_size(n_msg_typ, n_mask, n_interp); + // Store the climo CDF info + vx_pd.set_climo_cdf_info(cdf_info); + // Store the surface message type group cs = surface_msg_typ_group_str; if(conf_info->msg_typ_group_map.count(cs) == 0) { diff --git a/met/src/tools/core/stat_analysis/aggr_stat_line.cc b/met/src/tools/core/stat_analysis/aggr_stat_line.cc index 3bd72ae766..db7cd98ba9 100644 --- a/met/src/tools/core/stat_analysis/aggr_stat_line.cc +++ b/met/src/tools/core/stat_analysis/aggr_stat_line.cc @@ -575,7 +575,22 @@ ConcatString StatHdrInfo::get_shc_str(const ConcatString &cur_case, //////////////////////////////////////////////////////////////////////// // -// Code for AggrTimeSeriesInfo structure. 
+// Code for AggrENSInfo structure +// +//////////////////////////////////////////////////////////////////////// + +void AggrENSInfo::clear() { + hdr.clear(); + ens_pd.clear(); + me_na.clear(); + mse_na.clear(); + me_oerr_na.clear(); + mse_oerr_na.clear(); +} + +//////////////////////////////////////////////////////////////////////// +// +// Code for AggrTimeSeriesInfo structure // //////////////////////////////////////////////////////////////////////// @@ -2190,6 +2205,9 @@ void aggr_mpr_lines(LineDataFile &f, STATAnalysisJob &job, // if(m.count(key) == 0) { + bool center = false; + aggr.pd.cdf_info.set_cdf_ta(nint(1.0/job.out_bin_size), center); + aggr.pd.f_na.clear(); aggr.pd.o_na.clear(); aggr.pd.cmn_na.clear(); @@ -2208,6 +2226,7 @@ void aggr_mpr_lines(LineDataFile &f, STATAnalysisJob &job, aggr.fcst_var = cur.fcst_var; aggr.obs_var = cur.obs_var; aggr.hdr.clear(); + m[key] = aggr; } // @@ -2552,8 +2571,7 @@ void aggr_ecnt_lines(LineDataFile &f, STATAnalysisJob &job, // Add a new map entry, if necessary // if(m.count(key) == 0) { - aggr.ens_pd.clear(); - aggr.hdr.clear(); + aggr.clear(); m[key] = aggr; } @@ -2775,8 +2793,7 @@ void aggr_rhist_lines(LineDataFile &f, STATAnalysisJob &job, // Add a new map entry, if necessary // if(m.count(key) == 0) { - aggr.ens_pd.clear(); - aggr.hdr.clear(); + aggr.clear(); for(i=0; i::iterator it; @@ -3045,17 +3062,17 @@ void aggr_orank_lines(LineDataFile &f, STATAnalysisJob &job, // Add a new map entry, if necessary // if(m.count(key) == 0) { - aggr.ens_pd.clear(); + aggr.clear(); + bool center = false; + aggr.ens_pd.cdf_info.set_cdf_ta(nint(1.0/job.out_bin_size), center); aggr.ens_pd.obs_error_flag = !is_bad_data(cur.ens_mean_oerr); aggr.ens_pd.set_ens_size(cur.n_ens); + aggr.ens_pd.extend(cur.total); for(i=0; i " - << "the \"N_ENS\" column must remain constant. " + << "the \"N_ENS\" column must remain constant. 
" << "Try setting \"-column_eq N_ENS n\".\n\n"; throw(1); } @@ -3099,12 +3116,12 @@ void aggr_orank_lines(LineDataFile &f, STATAnalysisJob &job, m[key].ens_pd.v_na.add(n_valid); // Derive ensemble from climo mean and standard deviation - cur_clm = derive_climo_cdf_inv(m[key].ens_pd.cdf_info, - cur.climo_mean, cur.climo_stdev); + derive_climo_vals(m[key].ens_pd.cdf_info, + cur.climo_mean, cur.climo_stdev, climo_vals); // Store empirical CRPS stats m[key].ens_pd.crps_emp_na.add(compute_crps_emp(cur.obs, cur.ens_na)); - m[key].ens_pd.crpscl_emp_na.add(compute_crps_emp(cur.obs, cur_clm)); + m[key].ens_pd.crpscl_emp_na.add(compute_crps_emp(cur.obs, climo_vals)); // Store Gaussian CRPS stats m[key].ens_pd.crps_gaus_na.add(compute_crps_gaus(cur.obs, cur.ens_mean, cur.spread)); diff --git a/met/src/tools/core/stat_analysis/aggr_stat_line.h b/met/src/tools/core/stat_analysis/aggr_stat_line.h index 2e6ab5b72e..e8ed7809f9 100644 --- a/met/src/tools/core/stat_analysis/aggr_stat_line.h +++ b/met/src/tools/core/stat_analysis/aggr_stat_line.h @@ -152,6 +152,7 @@ struct AggrENSInfo { StatHdrInfo hdr; PairDataEnsemble ens_pd; NumArray me_na, mse_na, me_oerr_na, mse_oerr_na; + void clear(); }; struct AggrRPSInfo { diff --git a/met/src/tools/core/stat_analysis/parse_stat_line.cc b/met/src/tools/core/stat_analysis/parse_stat_line.cc index 2f79ae4c3a..14bf981ea6 100644 --- a/met/src/tools/core/stat_analysis/parse_stat_line.cc +++ b/met/src/tools/core/stat_analysis/parse_stat_line.cc @@ -461,8 +461,7 @@ void parse_relp_line(STATLine &l, RELPData &r_data) { //////////////////////////////////////////////////////////////////////// void parse_orank_line(STATLine &l, ORANKData &o_data) { - int i; - char col_str[max_str_len]; + int i, ens1; o_data.total = atoi(l.get_item("TOTAL")); o_data.index = atoi(l.get_item("INDEX")); @@ -480,10 +479,10 @@ void parse_orank_line(STATLine &l, ORANKData &o_data) { o_data.n_ens = atoi(l.get_item("N_ENS")); // Parse out ENS_i - o_data.ens_na.clear(); + o_data.ens_na.erase(); + ens1 = l.get_offset("ENS_1"); for(i=0; i + + + + echo "&DATA_DIR_MODEL;/grib1/arw-fer-gep1/arw-fer-gep1_2012040912_F024.grib \ + &DATA_DIR_MODEL;/grib1/arw-fer-gep5/arw-fer-gep5_2012040912_F024.grib \ + &DATA_DIR_MODEL;/grib1/arw-sch-gep2/arw-sch-gep2_2012040912_F024.grib \ + &DATA_DIR_MODEL;/grib1/arw-sch-gep6/arw-sch-gep6_2012040912_F024.grib \ + &DATA_DIR_MODEL;/grib1/arw-tom-gep3/arw-tom-gep3_2012040912_F024.grib \ + &DATA_DIR_MODEL;/grib1/arw-tom-gep7/arw-tom-gep7_2012040912_F024.grib" \ + > &OUTPUT_DIR;/climatology/ensemble_stat_input_file_list; \ + &MET_BIN;/ensemble_stat + + OUTPUT_PREFIX ONE_CDF_BIN + CLIMO_MEAN_FILE_LIST "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590410" + + \ + &OUTPUT_DIR;/climatology/ensemble_stat_input_file_list \ + &CONFIG_DIR;/EnsembleStatConfig_one_cdf_bin \ + -point_obs &OUTPUT_DIR;/pb2nc/ndas.20120410.t12z.prepbufr.tm00.nc \ + -grid_obs &DATA_DIR_OBS;/laps/laps_2012041012_F000.grib \ + -outdir &OUTPUT_DIR;/climatology + + + &OUTPUT_DIR;/climatology/ensemble_stat_ONE_CDF_BIN_20120410_120000V.stat + &OUTPUT_DIR;/climatology/ensemble_stat_ONE_CDF_BIN_20120410_120000V_ecnt.txt + &OUTPUT_DIR;/climatology/ensemble_stat_ONE_CDF_BIN_20120410_120000V_ens.nc + + + From 40b57af2e2124ae9bec11181cf2b2452171482ec Mon Sep 17 00:00:00 2001 From: johnhg Date: Tue, 2 Mar 2021 16:09:21 -0700 Subject: [PATCH 45/86] Per #1691, add met-10.0.0-beta4 release notes. 
(#1692) --- met/docs/Users_Guide/release-notes.rst | 55 +++++++++++++++++++++++++- met/docs/conf.py | 2 +- met/docs/version | 2 +- 3 files changed, 56 insertions(+), 3 deletions(-) diff --git a/met/docs/Users_Guide/release-notes.rst index 679c9e7fbf..791d657cfc 100644 --- a/met/docs/Users_Guide/release-notes.rst +++ b/met/docs/Users_Guide/release-notes.rst @@ -2,11 +2,64 @@ MET release notes _________________ When applicable, release notes are followed by the GitHub issue number which -describes the bugfix, enhancement, or new feature: `MET Git-Hub issues. `_ +describes the bugfix, enhancement, or new feature: `MET GitHub issues. `_ Version |version| release notes (|release_date|) ------------------------------------------------ +Version `10.0.0-beta4 `_ release notes (20210302) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Bugfixes: + + * Fix the set_attr_accum option to set the accumulation time instead of the lead time (`#1646 `_). + * Correct the time offset for tests in unit_plot_data_plane.xml (`#1677 `_). + +* Repository and build: + + * Enhance the sample plotting R-script to read output from different versions of MET (`#1653 `_). + +* Library code: + + * Miscellaneous: + + * Update GRIB1/2 table entries for the MXUPHL, MAXREF, MAXUVV, and MAXDVV variables (`#1658 `_). + * Update the Air Force GRIB tables to reflect current AF usage (`#1519 `_). + * Enhance the DataLine::get_item() error message to include the file name, line number, and column (`#1429 `_). + + * NetCDF library: + + * Add support for the NetCDF-CF conventions time bounds option (`#1657 `_). + * Error out when reading CF-compliant NetCDF data with incomplete grid definition (`#1454 `_). + * Reformat and simplify the magic_str() printed for NetCDF data files (`#1655 `_). + + * Statistics computations: + + * Add support for the Hersbach CRPS algorithm by adding new columns to the ECNT line type (`#1450 `_). + * Enhance MET to derive the Hersbach CRPSCL_EMP and CRPSS_EMP statistics from a single deterministic reference model (`#1685 `_). + * Correct the climatological CRPS computation to match the NOAA/EMC VSDB method (`#1451 `_). + * Modify the climatological Brier Score computation to match the NOAA/EMC VSDB method (`#1684 `_). + +* Application code: + + * ASCII2NC and Point2Grid: + + * Enhance ascii2nc and point2grid to gracefully process zero input observations rather than erroring out (`#1630 `_). + + * Point-Stat Tool: + + * Enhance the validation of masking regions to check for non-unique masking region names (`#1439 `_). + * Print the Point-Stat rejection code reason count log messages at verbosity level 2 for zero matched pairs (`#1644 `_). + * Add detailed log messages to Point-Stat when discarding observations (`#1588 `_). + + * Stat-Analysis Tool: + + * Add -fcst_init_inc/_exc and -fcst_valid_inc/_exc job command filtering options to Stat-Analysis (`#1135 `_). + + * MODE Tool: + + * Update the MODE AREA_RATIO output column to list the forecast area divided by the observation area (`#1643 `_).
+ Version `10.0.0-beta3 `_ release notes (20210127) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/met/docs/conf.py b/met/docs/conf.py index 15b76ffc0b..efa6f948c9 100644 --- a/met/docs/conf.py +++ b/met/docs/conf.py @@ -24,7 +24,7 @@ verinfo = version release = f'{version}' release_year = '2021' -release_date = f'{release_year}0127' +release_date = f'{release_year}0302' copyright = f'{release_year}, {author}' # -- General configuration --------------------------------------------------- diff --git a/met/docs/version b/met/docs/version index 16c1efe0d4..2e6b7f7038 100644 --- a/met/docs/version +++ b/met/docs/version @@ -1 +1 @@ -10.0.0-beta3 +10.0.0-beta4 From 23dc482ac49af3fef1103ef46585c488cf5b8f6d Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Wed, 3 Mar 2021 10:11:09 -0700 Subject: [PATCH 46/86] Updated Python documentation --- met/docs/Users_Guide/appendixF.rst | 6 +++--- met/docs/requirements.txt | 11 +++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) create mode 100644 met/docs/requirements.txt diff --git a/met/docs/Users_Guide/appendixF.rst b/met/docs/Users_Guide/appendixF.rst index aa4d1f84aa..e122d6a560 100644 --- a/met/docs/Users_Guide/appendixF.rst +++ b/met/docs/Users_Guide/appendixF.rst @@ -13,11 +13,11 @@ ________________________ In order to use Python embedding, the user's local Python installation must have the C-language Python header files and libraries. Sometimes when Python is installed locally, these header files and libraries are deleted at the end of the installation process, leaving only the binary executable and run-time shared object files. But the Python header files and libraries must be present to compile support in MET for Python embedding. Assuming the requisite Python files are present, and that Python embedding is enabled when building MET (which is done by passing the **--enable-python** option to the **configure** command line), the MET C++ code will use these in the compilation process to link directly to the Python libraries. -In addition to the **configure** option mentioned above, two variables, **MET_PYTHON_CC** and **MET_PYTHON_LD**, must also be set for the configuration process. These may either be set as environment variables or as command line options to **configure**. These constants are passed as compiler command line options when building MET to enable the compiler to find the requisite Python header files and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. Along with the **Python** executable, there should be another executable called **python-config**, whose output can be used to set these environment variables as follows: +In addition to the **configure** option mentioned above, two variables, **MET_PYTHON_CC** and **MET_PYTHON_LD**, must also be set for the configuration process. These may either be set as environment variables or as command line options to **configure**. These constants are passed as compiler command line options when building MET to enable the compiler to find the requisite Python header files and libraries in the user's local filesystem. Fortunately, Python provides a way to set these variables properly. This frees the user from the necessity of having any expert knowledge of the compiling and linking process. 
Along with the **Python** executable, there should be another executable called **python3-config**, whose output can be used to set these environment variables as follows: -• On the command line, run “**python-config --cflags**”. Set the value of **MET_PYTHON_CC** to the output of that command. +• On the command line, run “**python3-config --cflags**”. Set the value of **MET_PYTHON_CC** to the output of that command. -• Again on the command line, run “**python-config --ldflags**”. Set the value of **MET_PYTHON_LD** to the output of that command. +• Again on the command line, run “**python3-config --ldflags**”. Set the value of **MET_PYTHON_LD** to the output of that command. Make sure that these are set as environment variables or that you have included them on the command line prior to running **configure**. diff --git a/met/docs/requirements.txt b/met/docs/requirements.txt new file mode 100644 index 0000000000..f6bdb82841 --- /dev/null +++ b/met/docs/requirements.txt @@ -0,0 +1,11 @@ +sphinx +sphinx-gallery +sphinx-rtd-theme +sphinxcontrib-applehelp +sphinxcontrib-bibtex +sphinxcontrib-devhelp +sphinxcontrib-htmlhelp +sphinxcontrib-jsmath +sphinxcontrib-qthelp +sphinxcontrib-serializinghtml + From 9c9c54cef62a8e726ac3b52ee911a0684d693a6b Mon Sep 17 00:00:00 2001 From: John Halley Gotway Date: Thu, 4 Mar 2021 16:11:13 -0700 Subject: [PATCH 47/86] Per #1694, add VarInfo::magic_str_attr() to construct a field summary string from the name_attr() and level_attr() functions. --- met/src/libcode/vx_data2d/var_info.cc | 13 +++++++++++++ met/src/libcode/vx_data2d/var_info.h | 1 + 2 files changed, 14 insertions(+) diff --git a/met/src/libcode/vx_data2d/var_info.cc b/met/src/libcode/vx_data2d/var_info.cc index e72a761096..aa4449d2e5 100644 --- a/met/src/libcode/vx_data2d/var_info.cc +++ b/met/src/libcode/vx_data2d/var_info.cc @@ -434,6 +434,19 @@ void VarInfo::set_magic(const ConcatString &nstr, const ConcatString &lstr) { /////////////////////////////////////////////////////////////////////////////// +ConcatString VarInfo::magic_str_attr() const { + ConcatString mstr(name_attr()); + ConcatString lstr(level_attr()); + + // Format as {name}/{level} or {name}{level} + if(lstr.nonempty() && lstr[0] != '(') mstr << "/"; + mstr << lstr; + + return(mstr); +} + +/////////////////////////////////////////////////////////////////////////////// + void VarInfo::set_dict(Dictionary &dict) { ThreshArray ta; NumArray na; diff --git a/met/src/libcode/vx_data2d/var_info.h b/met/src/libcode/vx_data2d/var_info.h index b9d7c501af..f2801a49c0 100644 --- a/met/src/libcode/vx_data2d/var_info.h +++ b/met/src/libcode/vx_data2d/var_info.h @@ -135,6 +135,7 @@ class VarInfo RegridInfo regrid() const; + ConcatString magic_str_attr() const; ConcatString name_attr() const; ConcatString units_attr() const; ConcatString level_attr() const; From a16bebc778370626234142d8e177b17e8c2e8f4c Mon Sep 17 00:00:00 2001 From: John Halley Gotway Date: Thu, 4 Mar 2021 16:44:48 -0700 Subject: [PATCH 48/86] Per #1694, fixing 2 issues here. There was a bug in the computation of the max value. Had a less-than sign that should have been greater-than. Also, switch from tracking data by it's magic_str() to simply using VAR_i and VAR_j strings. We *could* have just used the i, j integers directly, but constructing the ij joint histogram integer could have been tricky since we start numbering with 0 instead of 1. i=0, j=1 would result in 01 which is the same as integer of 1. 
If we do want to switch to integers, we just need to make them 1-based and add +1 all over the place. --- met/src/tools/other/grid_diag/grid_diag.cc | 100 ++++++++++++--------- 1 file changed, 60 insertions(+), 40 deletions(-) diff --git a/met/src/tools/other/grid_diag/grid_diag.cc b/met/src/tools/other/grid_diag/grid_diag.cc index 2fe311e0b2..b9f4f2d12a 100644 --- a/met/src/tools/other/grid_diag/grid_diag.cc +++ b/met/src/tools/other/grid_diag/grid_diag.cc @@ -16,6 +16,7 @@ // ---- ---- ---- ----------- // 000 10/01/19 Fillmore New // 001 07/28/20 Halley Gotway Updates for #1391. +// 002 03/04/21 Halley Gotway Bugfix #1694. // //////////////////////////////////////////////////////////////////////// @@ -228,6 +229,7 @@ void process_series(void) { StringArray *cur_files; GrdFileType *cur_ftype; Grid cur_grid; + ConcatString i_var_str, j_var_str, ij_var_str; // List the lengths of the series options mlog << Debug(1) @@ -245,6 +247,8 @@ void process_series(void) { // Process the 1d histograms for(int i_var=0; i_varmagic_str() + << "Reading field " << data_info->magic_str_attr() << " data from file: " << (*cur_files)[i_series] << "\n"; @@ -268,7 +272,7 @@ void process_series(void) { // Regrid, if necessary if(!(cur_grid == grid)) { mlog << Debug(2) - << "Regridding field " << data_info->magic_str() + << "Regridding field " << data_info->magic_str_attr() << " to the verification grid.\n"; data_dp[i_var] = met_regrid(data_dp[i_var], cur_grid, grid, @@ -311,38 +315,40 @@ void process_series(void) { if(is_bad_data(var_mins[i_var]) || min < var_mins[i_var]) { var_mins[i_var] = min; } - if(is_bad_data(var_maxs[i_var]) || max < var_maxs[i_var]) { + if(is_bad_data(var_maxs[i_var]) || max > var_maxs[i_var]) { var_maxs[i_var] = max; } - + // Update partial sums - update_pdf(bin_mins[data_info->magic_str()][0], - bin_deltas[data_info->magic_str()], - histograms[data_info->magic_str()], + update_pdf(bin_mins[i_var_str][0], + bin_deltas[i_var_str], + histograms[i_var_str], data_dp[i_var], conf_info.mask_area); } // end for i_var // Process the 2d joint histograms for(int i_var=0; i_varmagic_str(); - joint_str.add("_"); - joint_str.add(joint_info->magic_str()); + ij_var_str << cs_erase << i_var_str << "_" << j_var_str; // Update joint partial sums update_joint_pdf(data_info->n_bins(), joint_info->n_bins(), - bin_mins[data_info->magic_str()][0], - bin_mins[joint_info->magic_str()][0], - bin_deltas[data_info->magic_str()], - bin_deltas[joint_info->magic_str()], - joint_histograms[joint_str], + bin_mins[i_var_str][0], + bin_mins[j_var_str][0], + bin_deltas[i_var_str], + bin_deltas[j_var_str], + joint_histograms[ij_var_str], data_dp[i_var], data_dp[j_var], conf_info.mask_area); } // end for j_var @@ -355,7 +361,7 @@ void process_series(void) { VarInfo *data_info = conf_info.data_info[i_var]; mlog << Debug(2) - << "Processed " << data_info->magic_str() + << "Processed " << data_info->magic_str_attr() << " data with range (" << var_mins[i_var] << ", " << var_maxs[i_var] << ") into bins with range (" << data_info->range()[0] << ", " @@ -364,7 +370,7 @@ void process_series(void) { if(var_mins[i_var] < data_info->range()[0] || var_maxs[i_var] > data_info->range()[1]) { mlog << Warning << "\nprocess_series() -> " - << "the range of the " << data_info->magic_str() + << "the range of the " << data_info->magic_str_attr() << " data (" << var_mins[i_var] << ", " << var_maxs[i_var] << ") falls outside the configuration file range (" << data_info->range()[0] << ", " @@ -378,9 +384,12 @@ void process_series(void) { 
//////////////////////////////////////////////////////////////////////// void setup_histograms(void) { + ConcatString i_var_str; for(int i_var=0; i_varmagic_str()] = bin_min; - bin_maxs[data_info->magic_str()] = bin_max; - bin_mids[data_info->magic_str()] = bin_mid; - bin_deltas[data_info->magic_str()] = delta; + bin_mins[i_var_str] = bin_min; + bin_maxs[i_var_str] = bin_max; + bin_mids[i_var_str] = bin_mid; + bin_deltas[i_var_str] = delta; // Initialize histograms mlog << Debug(2) - << "Initializing " << data_info->magic_str() + << "Initializing " << data_info->magic_str_attr() << " histogram with " << n_bins << " bins from " << min << " to " << max << ".\n"; - histograms[data_info->magic_str()] = vector(); - init_pdf(n_bins, histograms[data_info->magic_str()]); + histograms[i_var_str] = vector(); + init_pdf(n_bins, histograms[i_var_str]); } // for i_var } //////////////////////////////////////////////////////////////////////// void setup_joint_histograms(void) { + ConcatString i_var_str, j_var_str, ij_var_str; for(int i_var=0; i_varn_bins(); for(int j_var=i_var+1; j_varn_bins(); - ConcatString joint_str = data_info->magic_str(); - joint_str.add("_"); - joint_str.add(joint_info->magic_str()); + ij_var_str << cs_erase << i_var_str << "_" << j_var_str; + mlog << Debug(2) - << "Initializing " << joint_str << " joint histogram with " - << n_bins << " x " << n_joint_bins << " bins.\n"; - joint_histograms[joint_str] = vector(); + << "Initializing " << data_info->magic_str_attr() << "_" + << joint_info->magic_str_attr() << " joint histogram with " + << n_bins << " x " << n_joint_bins << " bins.\n"; + joint_histograms[ij_var_str] = vector(); init_joint_pdf(n_bins, n_joint_bins, - joint_histograms[joint_str]); + joint_histograms[ij_var_str]); } // end for j_var } // end for i_var } @@ -453,7 +467,7 @@ void setup_joint_histograms(void) { //////////////////////////////////////////////////////////////////////// void setup_nc_file(void) { - ConcatString cs; + ConcatString cs, i_var_str; // Create NetCDF file nc_out = open_ncfile(out_file.c_str(), true); @@ -494,6 +508,8 @@ void setup_nc_file(void) { for(int i_var=0; i_var < conf_info.get_n_data(); i_var++) { + i_var_str << cs_erase << "VAR" << i_var; + VarInfo *data_info = conf_info.data_info[i_var]; // Set variable NetCDF name @@ -534,9 +550,9 @@ void setup_nc_file(void) { add_var_att_local(&var_mid, "units", data_info->units_attr()); // Write bin values - var_min.putVar(bin_mins[data_info->magic_str()].data()); - var_max.putVar(bin_maxs[data_info->magic_str()].data()); - var_mid.putVar(bin_mids[data_info->magic_str()].data()); + var_min.putVar(bin_mins[i_var_str].data()); + var_max.putVar(bin_maxs[i_var_str].data()); + var_mid.putVar(bin_mids[i_var_str].data()); } // Define histograms @@ -625,13 +641,16 @@ void add_var_att_local(NcVar *var, const char *att_name, //////////////////////////////////////////////////////////////////////// void write_histograms(void) { + ConcatString i_var_str; for(int i_var=0; i_var < conf_info.get_n_data(); i_var++) { + i_var_str << cs_erase << "VAR" << i_var; + VarInfo *data_info = conf_info.data_info[i_var]; NcVar hist_var = hist_vars[i_var]; - int *hist = histograms[data_info->magic_str()].data(); + int *hist = histograms[i_var_str].data(); hist_var.putVar(hist); } @@ -642,6 +661,7 @@ void write_histograms(void) { void write_joint_histograms(void) { vector offsets; vector counts; + ConcatString var_cs; int i_hist=0; for(int i_var=0; i_varmagic_str(); - joint_str.add("_"); - joint_str.add(joint_info->magic_str()); + 
var_cs << cs_erase + << "VAR" << i_var << "_" + << "VAR" << j_var; - int *hist = joint_histograms[joint_str].data(); + int *hist = joint_histograms[var_cs].data(); offsets.clear(); counts.clear(); From 21e3eb7d5f046d353acc32db942d16554f61f1fc Mon Sep 17 00:00:00 2001 From: John Halley Gotway Date: Thu, 4 Mar 2021 17:26:34 -0700 Subject: [PATCH 49/86] Per #1694, just switching to consistent variable name. --- met/src/tools/other/grid_diag/grid_diag.cc | 66 +++++++++++----------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/met/src/tools/other/grid_diag/grid_diag.cc b/met/src/tools/other/grid_diag/grid_diag.cc index b9f4f2d12a..7cc2f6c0ef 100644 --- a/met/src/tools/other/grid_diag/grid_diag.cc +++ b/met/src/tools/other/grid_diag/grid_diag.cc @@ -318,7 +318,7 @@ void process_series(void) { if(is_bad_data(var_maxs[i_var]) || max > var_maxs[i_var]) { var_maxs[i_var] = max; } - + // Update partial sums update_pdf(bin_mins[i_var_str][0], bin_deltas[i_var_str], @@ -510,34 +510,34 @@ void setup_nc_file(void) { i_var_str << cs_erase << "VAR" << i_var; - VarInfo *data_info = conf_info.data_info[i_var]; + VarInfo *data_info = conf_info.data_info[i_var]; + + // Set variable NetCDF name + ConcatString var_name = data_info->name_attr(); + var_name.add("_"); + var_name.add(data_info->level_attr()); - // Set variable NetCDF name - ConcatString var_name = data_info->name_attr(); - var_name.add("_"); - var_name.add(data_info->level_attr()); - - // Define histogram dimensions - NcDim var_dim = add_dim(nc_out, var_name, - (long) data_info->n_bins()); - data_var_dims.push_back(var_dim); - - // Define histogram bins - ConcatString var_min_name = var_name; - ConcatString var_max_name = var_name; - ConcatString var_mid_name = var_name; - var_min_name.add("_min"); - var_max_name.add("_max"); - var_mid_name.add("_mid"); - NcVar var_min = add_var(nc_out, var_min_name, ncFloat, var_dim, - deflate_level); - NcVar var_max = add_var(nc_out, var_max_name, ncFloat, var_dim, - deflate_level); - NcVar var_mid = add_var(nc_out, var_mid_name, ncFloat, var_dim, - deflate_level); - - // Add variable attributes - cs << cs_erase << "Minimum value of " << var_name << " bin"; + // Define histogram dimensions + NcDim var_dim = add_dim(nc_out, var_name, + (long) data_info->n_bins()); + data_var_dims.push_back(var_dim); + + // Define histogram bins + ConcatString var_min_name = var_name; + ConcatString var_max_name = var_name; + ConcatString var_mid_name = var_name; + var_min_name.add("_min"); + var_max_name.add("_max"); + var_mid_name.add("_mid"); + NcVar var_min = add_var(nc_out, var_min_name, ncFloat, var_dim, + deflate_level); + NcVar var_max = add_var(nc_out, var_max_name, ncFloat, var_dim, + deflate_level); + NcVar var_mid = add_var(nc_out, var_mid_name, ncFloat, var_dim, + deflate_level); + + // Add variable attributes + cs << cs_erase << "Minimum value of " << var_name << " bin"; add_var_att_local(&var_min, "long_name", cs); add_var_att_local(&var_min, "units", data_info->units_attr()); @@ -661,7 +661,7 @@ void write_histograms(void) { void write_joint_histograms(void) { vector offsets; vector counts; - ConcatString var_cs; + ConcatString ij_var_str; int i_hist=0; for(int i_var=0; i_var Date: Thu, 4 Mar 2021 17:29:39 -0700 Subject: [PATCH 50/86] Just consistent spacing. 
--- met/src/tools/other/grid_diag/grid_diag.cc | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/met/src/tools/other/grid_diag/grid_diag.cc b/met/src/tools/other/grid_diag/grid_diag.cc index 7cc2f6c0ef..6199ba408f 100644 --- a/met/src/tools/other/grid_diag/grid_diag.cc +++ b/met/src/tools/other/grid_diag/grid_diag.cc @@ -440,27 +440,27 @@ void setup_joint_histograms(void) { i_var_str << cs_erase << "VAR" << i_var; - VarInfo *data_info = conf_info.data_info[i_var]; - int n_bins = data_info->n_bins(); + VarInfo *data_info = conf_info.data_info[i_var]; + int n_bins = data_info->n_bins(); - for(int j_var=i_var+1; j_varn_bins(); + VarInfo *joint_info = conf_info.data_info[j_var]; + int n_joint_bins = joint_info->n_bins(); ij_var_str << cs_erase << i_var_str << "_" << j_var_str; - mlog << Debug(2) - << "Initializing " << data_info->magic_str_attr() << "_" + mlog << Debug(2) + << "Initializing " << data_info->magic_str_attr() << "_" << joint_info->magic_str_attr() << " joint histogram with " - << n_bins << " x " << n_joint_bins << " bins.\n"; - joint_histograms[ij_var_str] = vector(); + << n_bins << " x " << n_joint_bins << " bins.\n"; + joint_histograms[ij_var_str] = vector(); - init_joint_pdf(n_bins, n_joint_bins, - joint_histograms[ij_var_str]); - } // end for j_var + init_joint_pdf(n_bins, n_joint_bins, + joint_histograms[ij_var_str]); + } // end for j_var } // end for i_var } From f21b2e62a9791525db92a1ac61738762ba767ff4 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 5 Mar 2021 09:22:54 -0700 Subject: [PATCH 51/86] Added python3_script::import_read_tmp_ascii. --- .../vx_python3_utils/python3_script.cc | 34 +++++++++++++++---- .../libcode/vx_python3_utils/python3_script.h | 12 ++++++- .../tools/other/ascii2nc/python_handler.cc | 2 +- 3 files changed, 40 insertions(+), 8 deletions(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 9a3c2ccaf9..ffef0ef99d 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -77,8 +77,12 @@ void Python3_Script::clear() Module = 0; +ModuleAscii = 0; + Dict = 0; +DictAscii = 0; + Script_Filename.clear(); @@ -242,7 +246,7 @@ return; //////////////////////////////////////////////////////////////////////// -void Python3_Script::import_read_tmp_ascii_py(void) const +void Python3_Script::import_read_tmp_ascii_py(void) { @@ -266,11 +270,30 @@ run_python_string(command.text()); mlog << Debug(2) << "Importing " << module << "\n"; -command << cs_erase << "import read_tmp_ascii"; +ConcatString path = "read_tmp_ascii"; -mlog << Debug(3) << command << "\n"; +ModuleAscii = PyImport_ImportModule(path.text()); -run_python_string(command.text()); +if ( ! 
ModuleAscii ) { + + PyErr_Print(); + mlog << Error << "\nPython3_Script::Python3_Script(const char *) -> " + << "unable to import module \"" << path << "\"\n\n"; + + Py_Finalize(); + + exit ( 1 ); + +} + +DictAscii = PyModule_GetDict (ModuleAscii); + + // + // done + // + +fflush(stdout); +fflush(stderr); } @@ -293,8 +316,7 @@ mlog << Debug(3) << command << "\n"; PyErr_Clear(); -PyObject * pobj; - +// PyObject * pobj; // pobj = run(command.text()); run_python_string(command.text()); diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 5a765aeabb..382bad9b58 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -41,6 +41,10 @@ class Python3_Script { PyObject * Dict; // script dictionary, not allocated + PyObject * ModuleAscii; + + PyObject * DictAscii; + ConcatString Script_Filename; @@ -62,6 +66,8 @@ class Python3_Script { PyObject * module(); PyObject * dict(); + PyObject * module_ascii(); + PyObject * dict_ascii(); // // do stuff @@ -77,7 +83,7 @@ class Python3_Script { void read_pickle (const char * variable_name, const char * pickle_filename) const; - void import_read_tmp_ascii_py (void) const; + void import_read_tmp_ascii_py (void); void read_tmp_ascii (const char * tmp_filename) const; }; @@ -90,6 +96,10 @@ inline PyObject * Python3_Script::module() { return ( Module ); } inline PyObject * Python3_Script::dict() { return ( Dict ); } +inline PyObject * Python3_Script::module_ascii() { return ( ModuleAscii ); } + +inline PyObject * Python3_Script::dict_ascii() { return ( DictAscii ); } + inline ConcatString Python3_Script::filename() const { return ( Script_Filename ); } diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 35e697045e..15aba246b8 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -386,7 +386,7 @@ script.import_read_tmp_ascii_py(); script.read_pickle(list_name, pickle_path.text()); -script.read_tmp_ascii(tmp_ascii_path.text()); +// script.read_tmp_ascii(tmp_ascii_path.text()); PyObject * obj = script.lookup(list_name); From 31ae2e459821b066b23affef003d9fb48f681877 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 5 Mar 2021 09:45:15 -0700 Subject: [PATCH 52/86] Restored read_tmp_ascii call. --- met/src/libcode/vx_python3_utils/python3_script.cc | 10 +++++----- met/src/tools/other/ascii2nc/python_handler.cc | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index ffef0ef99d..b4f20f1bcd 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -286,7 +286,7 @@ if ( ! 
ModuleAscii ) { } -DictAscii = PyModule_GetDict (ModuleAscii); +DictAscii = PyModule_GetDict(ModuleAscii); // // done @@ -308,7 +308,7 @@ mlog << Debug(2) << "Reading temporary ascii file: " ConcatString command; -command << "read_tmp_ascii.read_tmp_ascii(\"" +command << "read_tmp_ascii(\"" << tmp_filename << "\")"; @@ -316,9 +316,9 @@ mlog << Debug(3) << command << "\n"; PyErr_Clear(); -// PyObject * pobj; -// pobj = run(command.text()); -run_python_string(command.text()); +PyObject * pobj; + +pobj = PyRun_String(command.text(), Py_file_input, DictAscii, DictAscii); if ( PyErr_Occurred() ) { diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 15aba246b8..35e697045e 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -386,7 +386,7 @@ script.import_read_tmp_ascii_py(); script.read_pickle(list_name, pickle_path.text()); -// script.read_tmp_ascii(tmp_ascii_path.text()); +script.read_tmp_ascii(tmp_ascii_path.text()); PyObject * obj = script.lookup(list_name); From f8becb96f090710003d1f0bbf88668dbc54e6936 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 5 Mar 2021 09:52:36 -0700 Subject: [PATCH 53/86] Added lookup into ascii module. --- met/src/libcode/vx_python3_utils/python3_script.cc | 13 +++++++++++++ met/src/libcode/vx_python3_utils/python3_script.h | 2 ++ met/src/tools/other/ascii2nc/python_handler.cc | 2 ++ 3 files changed, 17 insertions(+) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index b4f20f1bcd..0373fb5ec6 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -165,6 +165,19 @@ return ( var ); } +//////////////////////////////////////////////////////////////////////// + +PyObject * Python3_Script::lookup_ascii(const char * name) const + +{ + +PyObject * var = 0; + +var = PyDict_GetItemString (DictAscii, name); + +return ( var ); + +} //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 382bad9b58..42d3134492 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -79,6 +79,8 @@ class Python3_Script { PyObject * lookup(const char * name) const; + PyObject * lookup_ascii(const char * name) const; + PyObject * run(const char * command) const; // runs a command in the namespace of the script void read_pickle (const char * variable_name, const char * pickle_filename) const; diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 35e697045e..fac3effc11 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -390,6 +390,8 @@ script.read_tmp_ascii(tmp_ascii_path.text()); PyObject * obj = script.lookup(list_name); +// PyObject * obj = script.lookup_ascii(list_name); + if ( ! 
PyList_Check(obj) ) { mlog << Error << "\nPythonHandler::do_pickle() -> " From 922484461f32a35b15591817a9c4e06539bb9ff9 Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Fri, 5 Mar 2021 15:49:53 -0700 Subject: [PATCH 54/86] Adding files for ReadTheDocs --- met/docs/requirements.txt | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/met/docs/requirements.txt b/met/docs/requirements.txt index f6bdb82841..b323295f77 100644 --- a/met/docs/requirements.txt +++ b/met/docs/requirements.txt @@ -1,11 +1,11 @@ -sphinx -sphinx-gallery -sphinx-rtd-theme -sphinxcontrib-applehelp +sphinx==2.4.4 +sphinx-gallery==0.7.0 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 sphinxcontrib-bibtex -sphinxcontrib-devhelp -sphinxcontrib-htmlhelp -sphinxcontrib-jsmath -sphinxcontrib-qthelp -sphinxcontrib-serializinghtml +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 From 1b41a0aab645933595c1cd6d1240011ad23a8f7c Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Fri, 5 Mar 2021 15:53:00 -0700 Subject: [PATCH 55/86] Adding .yaml file for ReadTheDocs --- .readthedocs.yaml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..c6dfdc81d4 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,22 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build all formats (htmlzip, pdf, epub) +formats: all + +# Optionally set the version of Python and requirements required to build your +# docs +python: + version: 3.7 + install: + - requirements: docs/requirements.txt + +# Configuration for Sphinx documentation (this is the default documentation +# type) +sphinx: + builder: html + configuration: conf.py \ No newline at end of file From 8382b33bb8662876a62ea5e8c9b2788df87a70fd Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Fri, 5 Mar 2021 16:07:20 -0700 Subject: [PATCH 56/86] Updated path to requirements.txt file --- .readthedocs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index c6dfdc81d4..751fd6e5e6 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,7 +13,7 @@ formats: all python: version: 3.7 install: - - requirements: docs/requirements.txt + - requirements: met/docs/requirements.txt # Configuration for Sphinx documentation (this is the default documentation # type) From 0303f1f3740c59dee3a959ea7deaad32abaec90d Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Fri, 5 Mar 2021 16:12:21 -0700 Subject: [PATCH 57/86] Updated path to conf.py file --- .readthedocs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 751fd6e5e6..36014c884e 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -19,4 +19,4 @@ python: # type) sphinx: builder: html - configuration: conf.py \ No newline at end of file + configuration: met/docs/conf.py \ No newline at end of file From 99a63631a6828b6f0a37e4becd7820cd1511136c Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Fri, 5 Mar 2021 16:35:47 -0700 Subject: [PATCH 58/86] Removing ReadTheDocs files and working in separate branch --- .readthedocs.yaml | 22 ---------------------- met/docs/requirements.txt | 11 ----------- 2 files changed, 33 
deletions(-) delete mode 100644 .readthedocs.yaml delete mode 100644 met/docs/requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml deleted file mode 100644 index 36014c884e..0000000000 --- a/.readthedocs.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# .readthedocs.yaml -# Read the Docs configuration file -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required -version: 2 - -# Build all formats (htmlzip, pdf, epub) -formats: all - -# Optionally set the version of Python and requirements required to build your -# docs -python: - version: 3.7 - install: - - requirements: met/docs/requirements.txt - -# Configuration for Sphinx documentation (this is the default documentation -# type) -sphinx: - builder: html - configuration: met/docs/conf.py \ No newline at end of file diff --git a/met/docs/requirements.txt b/met/docs/requirements.txt deleted file mode 100644 index b323295f77..0000000000 --- a/met/docs/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -sphinx==2.4.4 -sphinx-gallery==0.7.0 -sphinx-rtd-theme==0.4.3 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-bibtex -sphinxcontrib-devhelp==1.0.2 -sphinxcontrib-htmlhelp==1.0.3 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.3 -sphinxcontrib-serializinghtml==1.1.4 - From b0c8813be10f6a17732eed393ecd0fc01aff8359 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sat, 6 Mar 2021 19:12:42 -0700 Subject: [PATCH 59/86] Return PyObject* from read_tmp_ascii. --- met/src/libcode/vx_python3_utils/python3_script.cc | 9 ++++++++- met/src/libcode/vx_python3_utils/python3_script.h | 2 +- met/src/tools/other/ascii2nc/python_handler.cc | 8 ++++---- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 0373fb5ec6..7bd567ae2c 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -312,7 +312,7 @@ fflush(stderr); //////////////////////////////////////////////////////////////////////// -void Python3_Script::read_tmp_ascii(const char * tmp_filename) const +PyObject* Python3_Script::read_tmp_ascii(const char * tmp_filename) const { @@ -341,6 +341,13 @@ if ( PyErr_Occurred() ) { exit ( 1 ); } +PyTypeObject* type = pobj->ob_type; + +const char* p = type->tp_name; + +mlog << Debug(2) << "read_tmp_ascii return type: " << p << "\n"; + +return pobj; } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 42d3134492..6930d226a5 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -87,7 +87,7 @@ class Python3_Script { void import_read_tmp_ascii_py (void); - void read_tmp_ascii (const char * tmp_filename) const; + PyObject * read_tmp_ascii (const char * tmp_filename) const; }; diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index fac3effc11..189a17ea13 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -153,7 +153,7 @@ if ( ! 
PyList_Check(obj) ) { mlog << Error << "\nPythonHandler::load_python_obs(PyObject *) -> " << "given object not a list!\n\n"; - exit ( 1 ); + // exit ( 1 ); } @@ -386,9 +386,9 @@ script.import_read_tmp_ascii_py(); script.read_pickle(list_name, pickle_path.text()); -script.read_tmp_ascii(tmp_ascii_path.text()); +PyObject * obj = script.read_tmp_ascii(tmp_ascii_path.text()); -PyObject * obj = script.lookup(list_name); +// PyObject * obj = script.lookup(list_name); // PyObject * obj = script.lookup_ascii(list_name); @@ -397,7 +397,7 @@ if ( ! PyList_Check(obj) ) { mlog << Error << "\nPythonHandler::do_pickle() -> " << "pickle object is not a list!\n\n"; - exit ( 1 ); + // exit ( 1 ); } From bd9ed77aabf08b300abff1a363535c6ddbad929f Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 7 Mar 2021 07:30:50 -0700 Subject: [PATCH 60/86] Put point_data in global namespace. --- met/data/wrappers/read_tmp_ascii.py | 8 ++++++-- met/src/tools/other/ascii2nc/python_handler.cc | 8 ++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/met/data/wrappers/read_tmp_ascii.py b/met/data/wrappers/read_tmp_ascii.py index 126150b168..b4f4303044 100644 --- a/met/data/wrappers/read_tmp_ascii.py +++ b/met/data/wrappers/read_tmp_ascii.py @@ -18,6 +18,8 @@ import argparse +point_data = None + def read_tmp_ascii(filename): """ Arguments: @@ -26,13 +28,15 @@ def read_tmp_ascii(filename): Returns: (list of lists): point data """ + print('read_tmp_ascii:' + filename) f = open(filename, 'r') lines = f.readlines() f.close() - data = [eval(line.strip('\n')) for line in lines] + global point_data + point_data = [eval(line.strip('\n')) for line in lines] - return data + return point_data if __name__ == '__main__': """ diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 189a17ea13..d1b410523f 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -153,7 +153,7 @@ if ( ! PyList_Check(obj) ) { mlog << Error << "\nPythonHandler::load_python_obs(PyObject *) -> " << "given object not a list!\n\n"; - // exit ( 1 ); + exit ( 1 ); } @@ -386,18 +386,18 @@ script.import_read_tmp_ascii_py(); script.read_pickle(list_name, pickle_path.text()); -PyObject * obj = script.read_tmp_ascii(tmp_ascii_path.text()); +PyObject * dobj = script.read_tmp_ascii(tmp_ascii_path.text()); // PyObject * obj = script.lookup(list_name); -// PyObject * obj = script.lookup_ascii(list_name); +PyObject * obj = script.lookup_ascii(list_name); if ( ! PyList_Check(obj) ) { mlog << Error << "\nPythonHandler::do_pickle() -> " << "pickle object is not a list!\n\n"; - // exit ( 1 ); + exit ( 1 ); } From b358bedd65b51432c02c88056170d14a3b026939 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 7 Mar 2021 08:21:12 -0700 Subject: [PATCH 61/86] Remove temporary ascii file. 
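The net effect of this part of the series is to drop the pickle exchange in favor of a
plain temporary ASCII file: write_tmp_point.py dumps the user's point_data list one
record per line, ascii2nc runs read_tmp_ascii.py to evaluate those lines back into a
list, and the temporary file is removed once the observations are loaded. A minimal
sketch of that round trip, for reference only: the file name and the sample record
below are illustrative placeholders, and the wrappers themselves use eval() where this
sketch uses the safer ast.literal_eval().

    import ast

    # stand-in for the temporary file name generated by ascii2nc
    tmp_filename = 'tmp_ascii2nc.txt'

    # hypothetical point_data list from a user's script: one observation per record
    point_data = [
        ['ADPSFC', 'KDEN', '20210307_120000', 39.8, -104.7, 1640.0,
         'TMP', 2.0, 'NA', 'NA', 273.15],
    ]

    # write_tmp_point.py side: render each record as a Python literal on its own line
    with open(tmp_filename, 'w') as f:
        for line in point_data:
            f.write(str(line) + '\n')

    # read_tmp_ascii.py side: evaluate each line back into a list of lists
    with open(tmp_filename, 'r') as f:
        point_data_in = [ast.literal_eval(line.strip()) for line in f]

    assert point_data_in == point_data

Because each record is stored as a readable Python literal, the temporary file can
also be inspected by hand when a run fails.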
--- met/data/wrappers/Makefile.am | 3 +- met/data/wrappers/write_tmp_point.py | 32 +++++++++++++++++++ met/src/tools/other/ascii2nc/ascii2nc.cc | 1 + .../tools/other/ascii2nc/python_handler.cc | 9 +++--- 4 files changed, 39 insertions(+), 6 deletions(-) create mode 100644 met/data/wrappers/write_tmp_point.py diff --git a/met/data/wrappers/Makefile.am b/met/data/wrappers/Makefile.am index a8f464313f..5061e51d51 100644 --- a/met/data/wrappers/Makefile.am +++ b/met/data/wrappers/Makefile.am @@ -27,7 +27,8 @@ wrappers_DATA = \ write_tmp_dataplane.py \ write_pickle_mpr.py \ read_tmp_ascii.py \ - write_pickle_point.py + write_pickle_point.py \ + write_tmp_point.py EXTRA_DIST = ${wrappers_DATA} diff --git a/met/data/wrappers/write_tmp_point.py b/met/data/wrappers/write_tmp_point.py new file mode 100644 index 0000000000..4bbd046122 --- /dev/null +++ b/met/data/wrappers/write_tmp_point.py @@ -0,0 +1,32 @@ +######################################################################## +# +# Adapted from a script provided by George McCabe +# Adapted by Randy Bullock +# +# usage: /path/to/python write_tmp_point.py \ +# tmp_ascii_output_filename .py +# +######################################################################## + +import os +import sys +import importlib.util + +print('Python Script:\t', sys.argv[0]) +print('User Command:\t', sys.argv[2:]) +print('Write Temporary Ascii:\t', sys.argv[1]) + +tmp_filename = sys.argv[1] + '.txt' + +pyembed_module_name = sys.argv[2] +sys.argv = sys.argv[2:] + +user_base = os.path.basename(pyembed_module_name).replace('.py','') + +spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) +met_in = importlib.util.module_from_spec(spec) +spec.loader.exec_module(met_in) + +f = open(tmp_filename, 'w') +for line in met_in.point_data: + f.write(str(line) + '\n') diff --git a/met/src/tools/other/ascii2nc/ascii2nc.cc b/met/src/tools/other/ascii2nc/ascii2nc.cc index 4ced5397b1..360329659c 100644 --- a/met/src/tools/other/ascii2nc/ascii2nc.cc +++ b/met/src/tools/other/ascii2nc/ascii2nc.cc @@ -43,6 +43,7 @@ // 015 03-20-19 Fillmore Add aeronetv2 and aeronetv3 options. // 016 01-30-20 Bullock Add python option. // 017 01-25-21 Halley Gotway MET #1630 Handle zero obs. +// 018 03-01-21 Fillmore Replace pickle files for temporary ascii. 
// //////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index d1b410523f..522e877bce 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -27,9 +27,8 @@ using namespace std; static const char generic_python_wrapper [] = "generic_python"; -static const char generic_pickle_wrapper [] = "generic_pickle"; -static const char write_pickle_wrapper [] = "MET_BASE/wrappers/write_pickle_point.py"; +static const char write_pickle_wrapper [] = "MET_BASE/wrappers/write_tmp_point.py"; static const char list_name [] = "point_data"; @@ -378,13 +377,13 @@ if ( status ) { ConcatString wrapper; -wrapper = generic_pickle_wrapper; +wrapper = generic_python_wrapper; Python3_Script script(wrapper.text()); script.import_read_tmp_ascii_py(); -script.read_pickle(list_name, pickle_path.text()); +// script.read_pickle(list_name, pickle_path.text()); PyObject * dobj = script.read_tmp_ascii(tmp_ascii_path.text()); @@ -407,7 +406,7 @@ load_python_obs(obj); // cleanup // -remove_temp_file(pickle_path); +remove_temp_file(tmp_ascii_path); // // done From 450617347eff5b10d4a992ed0835329857ee7291 Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Sun, 7 Mar 2021 08:33:16 -0700 Subject: [PATCH 62/86] Added tmp_ascii_path. --- met/data/wrappers/write_tmp_point.py | 2 +- .../tools/other/ascii2nc/python_handler.cc | 20 ++++++++----------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/met/data/wrappers/write_tmp_point.py b/met/data/wrappers/write_tmp_point.py index 4bbd046122..94f56cd3dd 100644 --- a/met/data/wrappers/write_tmp_point.py +++ b/met/data/wrappers/write_tmp_point.py @@ -16,7 +16,7 @@ print('User Command:\t', sys.argv[2:]) print('Write Temporary Ascii:\t', sys.argv[1]) -tmp_filename = sys.argv[1] + '.txt' +tmp_filename = sys.argv[1] pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 522e877bce..4cdd1a69be 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -28,11 +28,11 @@ using namespace std; static const char generic_python_wrapper [] = "generic_python"; -static const char write_pickle_wrapper [] = "MET_BASE/wrappers/write_tmp_point.py"; +static const char write_tmp_ascii_wrapper[] = "MET_BASE/wrappers/write_tmp_point.py"; static const char list_name [] = "point_data"; -static const char pickle_base_name [] = "tmp_ascii2nc_pickle"; +static const char tmp_base_name [] = "tmp_ascii2nc"; //////////////////////////////////////////////////////////////////////// @@ -345,17 +345,17 @@ if ( ! 
tmp_dir ) tmp_dir = default_tmp_dir; path << cs_erase << tmp_dir << '/' - << pickle_base_name; + << tmp_base_name; -pickle_path = make_temp_file_name(path.text(), 0); +// pickle_path = make_temp_file_name(path.text(), 0); tmp_ascii_path = make_temp_file_name(path.text(), 0); tmp_ascii_path << ".txt"; command << cs_erase - << user_path_to_python << ' ' // user's path to python - << replace_path(write_pickle_wrapper) << ' ' // write_pickle.py - << pickle_path << ' ' // pickle output filename - << user_script_filename; // user's script name + << user_path_to_python << ' ' // user's path to python + << replace_path(write_tmp_ascii_wrapper) << ' ' // write_tmp_point.py + << tmp_ascii_path << ' ' // pickle output filename + << user_script_filename; // user's script name for (j=0; j Date: Sun, 7 Mar 2021 08:42:08 -0700 Subject: [PATCH 63/86] Removed read_obs_from_pickle. --- .../tools/other/ascii2nc/python_handler.cc | 22 +++++++++---------- met/src/tools/other/ascii2nc/python_handler.h | 10 ++++----- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index 4cdd1a69be..d894ab6c64 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -56,7 +56,7 @@ PythonHandler::PythonHandler(const string &program_name) : FileHandler(program_n { -use_pickle = false; +use_tmp_ascii = false; } @@ -81,13 +81,13 @@ for (j=1; j<(a.n()); ++j) { // j starts at one here, not zero } -use_pickle = false; +use_tmp_ascii = false; const char * c = getenv(user_python_path_env); if ( c ) { - use_pickle = true; + use_tmp_ascii = true; user_path_to_python = c; @@ -230,7 +230,7 @@ bool PythonHandler::readAsciiFiles(const vector< ConcatString > &ascii_filename_ bool status = false; -if ( use_pickle ) status = do_pickle (); +if ( use_tmp_ascii ) status = do_tmp_ascii (); else status = do_straight (); return ( status ); @@ -319,10 +319,10 @@ return ( true ); // - // wrapper usage: /path/to/python wrapper.py pickle_output_filename user_script_name [ user_script args ... ] + // wrapper usage: /path/to/python wrapper.py tmp_output_filename user_script_name [ user_script args ... ] // -bool PythonHandler::do_pickle() +bool PythonHandler::do_tmp_ascii() { @@ -330,7 +330,6 @@ int j; const int N = user_script_args.n(); ConcatString command; ConcatString path; -ConcatString pickle_path; ConcatString tmp_ascii_path; const char * tmp_dir = 0; int status; @@ -347,14 +346,13 @@ path << cs_erase << tmp_dir << '/' << tmp_base_name; -// pickle_path = make_temp_file_name(path.text(), 0); tmp_ascii_path = make_temp_file_name(path.text(), 0); tmp_ascii_path << ".txt"; command << cs_erase << user_path_to_python << ' ' // user's path to python << replace_path(write_tmp_ascii_wrapper) << ' ' // write_tmp_point.py - << tmp_ascii_path << ' ' // pickle output filename + << tmp_ascii_path << ' ' // temporary ascii output filename << user_script_filename; // user's script name for (j=0; j " + mlog << Error << "\nPythonHandler::do_tmp_ascii() -> " << "command \"" << command.text() << "\" failed ... status = " << status << "\n\n"; @@ -389,8 +387,8 @@ PyObject * obj = script.lookup_ascii(list_name); if ( ! 
PyList_Check(obj) ) { - mlog << Error << "\nPythonHandler::do_pickle() -> " - << "pickle object is not a list!\n\n"; + mlog << Error << "\nPythonHandler::do_tmp_ascii() -> " + << "tmp ascii object is not a list!\n\n"; exit ( 1 ); diff --git a/met/src/tools/other/ascii2nc/python_handler.h b/met/src/tools/other/ascii2nc/python_handler.h index abae8ddd5d..b0fb2ef492 100644 --- a/met/src/tools/other/ascii2nc/python_handler.h +++ b/met/src/tools/other/ascii2nc/python_handler.h @@ -50,9 +50,9 @@ class PythonHandler : public FileHandler static string getFormatString() { return "python"; } - bool use_pickle; + bool use_tmp_ascii; - ConcatString user_path_to_python; // if we're using pickle + ConcatString user_path_to_python; // if we're using temporary ascii ConcatString user_script_filename; @@ -68,15 +68,13 @@ class PythonHandler : public FileHandler virtual bool readAsciiFiles(const vector< ConcatString > &ascii_filename_list); - bool do_pickle (); - bool do_straight (); // straight-up python, no pickle + bool do_tmp_ascii(); + bool do_straight (); // straight-up python, no temporary ascii void load_python_obs(PyObject *); // python object is list of lists bool read_obs_from_script (const char * script_name, const char * variable_name); - - bool read_obs_from_pickle (const char * pickle_name, const char * variable_name); }; From 592c93714f9c9b4b8d3bcb00eadca58c2c8edd56 Mon Sep 17 00:00:00 2001 From: jprestop Date: Mon, 8 Mar 2021 18:15:41 -0700 Subject: [PATCH 64/86] Trying different options for formats (#1702) --- .readthedocs.yaml | 23 +++++++++++++++++++++++ met/docs/requirements.txt | 10 ++++++++++ 2 files changed, 33 insertions(+) create mode 100644 .readthedocs.yaml create mode 100644 met/docs/requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..e148a2aad0 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,23 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build all formats (htmlzip, pdf, epub) +#formats: all +formats: [] + +# Optionally set the version of Python and requirements required to build your +# docs +python: + version: 3.7 + install: + - requirements: met/docs/requirements.txt + +# Configuration for Sphinx documentation (this is the default documentation +# type) +sphinx: + builder: html + configuration: met/docs/conf.py \ No newline at end of file diff --git a/met/docs/requirements.txt b/met/docs/requirements.txt new file mode 100644 index 0000000000..87ac8f9656 --- /dev/null +++ b/met/docs/requirements.txt @@ -0,0 +1,10 @@ +sphinx==2.4.4 +sphinx-gallery==0.7.0 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-bibtex +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 From d80aafa54c59e69d63f9f97bb30c508545a4ef2f Mon Sep 17 00:00:00 2001 From: johnhg Date: Wed, 10 Mar 2021 10:56:52 -0700 Subject: [PATCH 65/86] Per #1706, add bugfix to the develop branch. Also add a new job to unit_stat_analysis.xml to test out the aggregation of the ECNT line type. This will add new unit test output and cause the NB to fail. 
(#1708) --- .../tools/core/stat_analysis/aggr_stat_line.cc | 1 + test/xml/unit_stat_analysis.xml | 16 ++++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/met/src/tools/core/stat_analysis/aggr_stat_line.cc b/met/src/tools/core/stat_analysis/aggr_stat_line.cc index db7cd98ba9..bb9819634f 100644 --- a/met/src/tools/core/stat_analysis/aggr_stat_line.cc +++ b/met/src/tools/core/stat_analysis/aggr_stat_line.cc @@ -2597,6 +2597,7 @@ void aggr_ecnt_lines(LineDataFile &f, STATAnalysisJob &job, m[key].ens_pd.var_oerr_na.add(square(cur.spread_oerr)); m[key].ens_pd.var_plus_oerr_na.add(square(cur.spread_plus_oerr)); m[key].ens_pd.wgt_na.add(cur.total); + m[key].ens_pd.skip_ba.add(false); // // Store the summary statistics diff --git a/test/xml/unit_stat_analysis.xml b/test/xml/unit_stat_analysis.xml index 0d5fa6edc4..799011f8a6 100644 --- a/test/xml/unit_stat_analysis.xml +++ b/test/xml/unit_stat_analysis.xml @@ -74,6 +74,22 @@ + + &MET_BIN;/stat_analysis + \ + -lookin &OUTPUT_DIR;/ensemble_stat/ensemble_stat_SKIP_CONST_20120410_120000V.stat \ + -job aggregate -line_type ECNT -by FCST_VAR -obs_thresh NA -vx_mask NWC,GRB \ + -dump_row &OUTPUT_DIR;/stat_analysis/AGG_ECNT_dump.stat \ + -out_stat &OUTPUT_DIR;/stat_analysis/AGG_ECNT_out.stat \ + -set_hdr VX_MASK NWC_AND_GRB \ + -v 1 + + + &OUTPUT_DIR;/stat_analysis/AGG_ECNT_dump.stat + &OUTPUT_DIR;/stat_analysis/AGG_ECNT_out.stat + + + &MET_BIN;/stat_analysis \ From 6ed8fc4fa40497da5dad10836b0738945a66ce5c Mon Sep 17 00:00:00 2001 From: johnhg Date: Wed, 10 Mar 2021 14:55:23 -0700 Subject: [PATCH 66/86] Feature 1471 python_grid (#1704) * Per #1471, defined a parse_grid_string() function in the vx_statistics library and then updated vx_data2d_python to call that function. However, this creates a circular dependency because vx_data2d_python now depends on vx_statistics. * Per #1471, because of the change in dependencies, I had to modify many, many Makefile.am files to link to the -lvx_statistics after -lvx_data2d_python. This is not great, but I didn't find a better solution. * Per #1471, add a sanity check to make sure the grid and data dimensions actually match. * Per #1471, add 3 new unit tests to demonstrate setting the python grid as a named grid, grid specification string, or a gridded data file. * Per #1471, document python grid changes in appendix F. * Per #1471, just spacing. * Per #1471, lots of Makefile.am changes to get this code to compile on kiowa. Worringly, it compiled and linked fine on my Mac laptop but not on kiowa. Must be some large differences in the linker logic. 
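For downstream users the practical effect is that the 'grid' entry in a python
embedding script's attrs dictionary no longer has to spell out a full projection
dictionary. A minimal sketch of an attrs block using the new string-valued forms
(the metadata values are placeholders patterned after the bundled
read_ascii_numpy_grid.py example; the 'name' value in particular is made up):

    # illustrative attrs dictionary for python embedding
    attrs = {
        'valid':     '20050807_120000',
        'init':      '20050807_000000',
        'lead':      '120000',
        'accum':     '120000',

        'name':      'Foo',
        'long_name': 'FooBar',
        'level':     'Surface',
        'units':     'None',

        # any one of the three supported string forms works here:
        #   'G212'                                                       (named grid)
        #   'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N'   (grid specification string)
        #   '/path/to/sample_data.grib'                                  (existing gridded data file)
        'grid': 'G212',
    }

The new check in dataplane_from_numpy_array() exits with an error when the met_data
dimensions do not match the grid resolved from this entry, so a mismatch is caught
up front.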
Co-authored-by: John Halley Gotway --- met/docs/Users_Guide/appendixF.rst | 30 +++++++-- met/internal_tests/basic/vx_util/Makefile.am | 30 ++++++--- .../libcode/vx_data2d_factory/Makefile.am | 11 +++- .../libcode/vx_nc_util/Makefile.am | 29 ++++++--- .../libcode/vx_tc_util/Makefile.am | 63 +++++++++++++------ .../tools/other/mode_time_domain/Makefile.am | 2 + met/scripts/python/Makefile.am | 1 + met/scripts/python/read_ascii_numpy_grid.py | 53 ++++++++++++++++ .../dataplane_from_numpy_array.cc | 39 +++++++++++- .../libcode/vx_python3_utils/python3_dict.cc | 19 ++++-- .../libcode/vx_python3_utils/python3_dict.h | 2 +- met/src/libcode/vx_statistics/apply_mask.cc | 45 +++++++++++++ met/src/libcode/vx_statistics/apply_mask.h | 2 + met/src/tools/core/ensemble_stat/Makefile.am | 1 + met/src/tools/core/grid_stat/Makefile.am | 1 + met/src/tools/core/mode/Makefile.am | 1 + met/src/tools/core/mode_analysis/Makefile.am | 13 ++-- met/src/tools/core/pcp_combine/Makefile.am | 2 + met/src/tools/core/point_stat/Makefile.am | 1 + .../tools/core/series_analysis/Makefile.am | 1 + met/src/tools/core/stat_analysis/Makefile.am | 9 +-- met/src/tools/core/wavelet_stat/Makefile.am | 1 + met/src/tools/dev_utils/Makefile.am | 1 + met/src/tools/other/ascii2nc/Makefile.am | 1 + met/src/tools/other/gen_vx_mask/Makefile.am | 2 + met/src/tools/other/grid_diag/Makefile.am | 1 + met/src/tools/other/gsi_tools/Makefile.am | 33 +++++----- met/src/tools/other/ioda2nc/Makefile.am | 1 + met/src/tools/other/lidar2nc/Makefile.am | 1 + met/src/tools/other/madis2nc/Makefile.am | 1 + met/src/tools/other/mode_graphics/Makefile.am | 1 + .../tools/other/mode_time_domain/Makefile.am | 1 + met/src/tools/other/modis_regrid/Makefile.am | 2 + met/src/tools/other/pb2nc/Makefile.am | 1 + .../tools/other/plot_data_plane/Makefile.am | 2 + .../tools/other/plot_point_obs/Makefile.am | 1 + met/src/tools/other/point2grid/Makefile.am | 1 + .../tools/other/regrid_data_plane/Makefile.am | 1 + .../tools/other/shift_data_plane/Makefile.am | 1 + met/src/tools/other/wwmca_tool/Makefile.am | 1 + .../tools/tc_utils/rmw_analysis/Makefile.am | 1 + met/src/tools/tc_utils/tc_gen/Makefile.am | 1 + met/src/tools/tc_utils/tc_pairs/Makefile.am | 1 + met/src/tools/tc_utils/tc_rmw/Makefile.am | 1 + met/src/tools/tc_utils/tc_stat/Makefile.am | 1 + test/xml/unit_python.xml | 62 ++++++++++++++++++ 46 files changed, 394 insertions(+), 82 deletions(-) create mode 100755 met/scripts/python/read_ascii_numpy_grid.py diff --git a/met/docs/Users_Guide/appendixF.rst b/met/docs/Users_Guide/appendixF.rst index e122d6a560..a5e34df338 100644 --- a/met/docs/Users_Guide/appendixF.rst +++ b/met/docs/Users_Guide/appendixF.rst @@ -65,7 +65,9 @@ The data must be loaded into a 2D NumPy array named **met_data**. In addition th 'long_name': 'FooBar', 'level': 'Surface', 'units': 'None', - + + # Define 'grid' as a string or a dictionary + 'grid': { 'type': 'Lambert Conformal', 'hemisphere': 'N', @@ -83,12 +85,32 @@ The data must be loaded into a 2D NumPy array named **met_data**. In addition th 'ny': 129, } - } + } + + +In the dictionary, valid time, initialization time, lead time and accumulation time (if any) must be indicated by strings. Valid and initialization times must be given in YYYYMMDD[_HH[MMSS]] format, and lead and accumulation times must be given in HH[MMSS] format, where the square brackets indicate optional elements. The dictionary must also include strings for the name, long_name, level, and units to describe the data. 
The rest of the **attrs** dictionary gives the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. Those entries are also listed below. Note that the **grid** entry in the **attrs** dictionary can either be defined as a string or as a dictionary itself. + +If specified as a string, **grid** can be defined as follows: + +• As a named grid: + +.. code-block:: none + 'grid': 'G212' + +• As a grid specification string, as described in :ref:`appendixB`: + +.. code-block:: none + + 'grid': 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N' + +• As the path to an existing gridded data file: + +.. code-block:: none -In the dictionary, valid time, initialization time, lead time and accumulation time (if any) must be indicated by strings. Valid and initialization times must be given in YYYYMMDD[_HH[MMSS]] format, and lead and accumulation times must be given in HH[MMSS] format, where the square brackets indicate optional elements. The dictionary must also include strings for the name, long_name, level, and units to describe the data. The rest of the **attrs** dictionary gives the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. Those entries are also listed below. Note that the **grid** entry in the **attrs** dictionary is itself a dictionary. + 'grid': '/path/to/sample_data.grib' -The supported grid **type** strings are described below: +When specified as a dictionary, the contents of the **grid** dictionary vary based on the grid **type** string. The entries for the supported grid types are described below: • **Lambert Conformal** grid dictionary entries: diff --git a/met/internal_tests/basic/vx_util/Makefile.am b/met/internal_tests/basic/vx_util/Makefile.am index 364b846a0b..96afff9e82 100644 --- a/met/internal_tests/basic/vx_util/Makefile.am +++ b/met/internal_tests/basic/vx_util/Makefile.am @@ -82,18 +82,30 @@ endif test_ascii_header_SOURCES = test_ascii_header.cc test_ascii_header_CPPFLAGS = ${MET_CPPFLAGS} test_ascii_header_LDFLAGS = -L. 
${MET_LDFLAGS} -test_ascii_header_LDADD = -lvx_util \ +test_ascii_header_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_shapedata \ + -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ + -lvx_data2d \ + -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ + -lvx_color \ -lvx_log \ - -lgsl -lgslcblas - -if ENABLE_PYTHON -test_ascii_header_LDADD += $(MET_PYTHON_LD) -lvx_data2d_python -lvx_python3_utils -lvx_data2d_python -lvx_python3_utils -lvx_math -test_ascii_header_LDADD += -lvx_data2d_python -lvx_python3_utils -lvx_data2d_python -lvx_python3_utils -test_ascii_header_LDADD += -lvx_math -lvx_grid -lvx_util -lvx_data2d -lvx_config -lvx_gsl_prob -lvx_cal -lvx_math -lvx_util -endif - + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util diff --git a/met/internal_tests/libcode/vx_data2d_factory/Makefile.am b/met/internal_tests/libcode/vx_data2d_factory/Makefile.am index b1d31b3aa4..04a31968c0 100644 --- a/met/internal_tests/libcode/vx_data2d_factory/Makefile.am +++ b/met/internal_tests/libcode/vx_data2d_factory/Makefile.am @@ -25,20 +25,27 @@ test_is_grib_LDADD = -lvx_data2d_factory \ test_factory_SOURCES = test_factory.cc test_factory_CPPFLAGS = ${MET_CPPFLAGS} test_factory_LDFLAGS = -L. ${MET_LDFLAGS} -test_factory_LDADD = -lvx_data2d_factory \ +test_factory_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_shapedata \ + -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_data2d_factory \ -lvx_data2d_nc_met \ -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ + -lvx_cal \ -lvx_util \ -lvx_math \ -lvx_color \ - -lvx_cal \ -lvx_log \ -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas diff --git a/met/internal_tests/libcode/vx_nc_util/Makefile.am b/met/internal_tests/libcode/vx_nc_util/Makefile.am index 13bf6c2bc0..ff40235a5c 100644 --- a/met/internal_tests/libcode/vx_nc_util/Makefile.am +++ b/met/internal_tests/libcode/vx_nc_util/Makefile.am @@ -19,21 +19,32 @@ noinst_PROGRAMS = test_pressure_levels test_pressure_levels_SOURCES = test_pressure_levels.cc test_pressure_levels_CPPFLAGS = ${MET_CPPFLAGS} test_pressure_levels_LDFLAGS = -L. 
${MET_LDFLAGS} -test_pressure_levels_LDADD = -lvx_tc_util \ +test_pressure_levels_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_shapedata \ + -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_tc_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d \ -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ + -lvx_color \ -lvx_log \ - -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util -if ENABLE_PYTHON -test_pressure_levels_LDADD += $(MET_PYTHON_LD) -test_pressure_levels_LDADD += -lvx_data2d_python -lvx_python3_utils -test_pressure_levels_LDADD += -lvx_data2d_python -lvx_python3_utils -test_pressure_levels_LDADD += -lvx_grid -lvx_util -lvx_config -test_pressure_levels_LDADD += -lvx_data2d -lvx_gsl_prob -lvx_util -lvx_math -lvx_cal -lvx_config -endif diff --git a/met/internal_tests/libcode/vx_tc_util/Makefile.am b/met/internal_tests/libcode/vx_tc_util/Makefile.am index 2b71de8a9f..a5fc96c5d0 100644 --- a/met/internal_tests/libcode/vx_tc_util/Makefile.am +++ b/met/internal_tests/libcode/vx_tc_util/Makefile.am @@ -20,39 +20,64 @@ noinst_PROGRAMS = test_read \ test_read_SOURCES = test_read.cc test_read_CPPFLAGS = ${MET_CPPFLAGS} test_read_LDFLAGS = -L. ${MET_LDFLAGS} -test_read_LDADD = -lvx_tc_util \ +test_read_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_shapedata \ + -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_tc_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ + -lvx_data2d \ + -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ + -lvx_color \ -lvx_log \ - -lgsl -lgslcblas - -if ENABLE_PYTHON -test_read_LDADD += $(MET_PYTHON_LD) -test_read_LDADD += -lvx_data2d_python -lvx_python3_utils -test_read_LDADD += -lvx_data2d_python -lvx_python3_utils -test_read_LDADD += -lvx_grid -lvx_util -lvx_config -test_read_LDADD += -lvx_data2d -lvx_gsl_prob -lvx_util -lvx_math -lvx_cal -lvx_config -endif + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util test_read_prob_SOURCES = test_read_prob.cc test_read_prob_CPPFLAGS = ${MET_CPPFLAGS} test_read_prob_LDFLAGS = -L. 
${MET_LDFLAGS} -test_read_prob_LDADD = -lvx_tc_util \ +test_read_prob_LDADD = -lvx_stat_out \ + -lvx_statistics \ + -lvx_shapedata \ + -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_tc_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ + -lvx_data2d_factory \ + -lvx_data2d_nc_met \ + -lvx_data2d_nc_pinterp \ + $(PYTHON_LIBS) \ + -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ + -lvx_data2d \ + -lvx_nc_util \ + -lvx_regrid \ + -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ + -lvx_color \ -lvx_log \ - -lgsl -lgslcblas + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util -if ENABLE_PYTHON -test_read_prob_LDADD += $(MET_PYTHON_LD) -test_read_prob_LDADD += -lvx_data2d_python -lvx_python3_utils -test_read_prob_LDADD += -lvx_data2d_python -lvx_python3_utils -test_read_prob_LDADD += -lvx_grid -lvx_util -lvx_config -test_read_prob_LDADD += -lvx_data2d -lvx_gsl_prob -lvx_util -lvx_math -lvx_cal -lvx_config -endif diff --git a/met/internal_tests/tools/other/mode_time_domain/Makefile.am b/met/internal_tests/tools/other/mode_time_domain/Makefile.am index 2b62114046..693d9de44c 100644 --- a/met/internal_tests/tools/other/mode_time_domain/Makefile.am +++ b/met/internal_tests/tools/other/mode_time_domain/Makefile.am @@ -56,6 +56,7 @@ test_velocity_LDADD = \ -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d_nccf \ -lvx_data2d \ -lvx_nc_util \ @@ -69,3 +70,4 @@ test_velocity_LDADD = \ -lvx_color \ -lvx_log \ -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lz + diff --git a/met/scripts/python/Makefile.am b/met/scripts/python/Makefile.am index b980d2e4ab..78dc7d88bc 100644 --- a/met/scripts/python/Makefile.am +++ b/met/scripts/python/Makefile.am @@ -27,6 +27,7 @@ pythonscriptsdir = $(pkgdatadir)/python pythonscripts_DATA = \ read_ascii_numpy.py \ + read_ascii_numpy_grid.py \ read_ascii_xarray.py \ read_ascii_point.py \ read_ascii_mpr.py diff --git a/met/scripts/python/read_ascii_numpy_grid.py b/met/scripts/python/read_ascii_numpy_grid.py new file mode 100755 index 0000000000..88d868a2ad --- /dev/null +++ b/met/scripts/python/read_ascii_numpy_grid.py @@ -0,0 +1,53 @@ +from __future__ import print_function + +import numpy as np +import os +import sys + +########################################### + +print('Python Script:\t', sys.argv[0]) + + ## + ## input file specified on the command line + ## load the data into the numpy array + ## + +if len(sys.argv) == 3: + # Read the input file as the first argument + input_file = os.path.expandvars(sys.argv[1]) + data_name = sys.argv[2] + try: + # Print some output to verify that this script ran + print("Input File:\t" + repr(input_file)) + print("Data Name:\t" + repr(data_name)) + met_data = np.loadtxt(input_file) + print("Data Shape:\t" + repr(met_data.shape)) + print("Data Type:\t" + repr(met_data.dtype)) + except NameError: + print("Can't find the input file") +else: + print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.") + sys.exit(1) + +########################################### + + ## + ## create the metadata dictionary + ## + +attrs = { + + 'valid': '20050807_120000', + 'init': '20050807_000000', + 'lead': '120000', + 'accum': '120000', + + 'name': data_name, + 'long_name': data_name + '_word', + 'level': 'Surface', + 'units': 'None', + 'grid': os.path.expandvars(os.getenv('PYTHON_GRID')) +} + +print("Attributes:\t" + repr(attrs)) diff --git 
a/met/src/libcode/vx_data2d_python/dataplane_from_numpy_array.cc b/met/src/libcode/vx_data2d_python/dataplane_from_numpy_array.cc index 7462c20829..61f997a267 100644 --- a/met/src/libcode/vx_data2d_python/dataplane_from_numpy_array.cc +++ b/met/src/libcode/vx_data2d_python/dataplane_from_numpy_array.cc @@ -12,6 +12,7 @@ #include "string.h" #include "vx_python3_utils.h" +#include "vx_statistics.h" #include "check_endian.h" #include "data_plane.h" @@ -219,9 +220,43 @@ dp_out.set_accum(t); //////////////////// -PyObject * py_grid = attrs.lookup_dict("grid"); + // + // attempt to parse "grid" as a string + // + +s = attrs.lookup_string("grid", false); + +if ( s.nonempty() ) { + + grid_out = parse_grid_string(s.c_str()); + +} +else { + + // + // otherwise, parse "grid" as a dictionary + // + + PyObject * py_grid = attrs.lookup_dict("grid"); -grid_from_python_dict(Python3_Dict(py_grid), grid_out); + grid_from_python_dict(Python3_Dict(py_grid), grid_out); + +} + + // + // make sure the grid and data dimensions match + // + +if ( grid_out.nx() != Nx || grid_out.ny() != Ny ) { + + mlog << Error << "\ndataplane_from_numpy_array() -> " + << "the grid dimensions (" << grid_out.nx() << ", " + << grid_out.ny() << ") and data dimensions (" << Nx + << ", " << Ny << ") do not match!\n\n"; + + exit ( 1 ); + +} //////////////////// diff --git a/met/src/libcode/vx_python3_utils/python3_dict.cc b/met/src/libcode/vx_python3_utils/python3_dict.cc index 9f6036f801..6d38599aa4 100644 --- a/met/src/libcode/vx_python3_utils/python3_dict.cc +++ b/met/src/libcode/vx_python3_utils/python3_dict.cc @@ -220,7 +220,7 @@ return ( t ); //////////////////////////////////////////////////////////////////////// -ConcatString Python3_Dict::lookup_string(const char * key) const +ConcatString Python3_Dict::lookup_string(const char * key, bool error_out) const { @@ -240,14 +240,23 @@ if ( ! a ) { if ( ! 
PyUnicode_Check(a) ) { - mlog << Error << "\nPython3_Dict::lookup_string(const char * key) -> " - << "value for key \"" << key << "\" not a character string\n\n"; + if ( error_out ) { - exit ( 1 ); + mlog << Error << "\nPython3_Dict::lookup_string(const char * key) -> " + << "value for key \"" << key << "\" not a character string\n\n"; + + exit ( 1 ); + + } + + s.clear(); } +else { -s = PyUnicode_AsUTF8(a); + s = PyUnicode_AsUTF8(a); + +} return ( s ); diff --git a/met/src/libcode/vx_python3_utils/python3_dict.h b/met/src/libcode/vx_python3_utils/python3_dict.h index 7ce77d7b05..34cfd803de 100644 --- a/met/src/libcode/vx_python3_utils/python3_dict.h +++ b/met/src/libcode/vx_python3_utils/python3_dict.h @@ -68,7 +68,7 @@ class Python3_Dict { int lookup_int (const char * key) const; double lookup_double (const char * key) const; - ConcatString lookup_string (const char * key) const; + ConcatString lookup_string (const char * key, bool error_out = true) const; PyObject * lookup_dict (const char * key) const; PyObject * lookup_list (const char * key) const; diff --git a/met/src/libcode/vx_statistics/apply_mask.cc b/met/src/libcode/vx_statistics/apply_mask.cc index f5ea562630..b97e9272d3 100644 --- a/met/src/libcode/vx_statistics/apply_mask.cc +++ b/met/src/libcode/vx_statistics/apply_mask.cc @@ -84,6 +84,51 @@ Grid parse_vx_grid(const RegridInfo info, const Grid *fgrid, const Grid *ogrid) //////////////////////////////////////////////////////////////////////// +Grid parse_grid_string(const char *grid_str) { + Grid grid; + StringArray sa; + + // Parse as a white-space separated string + sa.parse_wsss(grid_str); + + // Search for a named grid + if(sa.n() == 1 && find_grid_by_name(sa[0].c_str(), grid)) { + mlog << Debug(3) << "Use the grid named \"" + << grid_str << "\".\n"; + } + // Parse grid definition + else if(sa.n() > 1 && parse_grid_def(sa, grid)) { + mlog << Debug(3) << "Use the grid defined by string \"" + << grid_str << "\".\n"; + } + // Extract the grid from a gridded data file + else { + mlog << Debug(3) << "Use the grid defined by file \"" + << grid_str << "\".\n"; + + Met2dDataFileFactory m_factory; + Met2dDataFile *met_ptr = (Met2dDataFile *) 0; + + // Open the data file + if(!(met_ptr = m_factory.new_met_2d_data_file(grid_str))) { + mlog << Error << "\nparse_grid_string() -> " + << "can't open file \"" << grid_str + << "\"\n\n"; + exit(1); + } + + // Store the grid + grid = met_ptr->grid(); + + // Cleanup + if(met_ptr) { delete met_ptr; met_ptr = 0; } + } + + return(grid); +} + +//////////////////////////////////////////////////////////////////////// + void parse_grid_weight(const Grid &grid, const GridWeightType t, DataPlane &wgt_dp) { int x, y; diff --git a/met/src/libcode/vx_statistics/apply_mask.h b/met/src/libcode/vx_statistics/apply_mask.h index 0ac9aac85e..5e62500d73 100644 --- a/met/src/libcode/vx_statistics/apply_mask.h +++ b/met/src/libcode/vx_statistics/apply_mask.h @@ -29,6 +29,8 @@ static const char poly_str_delim[] = "{}"; extern Grid parse_vx_grid(const RegridInfo, const Grid *, const Grid *); +extern Grid parse_grid_string(const char *); + extern void parse_grid_weight(const Grid &, const GridWeightType, DataPlane &); diff --git a/met/src/tools/core/ensemble_stat/Makefile.am b/met/src/tools/core/ensemble_stat/Makefile.am index d7cd14c178..172302f19a 100644 --- a/met/src/tools/core/ensemble_stat/Makefile.am +++ b/met/src/tools/core/ensemble_stat/Makefile.am @@ -26,6 +26,7 @@ ensemble_stat_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ 
-lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/core/grid_stat/Makefile.am b/met/src/tools/core/grid_stat/Makefile.am index 8db13c50bf..e1451258f0 100644 --- a/met/src/tools/core/grid_stat/Makefile.am +++ b/met/src/tools/core/grid_stat/Makefile.am @@ -26,6 +26,7 @@ grid_stat_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/core/mode/Makefile.am b/met/src/tools/core/mode/Makefile.am index b3260abef1..662dee58ef 100644 --- a/met/src/tools/core/mode/Makefile.am +++ b/met/src/tools/core/mode/Makefile.am @@ -39,6 +39,7 @@ mode_LDADD = -lvx_pxm \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/core/mode_analysis/Makefile.am b/met/src/tools/core/mode_analysis/Makefile.am index 7305ea30a9..182faf29d1 100644 --- a/met/src/tools/core/mode_analysis/Makefile.am +++ b/met/src/tools/core/mode_analysis/Makefile.am @@ -8,10 +8,6 @@ MAINTAINERCLEANFILES = Makefile.in include ${top_srcdir}/Make-include -if ENABLE_PYTHON -LDFLAGS += -lvx_python3_utils -endif - # The program bin_PROGRAMS = mode_analysis @@ -25,6 +21,9 @@ mode_analysis_LDADD = -lvx_stat_out \ -lvx_gsl_prob \ -lvx_analysis_util \ -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d_factory \ -lvx_data2d_nc_met \ -lvx_data2d_nc_pinterp \ @@ -33,6 +32,7 @@ mode_analysis_LDADD = -lvx_stat_out \ -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d \ -lvx_nc_util \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ @@ -43,9 +43,4 @@ mode_analysis_LDADD = -lvx_stat_out \ -lvx_log \ -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util -if ENABLE_PYTHON -mode_analysis_LDADD += $(MET_PYTHON_LD) -lvx_data2d_python -lvx_python3_utils -lvx_data2d_python -lvx_python3_utils -lvx_util -endif - - EXTRA_DIST = config_to_att.h diff --git a/met/src/tools/core/pcp_combine/Makefile.am b/met/src/tools/core/pcp_combine/Makefile.am index 1a07c63b71..0fa6d2ecc4 100644 --- a/met/src/tools/core/pcp_combine/Makefile.am +++ b/met/src/tools/core/pcp_combine/Makefile.am @@ -20,8 +20,10 @@ pcp_combine_LDADD = -lvx_data2d_factory \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_cal \ diff --git a/met/src/tools/core/point_stat/Makefile.am b/met/src/tools/core/point_stat/Makefile.am index ef64c96fc8..02dbac944a 100644 --- a/met/src/tools/core/point_stat/Makefile.am +++ b/met/src/tools/core/point_stat/Makefile.am @@ -26,6 +26,7 @@ point_stat_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/core/series_analysis/Makefile.am b/met/src/tools/core/series_analysis/Makefile.am index 79a33460f0..26352e71eb 100644 --- a/met/src/tools/core/series_analysis/Makefile.am +++ b/met/src/tools/core/series_analysis/Makefile.am @@ -26,6 +26,7 @@ series_analysis_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/core/stat_analysis/Makefile.am b/met/src/tools/core/stat_analysis/Makefile.am index 909143d9f3..60ca332858 100644 --- a/met/src/tools/core/stat_analysis/Makefile.am +++ b/met/src/tools/core/stat_analysis/Makefile.am @@ -8,19 
+8,13 @@ MAINTAINERCLEANFILES = Makefile.in include ${top_srcdir}/Make-include -OPT_PYTHON_SOURCES = - -if ENABLE_PYTHON -LDFLAGS += -lvx_python3_utils -endif - # The program bin_PROGRAMS = stat_analysis stat_analysis_SOURCES = stat_analysis.cc \ aggr_stat_line.cc \ parse_stat_line.cc \ - stat_analysis_job.cc $(OPT_PYTHON_SOURCES) + stat_analysis_job.cc stat_analysis_CPPFLAGS = ${MET_CPPFLAGS} stat_analysis_LDFLAGS = ${MET_LDFLAGS} stat_analysis_LDADD = -lvx_stat_out \ @@ -34,6 +28,7 @@ stat_analysis_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/core/wavelet_stat/Makefile.am b/met/src/tools/core/wavelet_stat/Makefile.am index 4f6759db27..25150834a5 100644 --- a/met/src/tools/core/wavelet_stat/Makefile.am +++ b/met/src/tools/core/wavelet_stat/Makefile.am @@ -32,6 +32,7 @@ wavelet_stat_LDADD = -lvx_pxm \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/dev_utils/Makefile.am b/met/src/tools/dev_utils/Makefile.am index 03a83fc690..9240cb48bd 100644 --- a/met/src/tools/dev_utils/Makefile.am +++ b/met/src/tools/dev_utils/Makefile.am @@ -127,6 +127,7 @@ gen_climo_bin_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/other/ascii2nc/Makefile.am b/met/src/tools/other/ascii2nc/Makefile.am index 3b4f21b46a..7fce7b81ba 100644 --- a/met/src/tools/other/ascii2nc/Makefile.am +++ b/met/src/tools/other/ascii2nc/Makefile.am @@ -39,6 +39,7 @@ ascii2nc_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ -lvx_data2d_nccf \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/other/gen_vx_mask/Makefile.am b/met/src/tools/other/gen_vx_mask/Makefile.am index 2b52cb7cb6..f8b773183b 100644 --- a/met/src/tools/other/gen_vx_mask/Makefile.am +++ b/met/src/tools/other/gen_vx_mask/Makefile.am @@ -24,6 +24,7 @@ gen_vx_mask_LDADD = -lvx_shapedata \ -lvx_data2d_nc_met \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_gis \ -lvx_nc_util \ -lvx_data2d \ @@ -39,6 +40,7 @@ gen_vx_mask_LDADD = -lvx_shapedata \ -lvx_stat_out \ -lvx_statistics \ -lvx_gsl_prob \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ diff --git a/met/src/tools/other/grid_diag/Makefile.am b/met/src/tools/other/grid_diag/Makefile.am index 877772a982..1d30d1bce4 100644 --- a/met/src/tools/other/grid_diag/Makefile.am +++ b/met/src/tools/other/grid_diag/Makefile.am @@ -26,6 +26,7 @@ grid_diag_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/other/gsi_tools/Makefile.am b/met/src/tools/other/gsi_tools/Makefile.am index 40c550a1c1..1c94d4be2b 100644 --- a/met/src/tools/other/gsi_tools/Makefile.am +++ b/met/src/tools/other/gsi_tools/Makefile.am @@ -8,10 +8,6 @@ MAINTAINERCLEANFILES = Makefile.in include ${top_srcdir}/Make-include -if ENABLE_PYTHON -LDFLAGS += -lvx_data2d_python -lvx_python3_utils -endif - # The programs bin_PROGRAMS = gsid2mpr \ @@ -36,28 +32,31 @@ gsid2mpr_CPPFLAGS = ${MET_CPPFLAGS} gsid2mpr_LDFLAGS = ${MET_LDFLAGS} gsid2mpr_LDADD = -lvx_stat_out \ -lvx_statistics \ + -lvx_shapedata \ -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ 
-lvx_data2d_factory \ -lvx_data2d_nc_met \ - -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ -lvx_grid \ -lvx_config \ + -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ -lvx_color \ -lvx_log \ - -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas - -if ENABLE_PYTHON -gsid2mpr_LDADD += $(MET_PYTHON_LD) -lvx_data2d_python -lvx_python3_utils -lvx_data2d_python -lvx_python3_utils -endif + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util gsidens2orank_SOURCES = gsidens2orank.h \ gsi_record.h \ @@ -78,25 +77,29 @@ gsidens2orank_CPPFLAGS = ${MET_CPPFLAGS} gsidens2orank_LDFLAGS = ${MET_LDFLAGS} gsidens2orank_LDADD = -lvx_stat_out \ -lvx_statistics \ + -lvx_shapedata \ -lvx_gsl_prob \ + -lvx_analysis_util \ + -lvx_shapedata \ + -lvx_util \ + $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d_factory \ -lvx_data2d_nc_met \ - -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ -lvx_grid \ -lvx_config \ + -lvx_gsl_prob \ -lvx_cal \ -lvx_util \ -lvx_math \ -lvx_color \ -lvx_log \ - -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas + -lm -lnetcdf_c++4 -lnetcdf -lgsl -lgslcblas -lvx_util -if ENABLE_PYTHON -gsidens2orank_LDADD += $(MET_PYTHON_LD) -lvx_data2d_python -lvx_python3_utils -lvx_data2d_python -lvx_python3_utils -endif diff --git a/met/src/tools/other/ioda2nc/Makefile.am b/met/src/tools/other/ioda2nc/Makefile.am index e4067043df..83d74ba5d1 100644 --- a/met/src/tools/other/ioda2nc/Makefile.am +++ b/met/src/tools/other/ioda2nc/Makefile.am @@ -26,6 +26,7 @@ ioda2nc_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ -lvx_data2d_nccf \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/other/lidar2nc/Makefile.am b/met/src/tools/other/lidar2nc/Makefile.am index 7bf377b10a..118ea7efd4 100644 --- a/met/src/tools/other/lidar2nc/Makefile.am +++ b/met/src/tools/other/lidar2nc/Makefile.am @@ -26,6 +26,7 @@ lidar2nc_LDADD = -lvx_shapedata \ -lvx_data2d_nc_met \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_nc_obs \ -lvx_nc_util \ -lvx_data2d \ diff --git a/met/src/tools/other/madis2nc/Makefile.am b/met/src/tools/other/madis2nc/Makefile.am index f83950661e..e85cf3c975 100644 --- a/met/src/tools/other/madis2nc/Makefile.am +++ b/met/src/tools/other/madis2nc/Makefile.am @@ -25,6 +25,7 @@ madis2nc_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ -lvx_data2d_nccf \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/other/mode_graphics/Makefile.am b/met/src/tools/other/mode_graphics/Makefile.am index b360dbfb55..fc5e1f530b 100644 --- a/met/src/tools/other/mode_graphics/Makefile.am +++ b/met/src/tools/other/mode_graphics/Makefile.am @@ -27,6 +27,7 @@ plot_mode_field_LDADD = -lvx_config \ -lvx_plot_util \ -lvx_data2d_nc_met \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_grid \ -lvx_nc_util \ -lvx_ps \ diff --git a/met/src/tools/other/mode_time_domain/Makefile.am b/met/src/tools/other/mode_time_domain/Makefile.am index 9b5d9df97f..fde9fbe977 100644 --- a/met/src/tools/other/mode_time_domain/Makefile.am +++ b/met/src/tools/other/mode_time_domain/Makefile.am @@ -55,6 +55,7 @@ mtd_LDADD = -lvx_pxm \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git 
a/met/src/tools/other/modis_regrid/Makefile.am b/met/src/tools/other/modis_regrid/Makefile.am index 843d226bbc..e63afad3f0 100644 --- a/met/src/tools/other/modis_regrid/Makefile.am +++ b/met/src/tools/other/modis_regrid/Makefile.am @@ -36,8 +36,10 @@ modis_regrid_LDADD = -lvx_pxm \ -lvx_data2d_grib $(GRIB2_LIBS) \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ diff --git a/met/src/tools/other/pb2nc/Makefile.am b/met/src/tools/other/pb2nc/Makefile.am index 40ed36c2d2..6eb653a1d0 100644 --- a/met/src/tools/other/pb2nc/Makefile.am +++ b/met/src/tools/other/pb2nc/Makefile.am @@ -42,6 +42,7 @@ pb2nc_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ -lvx_data2d_nccf \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_obs \ -lvx_nc_util \ diff --git a/met/src/tools/other/plot_data_plane/Makefile.am b/met/src/tools/other/plot_data_plane/Makefile.am index 5470661e67..3997aaf742 100644 --- a/met/src/tools/other/plot_data_plane/Makefile.am +++ b/met/src/tools/other/plot_data_plane/Makefile.am @@ -20,6 +20,7 @@ plot_data_plane_LDADD = -lvx_data2d_factory \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_nc_util \ -lvx_data2d \ -lvx_plot_util \ @@ -30,6 +31,7 @@ plot_data_plane_LDADD = -lvx_data2d_factory \ -lvx_afm \ -lvx_nav \ -lvx_gnomon \ + -lvx_regrid \ -lvx_grid \ -lvx_config \ -lvx_gsl_prob \ diff --git a/met/src/tools/other/plot_point_obs/Makefile.am b/met/src/tools/other/plot_point_obs/Makefile.am index 677a41a5b4..77e7c1711d 100644 --- a/met/src/tools/other/plot_point_obs/Makefile.am +++ b/met/src/tools/other/plot_point_obs/Makefile.am @@ -40,6 +40,7 @@ plot_point_obs_LDADD = -lvx_statistics \ -lvx_cal \ -lvx_util \ $(PYTHON_LIBS) \ + -lvx_statistics \ -lvx_math \ -lvx_color \ -lvx_log \ diff --git a/met/src/tools/other/point2grid/Makefile.am b/met/src/tools/other/point2grid/Makefile.am index 054a049df7..ccd38b1ac5 100644 --- a/met/src/tools/other/point2grid/Makefile.am +++ b/met/src/tools/other/point2grid/Makefile.am @@ -22,6 +22,7 @@ point2grid_LDADD = -lvx_statistics \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_nc_util \ -lvx_data2d \ -lvx_gnomon \ diff --git a/met/src/tools/other/regrid_data_plane/Makefile.am b/met/src/tools/other/regrid_data_plane/Makefile.am index 8c28e3f93f..32bbc91cea 100644 --- a/met/src/tools/other/regrid_data_plane/Makefile.am +++ b/met/src/tools/other/regrid_data_plane/Makefile.am @@ -21,6 +21,7 @@ regrid_data_plane_LDADD = -lvx_statistics \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_nc_util \ -lvx_data2d \ -lvx_gnomon \ diff --git a/met/src/tools/other/shift_data_plane/Makefile.am b/met/src/tools/other/shift_data_plane/Makefile.am index 67e02fcfea..af2d6875b8 100644 --- a/met/src/tools/other/shift_data_plane/Makefile.am +++ b/met/src/tools/other/shift_data_plane/Makefile.am @@ -21,6 +21,7 @@ shift_data_plane_LDADD = -lvx_statistics \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_nc_util \ -lvx_data2d \ -lvx_gnomon \ diff --git a/met/src/tools/other/wwmca_tool/Makefile.am b/met/src/tools/other/wwmca_tool/Makefile.am index 7972849bf4..ba4e6d5e69 100644 --- a/met/src/tools/other/wwmca_tool/Makefile.am +++ b/met/src/tools/other/wwmca_tool/Makefile.am @@ -44,6 +44,7 @@ wwmca_regrid_LDADD = -lvx_pxm \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ 
-lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/tc_utils/rmw_analysis/Makefile.am b/met/src/tools/tc_utils/rmw_analysis/Makefile.am index eca92181cd..61223d7f69 100644 --- a/met/src/tools/tc_utils/rmw_analysis/Makefile.am +++ b/met/src/tools/tc_utils/rmw_analysis/Makefile.am @@ -26,6 +26,7 @@ rmw_analysis_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/tc_utils/tc_gen/Makefile.am b/met/src/tools/tc_utils/tc_gen/Makefile.am index 4865e6a517..281811cd88 100644 --- a/met/src/tools/tc_utils/tc_gen/Makefile.am +++ b/met/src/tools/tc_utils/tc_gen/Makefile.am @@ -27,6 +27,7 @@ tc_gen_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/tc_utils/tc_pairs/Makefile.am b/met/src/tools/tc_utils/tc_pairs/Makefile.am index 56d6239e2f..d5d4436698 100644 --- a/met/src/tools/tc_utils/tc_pairs/Makefile.am +++ b/met/src/tools/tc_utils/tc_pairs/Makefile.am @@ -29,6 +29,7 @@ tc_pairs_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/tc_utils/tc_rmw/Makefile.am b/met/src/tools/tc_utils/tc_rmw/Makefile.am index 5aed64e561..43d075577c 100644 --- a/met/src/tools/tc_utils/tc_rmw/Makefile.am +++ b/met/src/tools/tc_utils/tc_rmw/Makefile.am @@ -26,6 +26,7 @@ tc_rmw_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/met/src/tools/tc_utils/tc_stat/Makefile.am b/met/src/tools/tc_utils/tc_stat/Makefile.am index 413d2a5141..06b56431ce 100644 --- a/met/src/tools/tc_utils/tc_stat/Makefile.am +++ b/met/src/tools/tc_utils/tc_stat/Makefile.am @@ -29,6 +29,7 @@ tc_stat_LDADD = -lvx_stat_out \ -lvx_data2d_nc_pinterp \ $(PYTHON_LIBS) \ -lvx_data2d_nccf \ + -lvx_statistics \ -lvx_data2d \ -lvx_nc_util \ -lvx_regrid \ diff --git a/test/xml/unit_python.xml b/test/xml/unit_python.xml index 58936ecbad..c204534f05 100644 --- a/test/xml/unit_python.xml +++ b/test/xml/unit_python.xml @@ -19,6 +19,68 @@ &TEST_DIR; true + + + &MET_BIN;/plot_data_plane + + PYTHON_GRID G212 + + \ + PYTHON_NUMPY \ + &OUTPUT_DIR;/python/letter_numpy_grid_name.ps \ + 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + -plot_range 0.0 255.0 \ + -title "Grid Name: 'G212'" \ + -v 1 + + + &OUTPUT_DIR;/python/letter_numpy_grid_name.ps + + + + + + &MET_BIN;/plot_data_plane + + + PYTHON_GRID + lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N + + + \ + PYTHON_NUMPY \ + &OUTPUT_DIR;/python/letter_numpy_grid_string.ps \ + 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + -plot_range 0.0 255.0 \ + -title "Grid String: '${PYTHON_GRID}'" \ + -v 1 + + + &OUTPUT_DIR;/python/letter_numpy_grid_string.ps + + + + + + &MET_BIN;/plot_data_plane + + + PYTHON_GRID + &MET_DATA;/sample_fcst/2005080700/wrfprs_ruc13_12.tm00_G212 + + \ + PYTHON_NUMPY \ + &OUTPUT_DIR;/python/letter_numpy_grid_data_file.ps \ + 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ + -plot_range 0.0 255.0 \ + -title "Gridded Data File: 'wrfprs_ruc13_12.tm00_G212'" \ + -v 1 + + + &OUTPUT_DIR;/python/letter_numpy_grid_data_file.ps + + + &MET_BIN;/plot_data_plane From 
0f08b74f6e38a07c5715a8b9732d528440702f1b Mon Sep 17 00:00:00 2001 From: MET Tools Test Account Date: Wed, 10 Mar 2021 17:03:26 -0700 Subject: [PATCH 67/86] Committing a fix for unit_python.xml directly to the develop branch. We referenced in a place where it's not defined. --- test/xml/unit_python.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/xml/unit_python.xml b/test/xml/unit_python.xml index c204534f05..1c89950159 100644 --- a/test/xml/unit_python.xml +++ b/test/xml/unit_python.xml @@ -52,7 +52,7 @@ &OUTPUT_DIR;/python/letter_numpy_grid_string.ps \ 'name = "&MET_BASE;/python/read_ascii_numpy_grid.py &DATA_DIR_PYTHON;/letter.txt LETTER";' \ -plot_range 0.0 255.0 \ - -title "Grid String: '${PYTHON_GRID}'" \ + -title "Grid String: 'lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N'" \ -v 1 From 48bb90618ee3c2aaccc88c6dc8581b21f8ac8287 Mon Sep 17 00:00:00 2001 From: John Halley Gotway Date: Thu, 11 Mar 2021 13:14:16 -0700 Subject: [PATCH 68/86] Add *.dSYM to the .gitignore files in the src and internal_tests directories. --- met/internal_tests/.gitignore | 1 + met/internal_tests/basic/.gitignore | 1 + met/internal_tests/basic/vx_config/.gitignore | 1 + met/internal_tests/basic/vx_log/.gitignore | 1 + met/internal_tests/basic/vx_util/.gitignore | 1 + met/internal_tests/libcode/.gitignore | 1 + met/internal_tests/libcode/vx_data2d/.gitignore | 1 + met/internal_tests/libcode/vx_data2d_factory/.gitignore | 1 + met/internal_tests/libcode/vx_data2d_grib/.gitignore | 1 + met/internal_tests/libcode/vx_data2d_nc_met/.gitignore | 1 + met/internal_tests/libcode/vx_data2d_nccf/.gitignore | 1 + met/internal_tests/libcode/vx_geodesy/.gitignore | 1 + met/internal_tests/libcode/vx_grid/.gitignore | 1 + met/internal_tests/libcode/vx_nc_util/.gitignore | 1 + met/internal_tests/libcode/vx_physics/.gitignore | 1 + met/internal_tests/libcode/vx_plot_util/.gitignore | 1 + met/internal_tests/libcode/vx_ps/.gitignore | 1 + met/internal_tests/libcode/vx_series_data/.gitignore | 1 + met/internal_tests/libcode/vx_solar/.gitignore | 1 + met/internal_tests/libcode/vx_tc_util/.gitignore | 1 + met/internal_tests/tools/.gitignore | 1 + met/internal_tests/tools/other/.gitignore | 1 + met/internal_tests/tools/other/mode_time_domain/.gitignore | 1 + met/src/.gitignore | 1 + met/src/basic/.gitignore | 1 + met/src/basic/enum_to_string/.gitignore | 1 + met/src/basic/vx_cal/.gitignore | 1 + met/src/basic/vx_config/.gitignore | 1 + met/src/basic/vx_log/.gitignore | 1 + met/src/basic/vx_math/.gitignore | 1 + met/src/basic/vx_util/.gitignore | 1 + met/src/libcode/.gitignore | 1 + met/src/libcode/vx_afm/.gitignore | 1 + met/src/libcode/vx_analysis_util/.gitignore | 1 + met/src/libcode/vx_color/.gitignore | 1 + met/src/libcode/vx_data2d/.gitignore | 1 + met/src/libcode/vx_data2d_factory/.gitignore | 1 + met/src/libcode/vx_data2d_grib/.gitignore | 1 + met/src/libcode/vx_data2d_grib2/.gitignore | 1 + met/src/libcode/vx_data2d_nc_met/.gitignore | 1 + met/src/libcode/vx_data2d_nc_pinterp/.gitignore | 1 + met/src/libcode/vx_data2d_nccf/.gitignore | 1 + met/src/libcode/vx_data2d_python/.gitignore | 1 + met/src/libcode/vx_geodesy/.gitignore | 1 + met/src/libcode/vx_gis/.gitignore | 1 + met/src/libcode/vx_gnomon/.gitignore | 1 + met/src/libcode/vx_grid/.gitignore | 1 + met/src/libcode/vx_gsl_prob/.gitignore | 1 + met/src/libcode/vx_nav/.gitignore | 1 + met/src/libcode/vx_nc_obs/.gitignore | 1 + met/src/libcode/vx_nc_util/.gitignore | 1 + met/src/libcode/vx_pb_util/.gitignore | 1 + 
met/src/libcode/vx_plot_util/.gitignore | 1 + met/src/libcode/vx_ps/.gitignore | 1 + met/src/libcode/vx_pxm/.gitignore | 1 + met/src/libcode/vx_regrid/.gitignore | 1 + met/src/libcode/vx_render/.gitignore | 1 + met/src/libcode/vx_shapedata/.gitignore | 1 + met/src/libcode/vx_solar/.gitignore | 1 + met/src/libcode/vx_stat_out/.gitignore | 1 + met/src/libcode/vx_statistics/.gitignore | 1 + met/src/libcode/vx_summary/.gitignore | 1 + met/src/libcode/vx_tc_util/.gitignore | 1 + met/src/libcode/vx_time_series/.gitignore | 1 + met/src/tools/.gitignore | 1 + met/src/tools/core/.gitignore | 1 + met/src/tools/core/ensemble_stat/.gitignore | 1 + met/src/tools/core/grid_stat/.gitignore | 1 + met/src/tools/core/mode/.gitignore | 1 + met/src/tools/core/mode_analysis/.gitignore | 1 + met/src/tools/core/pcp_combine/.gitignore | 1 + met/src/tools/core/point_stat/.gitignore | 1 + met/src/tools/core/series_analysis/.gitignore | 1 + met/src/tools/core/stat_analysis/.gitignore | 1 + met/src/tools/core/wavelet_stat/.gitignore | 1 + met/src/tools/dev_utils/.gitignore | 1 + met/src/tools/dev_utils/shapefiles/.gitignore | 1 + met/src/tools/other/.gitignore | 1 + met/src/tools/other/ascii2nc/.gitignore | 1 + met/src/tools/other/gen_vx_mask/.gitignore | 1 + met/src/tools/other/gis_utils/.gitignore | 1 + met/src/tools/other/grid_diag/.gitignore | 1 + met/src/tools/other/gsi_tools/.gitignore | 1 + met/src/tools/other/ioda2nc/.gitignore | 1 + met/src/tools/other/lidar2nc/.gitignore | 1 + met/src/tools/other/madis2nc/.gitignore | 1 + met/src/tools/other/mode_graphics/.gitignore | 1 + met/src/tools/other/mode_time_domain/.gitignore | 1 + met/src/tools/other/modis_regrid/.gitignore | 1 + met/src/tools/other/pb2nc/.gitignore | 1 + met/src/tools/other/plot_data_plane/.gitignore | 1 + met/src/tools/other/plot_point_obs/.gitignore | 1 + met/src/tools/other/point2grid/.gitignore | 1 + met/src/tools/other/regrid_data_plane/.gitignore | 1 + met/src/tools/other/shift_data_plane/.gitignore | 1 + met/src/tools/other/wwmca_tool/.gitignore | 1 + met/src/tools/tc_utils/.gitignore | 1 + met/src/tools/tc_utils/rmw_analysis/.gitignore | 1 + met/src/tools/tc_utils/tc_dland/.gitignore | 1 + met/src/tools/tc_utils/tc_gen/.gitignore | 1 + met/src/tools/tc_utils/tc_pairs/.gitignore | 1 + met/src/tools/tc_utils/tc_rmw/.gitignore | 1 + met/src/tools/tc_utils/tc_stat/.gitignore | 1 + 103 files changed, 103 insertions(+) diff --git a/met/internal_tests/.gitignore b/met/internal_tests/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/internal_tests/.gitignore +++ b/met/internal_tests/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/basic/.gitignore b/met/internal_tests/basic/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/internal_tests/basic/.gitignore +++ b/met/internal_tests/basic/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/basic/vx_config/.gitignore b/met/internal_tests/basic/vx_config/.gitignore index 67630fc1ad..4591eec130 100644 --- a/met/internal_tests/basic/vx_config/.gitignore +++ b/met/internal_tests/basic/vx_config/.gitignore @@ -12,3 +12,4 @@ test_config .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/basic/vx_log/.gitignore b/met/internal_tests/basic/vx_log/.gitignore index db14416cc0..ef6fcb721e 100644 --- a/met/internal_tests/basic/vx_log/.gitignore +++ b/met/internal_tests/basic/vx_log/.gitignore @@ -5,3 +5,4 @@ test_logger .deps Makefile Makefile.in +*.dSYM diff --git 
a/met/internal_tests/basic/vx_util/.gitignore b/met/internal_tests/basic/vx_util/.gitignore index dc6b171d93..a495037a4e 100644 --- a/met/internal_tests/basic/vx_util/.gitignore +++ b/met/internal_tests/basic/vx_util/.gitignore @@ -8,3 +8,4 @@ test_add_rows .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/.gitignore b/met/internal_tests/libcode/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/internal_tests/libcode/.gitignore +++ b/met/internal_tests/libcode/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_data2d/.gitignore b/met/internal_tests/libcode/vx_data2d/.gitignore index 061a79d193..f7b330410a 100644 --- a/met/internal_tests/libcode/vx_data2d/.gitignore +++ b/met/internal_tests/libcode/vx_data2d/.gitignore @@ -5,3 +5,4 @@ dump_default_table .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_data2d_factory/.gitignore b/met/internal_tests/libcode/vx_data2d_factory/.gitignore index f7fee338ad..4c7f17b56a 100644 --- a/met/internal_tests/libcode/vx_data2d_factory/.gitignore +++ b/met/internal_tests/libcode/vx_data2d_factory/.gitignore @@ -5,3 +5,4 @@ test_factory .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_data2d_grib/.gitignore b/met/internal_tests/libcode/vx_data2d_grib/.gitignore index 70d0ac9de2..f285e250bc 100644 --- a/met/internal_tests/libcode/vx_data2d_grib/.gitignore +++ b/met/internal_tests/libcode/vx_data2d_grib/.gitignore @@ -4,3 +4,4 @@ test_read_grib1 .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_data2d_nc_met/.gitignore b/met/internal_tests/libcode/vx_data2d_nc_met/.gitignore index 40ab9911a0..554cb3b99e 100644 --- a/met/internal_tests/libcode/vx_data2d_nc_met/.gitignore +++ b/met/internal_tests/libcode/vx_data2d_nc_met/.gitignore @@ -4,3 +4,4 @@ test_read_nc_met .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_data2d_nccf/.gitignore b/met/internal_tests/libcode/vx_data2d_nccf/.gitignore index 9f05a5dd30..c7cba20d1c 100644 --- a/met/internal_tests/libcode/vx_data2d_nccf/.gitignore +++ b/met/internal_tests/libcode/vx_data2d_nccf/.gitignore @@ -4,3 +4,4 @@ test_read_nccf .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_geodesy/.gitignore b/met/internal_tests/libcode/vx_geodesy/.gitignore index 17fc2778c9..826fb4b0c3 100644 --- a/met/internal_tests/libcode/vx_geodesy/.gitignore +++ b/met/internal_tests/libcode/vx_geodesy/.gitignore @@ -4,3 +4,4 @@ test_spheroid .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_grid/.gitignore b/met/internal_tests/libcode/vx_grid/.gitignore index 19037db3e9..733bb6d622 100644 --- a/met/internal_tests/libcode/vx_grid/.gitignore +++ b/met/internal_tests/libcode/vx_grid/.gitignore @@ -4,3 +4,4 @@ test_grid_area .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_nc_util/.gitignore b/met/internal_tests/libcode/vx_nc_util/.gitignore index a93467f330..71ddcde80a 100644 --- a/met/internal_tests/libcode/vx_nc_util/.gitignore +++ b/met/internal_tests/libcode/vx_nc_util/.gitignore @@ -4,3 +4,4 @@ test_pressure_levels .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_physics/.gitignore b/met/internal_tests/libcode/vx_physics/.gitignore index 094a57719a..f2ec7e5a62 100644 --- a/met/internal_tests/libcode/vx_physics/.gitignore +++ b/met/internal_tests/libcode/vx_physics/.gitignore @@ -4,3 +4,4 @@ test_thermo .deps Makefile Makefile.in 
+*.dSYM diff --git a/met/internal_tests/libcode/vx_plot_util/.gitignore b/met/internal_tests/libcode/vx_plot_util/.gitignore index d570f17da2..01b5ee0c75 100644 --- a/met/internal_tests/libcode/vx_plot_util/.gitignore +++ b/met/internal_tests/libcode/vx_plot_util/.gitignore @@ -4,3 +4,4 @@ test_map_region .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_ps/.gitignore b/met/internal_tests/libcode/vx_ps/.gitignore index ebef0f4ecf..916310e94c 100644 --- a/met/internal_tests/libcode/vx_ps/.gitignore +++ b/met/internal_tests/libcode/vx_ps/.gitignore @@ -4,3 +4,4 @@ test_ps .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_series_data/.gitignore b/met/internal_tests/libcode/vx_series_data/.gitignore index a29fc045ef..44e011fc66 100644 --- a/met/internal_tests/libcode/vx_series_data/.gitignore +++ b/met/internal_tests/libcode/vx_series_data/.gitignore @@ -4,3 +4,4 @@ test_series_data .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_solar/.gitignore b/met/internal_tests/libcode/vx_solar/.gitignore index 906dd21126..326d9d0cad 100644 --- a/met/internal_tests/libcode/vx_solar/.gitignore +++ b/met/internal_tests/libcode/vx_solar/.gitignore @@ -4,3 +4,4 @@ test_ra_dec .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/libcode/vx_tc_util/.gitignore b/met/internal_tests/libcode/vx_tc_util/.gitignore index 6a46ceef48..bd2b6485c5 100644 --- a/met/internal_tests/libcode/vx_tc_util/.gitignore +++ b/met/internal_tests/libcode/vx_tc_util/.gitignore @@ -5,3 +5,4 @@ test_read .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/tools/.gitignore b/met/internal_tests/tools/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/internal_tests/tools/.gitignore +++ b/met/internal_tests/tools/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/tools/other/.gitignore b/met/internal_tests/tools/other/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/internal_tests/tools/other/.gitignore +++ b/met/internal_tests/tools/other/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/internal_tests/tools/other/mode_time_domain/.gitignore b/met/internal_tests/tools/other/mode_time_domain/.gitignore index 092c0bed05..01fa57111b 100644 --- a/met/internal_tests/tools/other/mode_time_domain/.gitignore +++ b/met/internal_tests/tools/other/mode_time_domain/.gitignore @@ -4,3 +4,4 @@ test_velocity .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/.gitignore b/met/src/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/.gitignore +++ b/met/src/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/.gitignore b/met/src/basic/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/basic/.gitignore +++ b/met/src/basic/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/enum_to_string/.gitignore b/met/src/basic/enum_to_string/.gitignore index 02ea333a4d..7788d23d81 100644 --- a/met/src/basic/enum_to_string/.gitignore +++ b/met/src/basic/enum_to_string/.gitignore @@ -5,3 +5,4 @@ enum_parser.cc .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/vx_cal/.gitignore b/met/src/basic/vx_cal/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/basic/vx_cal/.gitignore +++ b/met/src/basic/vx_cal/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/vx_config/.gitignore b/met/src/basic/vx_config/.gitignore index 
cd85cdb95c..24c9ca6c7e 100644 --- a/met/src/basic/vx_config/.gitignore +++ b/met/src/basic/vx_config/.gitignore @@ -7,3 +7,4 @@ config.tab.h .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/vx_log/.gitignore b/met/src/basic/vx_log/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/basic/vx_log/.gitignore +++ b/met/src/basic/vx_log/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/vx_math/.gitignore b/met/src/basic/vx_math/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/basic/vx_math/.gitignore +++ b/met/src/basic/vx_math/.gitignore @@ -5,3 +5,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/basic/vx_util/.gitignore b/met/src/basic/vx_util/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/basic/vx_util/.gitignore +++ b/met/src/basic/vx_util/.gitignore @@ -5,3 +5,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/.gitignore b/met/src/libcode/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/.gitignore +++ b/met/src/libcode/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_afm/.gitignore b/met/src/libcode/vx_afm/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/libcode/vx_afm/.gitignore +++ b/met/src/libcode/vx_afm/.gitignore @@ -5,3 +5,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_analysis_util/.gitignore b/met/src/libcode/vx_analysis_util/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_analysis_util/.gitignore +++ b/met/src/libcode/vx_analysis_util/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_color/.gitignore b/met/src/libcode/vx_color/.gitignore index db5274632b..222c1f46c6 100644 --- a/met/src/libcode/vx_color/.gitignore +++ b/met/src/libcode/vx_color/.gitignore @@ -6,3 +6,4 @@ color_parser_yacc.h .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d/.gitignore b/met/src/libcode/vx_data2d/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/libcode/vx_data2d/.gitignore +++ b/met/src/libcode/vx_data2d/.gitignore @@ -5,3 +5,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_factory/.gitignore b/met/src/libcode/vx_data2d_factory/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_factory/.gitignore +++ b/met/src/libcode/vx_data2d_factory/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_grib/.gitignore b/met/src/libcode/vx_data2d_grib/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_grib/.gitignore +++ b/met/src/libcode/vx_data2d_grib/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_grib2/.gitignore b/met/src/libcode/vx_data2d_grib2/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_grib2/.gitignore +++ b/met/src/libcode/vx_data2d_grib2/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_nc_met/.gitignore b/met/src/libcode/vx_data2d_nc_met/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_nc_met/.gitignore +++ b/met/src/libcode/vx_data2d_nc_met/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_nc_pinterp/.gitignore b/met/src/libcode/vx_data2d_nc_pinterp/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- 
a/met/src/libcode/vx_data2d_nc_pinterp/.gitignore +++ b/met/src/libcode/vx_data2d_nc_pinterp/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_nccf/.gitignore b/met/src/libcode/vx_data2d_nccf/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_nccf/.gitignore +++ b/met/src/libcode/vx_data2d_nccf/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_data2d_python/.gitignore b/met/src/libcode/vx_data2d_python/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_data2d_python/.gitignore +++ b/met/src/libcode/vx_data2d_python/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_geodesy/.gitignore b/met/src/libcode/vx_geodesy/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_geodesy/.gitignore +++ b/met/src/libcode/vx_geodesy/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_gis/.gitignore b/met/src/libcode/vx_gis/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/libcode/vx_gis/.gitignore +++ b/met/src/libcode/vx_gis/.gitignore @@ -5,3 +5,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_gnomon/.gitignore b/met/src/libcode/vx_gnomon/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_gnomon/.gitignore +++ b/met/src/libcode/vx_gnomon/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_grid/.gitignore b/met/src/libcode/vx_grid/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_grid/.gitignore +++ b/met/src/libcode/vx_grid/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_gsl_prob/.gitignore b/met/src/libcode/vx_gsl_prob/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_gsl_prob/.gitignore +++ b/met/src/libcode/vx_gsl_prob/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_nav/.gitignore b/met/src/libcode/vx_nav/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_nav/.gitignore +++ b/met/src/libcode/vx_nav/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_nc_obs/.gitignore b/met/src/libcode/vx_nc_obs/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_nc_obs/.gitignore +++ b/met/src/libcode/vx_nc_obs/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_nc_util/.gitignore b/met/src/libcode/vx_nc_util/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_nc_util/.gitignore +++ b/met/src/libcode/vx_nc_util/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_pb_util/.gitignore b/met/src/libcode/vx_pb_util/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_pb_util/.gitignore +++ b/met/src/libcode/vx_pb_util/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_plot_util/.gitignore b/met/src/libcode/vx_plot_util/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_plot_util/.gitignore +++ b/met/src/libcode/vx_plot_util/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_ps/.gitignore b/met/src/libcode/vx_ps/.gitignore index 92b269f31e..7e5f500d16 100644 --- a/met/src/libcode/vx_ps/.gitignore +++ b/met/src/libcode/vx_ps/.gitignore @@ -5,3 +5,4 
@@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_pxm/.gitignore b/met/src/libcode/vx_pxm/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_pxm/.gitignore +++ b/met/src/libcode/vx_pxm/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_regrid/.gitignore b/met/src/libcode/vx_regrid/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_regrid/.gitignore +++ b/met/src/libcode/vx_regrid/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_render/.gitignore b/met/src/libcode/vx_render/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_render/.gitignore +++ b/met/src/libcode/vx_render/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_shapedata/.gitignore b/met/src/libcode/vx_shapedata/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_shapedata/.gitignore +++ b/met/src/libcode/vx_shapedata/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_solar/.gitignore b/met/src/libcode/vx_solar/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_solar/.gitignore +++ b/met/src/libcode/vx_solar/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_stat_out/.gitignore b/met/src/libcode/vx_stat_out/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_stat_out/.gitignore +++ b/met/src/libcode/vx_stat_out/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_statistics/.gitignore b/met/src/libcode/vx_statistics/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_statistics/.gitignore +++ b/met/src/libcode/vx_statistics/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_summary/.gitignore b/met/src/libcode/vx_summary/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_summary/.gitignore +++ b/met/src/libcode/vx_summary/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_tc_util/.gitignore b/met/src/libcode/vx_tc_util/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_tc_util/.gitignore +++ b/met/src/libcode/vx_tc_util/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/libcode/vx_time_series/.gitignore b/met/src/libcode/vx_time_series/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/libcode/vx_time_series/.gitignore +++ b/met/src/libcode/vx_time_series/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/.gitignore b/met/src/tools/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/tools/.gitignore +++ b/met/src/tools/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/.gitignore b/met/src/tools/core/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/tools/core/.gitignore +++ b/met/src/tools/core/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/ensemble_stat/.gitignore b/met/src/tools/core/ensemble_stat/.gitignore index ff7e359957..536328aba8 100644 --- a/met/src/tools/core/ensemble_stat/.gitignore +++ b/met/src/tools/core/ensemble_stat/.gitignore @@ -4,3 +4,4 @@ ensemble_stat .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/grid_stat/.gitignore b/met/src/tools/core/grid_stat/.gitignore 
index c4d713316d..5e2bcb8f66 100644 --- a/met/src/tools/core/grid_stat/.gitignore +++ b/met/src/tools/core/grid_stat/.gitignore @@ -4,3 +4,4 @@ grid_stat .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/mode/.gitignore b/met/src/tools/core/mode/.gitignore index 3eb452874c..8216720ddb 100644 --- a/met/src/tools/core/mode/.gitignore +++ b/met/src/tools/core/mode/.gitignore @@ -4,3 +4,4 @@ mode .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/mode_analysis/.gitignore b/met/src/tools/core/mode_analysis/.gitignore index 6108dd97e6..0e3a718728 100644 --- a/met/src/tools/core/mode_analysis/.gitignore +++ b/met/src/tools/core/mode_analysis/.gitignore @@ -4,3 +4,4 @@ mode_analysis .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/pcp_combine/.gitignore b/met/src/tools/core/pcp_combine/.gitignore index 012e9dce9d..2963b459b1 100644 --- a/met/src/tools/core/pcp_combine/.gitignore +++ b/met/src/tools/core/pcp_combine/.gitignore @@ -4,3 +4,4 @@ pcp_combine .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/point_stat/.gitignore b/met/src/tools/core/point_stat/.gitignore index 42294678d5..00088797c5 100644 --- a/met/src/tools/core/point_stat/.gitignore +++ b/met/src/tools/core/point_stat/.gitignore @@ -4,3 +4,4 @@ point_stat .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/series_analysis/.gitignore b/met/src/tools/core/series_analysis/.gitignore index 4e996c7b6c..4ae6680d91 100644 --- a/met/src/tools/core/series_analysis/.gitignore +++ b/met/src/tools/core/series_analysis/.gitignore @@ -4,3 +4,4 @@ series_analysis .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/stat_analysis/.gitignore b/met/src/tools/core/stat_analysis/.gitignore index aadcccda75..bcc102842e 100644 --- a/met/src/tools/core/stat_analysis/.gitignore +++ b/met/src/tools/core/stat_analysis/.gitignore @@ -4,3 +4,4 @@ stat_analysis .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/core/wavelet_stat/.gitignore b/met/src/tools/core/wavelet_stat/.gitignore index 31f707d25d..7ed6d133d1 100644 --- a/met/src/tools/core/wavelet_stat/.gitignore +++ b/met/src/tools/core/wavelet_stat/.gitignore @@ -4,3 +4,4 @@ wavelet_stat .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/dev_utils/.gitignore b/met/src/tools/dev_utils/.gitignore index 5993a9d79d..6dff8d3ff9 100644 --- a/met/src/tools/dev_utils/.gitignore +++ b/met/src/tools/dev_utils/.gitignore @@ -12,3 +12,4 @@ chk4copyright .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/dev_utils/shapefiles/.gitignore b/met/src/tools/dev_utils/shapefiles/.gitignore index d530071029..1ef4c3e5dc 100644 --- a/met/src/tools/dev_utils/shapefiles/.gitignore +++ b/met/src/tools/dev_utils/shapefiles/.gitignore @@ -4,3 +4,4 @@ make_mapfiles .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/.gitignore b/met/src/tools/other/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/tools/other/.gitignore +++ b/met/src/tools/other/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/ascii2nc/.gitignore b/met/src/tools/other/ascii2nc/.gitignore index 14e5831fd8..46e9e4600d 100644 --- a/met/src/tools/other/ascii2nc/.gitignore +++ b/met/src/tools/other/ascii2nc/.gitignore @@ -4,3 +4,4 @@ ascii2nc .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/gen_vx_mask/.gitignore b/met/src/tools/other/gen_vx_mask/.gitignore index 014957152f..def514617d 100644 --- a/met/src/tools/other/gen_vx_mask/.gitignore +++ 
b/met/src/tools/other/gen_vx_mask/.gitignore @@ -4,3 +4,4 @@ gen_vx_mask .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/gis_utils/.gitignore b/met/src/tools/other/gis_utils/.gitignore index 6f5206616c..6bcc0d4e28 100644 --- a/met/src/tools/other/gis_utils/.gitignore +++ b/met/src/tools/other/gis_utils/.gitignore @@ -6,3 +6,4 @@ gis_dump_dbf .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/grid_diag/.gitignore b/met/src/tools/other/grid_diag/.gitignore index 07d1d9d55d..2ea21992a5 100644 --- a/met/src/tools/other/grid_diag/.gitignore +++ b/met/src/tools/other/grid_diag/.gitignore @@ -4,3 +4,4 @@ grid_diag .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/gsi_tools/.gitignore b/met/src/tools/other/gsi_tools/.gitignore index f98752c930..d91d7ab411 100644 --- a/met/src/tools/other/gsi_tools/.gitignore +++ b/met/src/tools/other/gsi_tools/.gitignore @@ -5,3 +5,4 @@ gsid2mpr .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/ioda2nc/.gitignore b/met/src/tools/other/ioda2nc/.gitignore index 0d392220b0..58b6d8e6cc 100644 --- a/met/src/tools/other/ioda2nc/.gitignore +++ b/met/src/tools/other/ioda2nc/.gitignore @@ -5,3 +5,4 @@ ioda2nc .dirstamp Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/lidar2nc/.gitignore b/met/src/tools/other/lidar2nc/.gitignore index 25c289c10c..6a71017f06 100644 --- a/met/src/tools/other/lidar2nc/.gitignore +++ b/met/src/tools/other/lidar2nc/.gitignore @@ -4,3 +4,4 @@ lidar2nc .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/madis2nc/.gitignore b/met/src/tools/other/madis2nc/.gitignore index 048a33b4e4..eb5b9c9238 100644 --- a/met/src/tools/other/madis2nc/.gitignore +++ b/met/src/tools/other/madis2nc/.gitignore @@ -4,3 +4,4 @@ madis2nc .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/mode_graphics/.gitignore b/met/src/tools/other/mode_graphics/.gitignore index c344a0a483..0026eae5fa 100644 --- a/met/src/tools/other/mode_graphics/.gitignore +++ b/met/src/tools/other/mode_graphics/.gitignore @@ -6,3 +6,4 @@ plot_mode_field .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/mode_time_domain/.gitignore b/met/src/tools/other/mode_time_domain/.gitignore index 16272bfe7d..15cbce0f9f 100644 --- a/met/src/tools/other/mode_time_domain/.gitignore +++ b/met/src/tools/other/mode_time_domain/.gitignore @@ -6,3 +6,4 @@ mtd .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/modis_regrid/.gitignore b/met/src/tools/other/modis_regrid/.gitignore index 5659a845bd..de709555ac 100644 --- a/met/src/tools/other/modis_regrid/.gitignore +++ b/met/src/tools/other/modis_regrid/.gitignore @@ -4,3 +4,4 @@ modis_regrid .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/pb2nc/.gitignore b/met/src/tools/other/pb2nc/.gitignore index db0fc32617..ec395e3290 100644 --- a/met/src/tools/other/pb2nc/.gitignore +++ b/met/src/tools/other/pb2nc/.gitignore @@ -5,3 +5,4 @@ pb2nc .dirstamp Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/plot_data_plane/.gitignore b/met/src/tools/other/plot_data_plane/.gitignore index fb45412645..ba23c68111 100644 --- a/met/src/tools/other/plot_data_plane/.gitignore +++ b/met/src/tools/other/plot_data_plane/.gitignore @@ -4,3 +4,4 @@ plot_data_plane .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/plot_point_obs/.gitignore b/met/src/tools/other/plot_point_obs/.gitignore index 675f1ad662..c09f77682e 100644 --- a/met/src/tools/other/plot_point_obs/.gitignore +++ 
b/met/src/tools/other/plot_point_obs/.gitignore @@ -4,3 +4,4 @@ plot_point_obs .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/point2grid/.gitignore b/met/src/tools/other/point2grid/.gitignore index 7fc724ecfb..c86ed97c54 100644 --- a/met/src/tools/other/point2grid/.gitignore +++ b/met/src/tools/other/point2grid/.gitignore @@ -4,3 +4,4 @@ point2grid .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/regrid_data_plane/.gitignore b/met/src/tools/other/regrid_data_plane/.gitignore index 2bddf9a131..3b5934adac 100644 --- a/met/src/tools/other/regrid_data_plane/.gitignore +++ b/met/src/tools/other/regrid_data_plane/.gitignore @@ -4,3 +4,4 @@ regrid_data_plane .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/shift_data_plane/.gitignore b/met/src/tools/other/shift_data_plane/.gitignore index 84942ecee1..b539a2f8c3 100644 --- a/met/src/tools/other/shift_data_plane/.gitignore +++ b/met/src/tools/other/shift_data_plane/.gitignore @@ -4,3 +4,4 @@ shift_data_plane .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/other/wwmca_tool/.gitignore b/met/src/tools/other/wwmca_tool/.gitignore index a144c3a35d..ebecdacf1a 100644 --- a/met/src/tools/other/wwmca_tool/.gitignore +++ b/met/src/tools/other/wwmca_tool/.gitignore @@ -7,3 +7,4 @@ wwmca_plot .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/.gitignore b/met/src/tools/tc_utils/.gitignore index 6c6a5a4f31..1295d44db5 100644 --- a/met/src/tools/tc_utils/.gitignore +++ b/met/src/tools/tc_utils/.gitignore @@ -3,3 +3,4 @@ .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/rmw_analysis/.gitignore b/met/src/tools/tc_utils/rmw_analysis/.gitignore index 077554915a..ca237511e7 100644 --- a/met/src/tools/tc_utils/rmw_analysis/.gitignore +++ b/met/src/tools/tc_utils/rmw_analysis/.gitignore @@ -4,3 +4,4 @@ rmw_analysis .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/tc_dland/.gitignore b/met/src/tools/tc_utils/tc_dland/.gitignore index 75c82825a6..483d24cffa 100644 --- a/met/src/tools/tc_utils/tc_dland/.gitignore +++ b/met/src/tools/tc_utils/tc_dland/.gitignore @@ -4,3 +4,4 @@ tc_dland .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/tc_gen/.gitignore b/met/src/tools/tc_utils/tc_gen/.gitignore index 7b8c809236..28c0ebbfe4 100644 --- a/met/src/tools/tc_utils/tc_gen/.gitignore +++ b/met/src/tools/tc_utils/tc_gen/.gitignore @@ -4,3 +4,4 @@ tc_gen .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/tc_pairs/.gitignore b/met/src/tools/tc_utils/tc_pairs/.gitignore index d13a56b286..d69eaeca53 100644 --- a/met/src/tools/tc_utils/tc_pairs/.gitignore +++ b/met/src/tools/tc_utils/tc_pairs/.gitignore @@ -4,3 +4,4 @@ tc_pairs .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/tc_rmw/.gitignore b/met/src/tools/tc_utils/tc_rmw/.gitignore index b540dddec0..4f555bc016 100644 --- a/met/src/tools/tc_utils/tc_rmw/.gitignore +++ b/met/src/tools/tc_utils/tc_rmw/.gitignore @@ -4,3 +4,4 @@ tc_rmw .deps Makefile Makefile.in +*.dSYM diff --git a/met/src/tools/tc_utils/tc_stat/.gitignore b/met/src/tools/tc_utils/tc_stat/.gitignore index d18583f8b8..8f7a38909c 100644 --- a/met/src/tools/tc_utils/tc_stat/.gitignore +++ b/met/src/tools/tc_utils/tc_stat/.gitignore @@ -4,3 +4,4 @@ tc_stat .deps Makefile Makefile.in +*.dSYM From 6568493c6763869789ab142a98b4ca15fbf042aa Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Fri, 12 Mar 2021 13:56:04 -0700 Subject: [PATCH 69/86] Replaced tmp netcdf _name attribute 
with name_str. --- met/data/wrappers/read_tmp_dataplane.py | 4 ++-- met/data/wrappers/write_tmp_dataplane.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/met/data/wrappers/read_tmp_dataplane.py b/met/data/wrappers/read_tmp_dataplane.py index e5fb0d6cb0..e21c17ba3f 100644 --- a/met/data/wrappers/read_tmp_dataplane.py +++ b/met/data/wrappers/read_tmp_dataplane.py @@ -29,8 +29,8 @@ met_attrs[attr] = attr_val grid['nx'], grid['ny'] = int(grid['nx']), int(grid['ny']) met_attrs['grid'] = grid -met_attrs['name'] = met_attrs['_name'] -del met_attrs['_name'] +met_attrs['name'] = met_attrs['name_str'] +del met_attrs['name_str'] met_info['met_data'] = met_data met_info['attrs'] = met_attrs print(met_info) diff --git a/met/data/wrappers/write_tmp_dataplane.py b/met/data/wrappers/write_tmp_dataplane.py index f7ff2d7559..c04b1da6d0 100644 --- a/met/data/wrappers/write_tmp_dataplane.py +++ b/met/data/wrappers/write_tmp_dataplane.py @@ -54,7 +54,7 @@ for attr, attr_val in met_info['attrs'].items(): print(attr, attr_val, type(attr_val)) if attr == 'name': - setattr(ds, '_name', attr_val) + setattr(ds, 'name_str', attr_val) if type(attr_val) == str: setattr(ds, attr, attr_val) if type(attr_val) == dict: From 22f5e98acba65a3e5454fdee939a4ea87a4d334a Mon Sep 17 00:00:00 2001 From: David Fillmore Date: Mon, 15 Mar 2021 14:49:09 -0600 Subject: [PATCH 70/86] Append user script path to system path. --- met/data/wrappers/write_tmp_dataplane.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/met/data/wrappers/write_tmp_dataplane.py b/met/data/wrappers/write_tmp_dataplane.py index c04b1da6d0..6af7d115a9 100644 --- a/met/data/wrappers/write_tmp_dataplane.py +++ b/met/data/wrappers/write_tmp_dataplane.py @@ -23,6 +23,11 @@ pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] +# append user script dir to system path +pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) +if pyembed_dir: + os.path.append(pyembed_dir) + if not pyembed_module_name.endswith('.py'): pyembed_module_name += '.py' From dee5d3b0f1b4c69bb941c7ea7d8175506c215746 Mon Sep 17 00:00:00 2001 From: johnhg Date: Mon, 15 Mar 2021 17:51:14 -0600 Subject: [PATCH 71/86] Revert "Feature 1319 no pickle" (#1717) --- met/data/wrappers/Makefile.am | 8 +- met/data/wrappers/read_pickle_dataplane.py | 15 +++ met/data/wrappers/read_tmp_ascii.py | 49 ------- met/data/wrappers/read_tmp_dataplane.py | 36 ------ ...tmp_point.py => write_pickle_dataplane.py} | 24 +++- met/data/wrappers/write_pickle_mpr.py | 5 +- met/data/wrappers/write_pickle_point.py | 5 - met/data/wrappers/write_tmp_dataplane.py | 68 ---------- .../vx_data2d_python/python_dataplane.cc | 54 ++++---- .../vx_python3_utils/python3_script.cc | 121 +----------------- .../libcode/vx_python3_utils/python3_script.h | 17 +-- met/src/tools/other/ascii2nc/ascii2nc.cc | 1 - .../tools/other/ascii2nc/python_handler.cc | 48 ++++--- met/src/tools/other/ascii2nc/python_handler.h | 10 +- 14 files changed, 96 insertions(+), 365 deletions(-) create mode 100644 met/data/wrappers/read_pickle_dataplane.py delete mode 100644 met/data/wrappers/read_tmp_ascii.py delete mode 100644 met/data/wrappers/read_tmp_dataplane.py rename met/data/wrappers/{write_tmp_point.py => write_pickle_dataplane.py} (52%) delete mode 100644 met/data/wrappers/write_tmp_dataplane.py diff --git a/met/data/wrappers/Makefile.am b/met/data/wrappers/Makefile.am index 5061e51d51..d8a6d5a026 100644 --- a/met/data/wrappers/Makefile.am +++ b/met/data/wrappers/Makefile.am @@ -23,12 +23,10 @@ wrappersdir = 
$(pkgdatadir)/wrappers wrappers_DATA = \ generic_python.py \ generic_pickle.py \ - read_tmp_dataplane.py \ - write_tmp_dataplane.py \ + read_pickle_dataplane.py \ + write_pickle_dataplane.py \ write_pickle_mpr.py \ - read_tmp_ascii.py \ - write_pickle_point.py \ - write_tmp_point.py + write_pickle_point.py EXTRA_DIST = ${wrappers_DATA} diff --git a/met/data/wrappers/read_pickle_dataplane.py b/met/data/wrappers/read_pickle_dataplane.py new file mode 100644 index 0000000000..f97f153df7 --- /dev/null +++ b/met/data/wrappers/read_pickle_dataplane.py @@ -0,0 +1,15 @@ +######################################################################## +# +# Reads temporary pickle file into memory. +# +# usage: /path/to/python read_pickle_dataplane.py pickle.tmp +# +######################################################################## + +import sys +import numpy as np +import pickle + +print('Python Script:\t', sys.argv[0]) +print('Load Pickle:\t', sys.argv[1]) +met_info = pickle.load(open(sys.argv[1], "rb")) diff --git a/met/data/wrappers/read_tmp_ascii.py b/met/data/wrappers/read_tmp_ascii.py deleted file mode 100644 index b4f4303044..0000000000 --- a/met/data/wrappers/read_tmp_ascii.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -Module Name: read_tmp_ascii.py - -Read MET Point Observations from a text file created by write_tmp_point.py script - or MET Matched Pairs from a text file created by write_tmp_mpr.py script - -Point observation format: - Message_Type, Station_ID, Valid_Time, Lat, Lon, Elevation, - GRIB_Code or Variable_Name, Level, Height, QC_String, Observation_Value - -Version Date -1.0.0 2021/02/18 David Fillmore Initial version -""" - -__author__ = 'David Fillmore' -__version__ = '1.0.0' -__email__ = 'met_help@ucar.edu' - -import argparse - -point_data = None - -def read_tmp_ascii(filename): - """ - Arguments: - filename (string): temporary file created by write_tmp_point.py - - Returns: - (list of lists): point data - """ - print('read_tmp_ascii:' + filename) - f = open(filename, 'r') - lines = f.readlines() - f.close() - - global point_data - point_data = [eval(line.strip('\n')) for line in lines] - - return point_data - -if __name__ == '__main__': - """ - Parse command line arguments - """ - parser = argparse.ArgumentParser() - parser.add_argument('--filename', type=str) - args = parser.parse_args() - - data = read_tmp_ascii(args.filename) diff --git a/met/data/wrappers/read_tmp_dataplane.py b/met/data/wrappers/read_tmp_dataplane.py deleted file mode 100644 index e21c17ba3f..0000000000 --- a/met/data/wrappers/read_tmp_dataplane.py +++ /dev/null @@ -1,36 +0,0 @@ -######################################################################## -# -# Reads temporary file into memory. 
-# -# usage: /path/to/python read_tmp_dataplane.py dataplane.tmp -# -######################################################################## - -import sys -import numpy as np -import netCDF4 as nc - -print('Python Script:\t', sys.argv[0]) -met_info = {} - -netcdf_filename = sys.argv[1] -print('Read NetCDF:\t', netcdf_filename) - -# read NetCDF file -ds = nc.Dataset(netcdf_filename, 'r') -met_data = ds['met_data'][:] -met_attrs = {} -grid = {} -for attr, attr_val in ds.__dict__.items(): - if 'grid' in attr: - grid_attr = attr.split('.')[1] - grid[grid_attr] = attr_val - else: - met_attrs[attr] = attr_val -grid['nx'], grid['ny'] = int(grid['nx']), int(grid['ny']) -met_attrs['grid'] = grid -met_attrs['name'] = met_attrs['name_str'] -del met_attrs['name_str'] -met_info['met_data'] = met_data -met_info['attrs'] = met_attrs -print(met_info) diff --git a/met/data/wrappers/write_tmp_point.py b/met/data/wrappers/write_pickle_dataplane.py similarity index 52% rename from met/data/wrappers/write_tmp_point.py rename to met/data/wrappers/write_pickle_dataplane.py index 94f56cd3dd..079557538b 100644 --- a/met/data/wrappers/write_tmp_point.py +++ b/met/data/wrappers/write_pickle_dataplane.py @@ -3,30 +3,40 @@ # Adapted from a script provided by George McCabe # Adapted by Randy Bullock # -# usage: /path/to/python write_tmp_point.py \ -# tmp_ascii_output_filename .py +# usage: /path/to/python write_pickle_dataplane.py \ +# pickle_output_filename .py # ######################################################################## import os import sys +import pickle import importlib.util +import xarray as xr print('Python Script:\t', sys.argv[0]) print('User Command:\t', sys.argv[2:]) -print('Write Temporary Ascii:\t', sys.argv[1]) +print('Write Pickle:\t', sys.argv[1]) -tmp_filename = sys.argv[1] +pickle_filename = sys.argv[1] pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] +if not pyembed_module_name.endswith('.py'): + pyembed_module_name += '.py' + user_base = os.path.basename(pyembed_module_name).replace('.py','') spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) -f = open(tmp_filename, 'w') -for line in met_in.point_data: - f.write(str(line) + '\n') +if isinstance(met_in.met_data, xr.DataArray): + met_info = { 'attrs': met_in.met_data.attrs, 'met_data': met_in.met_data } +else: + met_info = { 'attrs': met_in.attrs, 'met_data': met_in.met_data } + +print(met_info) + +pickle.dump( met_info, open( pickle_filename, "wb" ) ) diff --git a/met/data/wrappers/write_pickle_mpr.py b/met/data/wrappers/write_pickle_mpr.py index efde687bf7..2e3f2d0d04 100644 --- a/met/data/wrappers/write_pickle_mpr.py +++ b/met/data/wrappers/write_pickle_mpr.py @@ -18,7 +18,6 @@ print('Write Pickle:\t', sys.argv[1]) pickle_filename = sys.argv[1] -tmp_filename = pickle_filename + '.txt' pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] @@ -29,8 +28,6 @@ met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) -f = open(tmp_filename, 'w') -for line in met_in.mpr_data: - f.write(str(line) + '\n') +print(met_in) pickle.dump( met_in.mpr_data, open( pickle_filename, "wb" ) ) diff --git a/met/data/wrappers/write_pickle_point.py b/met/data/wrappers/write_pickle_point.py index 907c0e005d..1f5ee35bdb 100644 --- a/met/data/wrappers/write_pickle_point.py +++ b/met/data/wrappers/write_pickle_point.py @@ -18,7 +18,6 @@ print('Write Pickle:\t', sys.argv[1]) pickle_filename = sys.argv[1] -tmp_filename = 
pickle_filename + '.txt' pyembed_module_name = sys.argv[2] sys.argv = sys.argv[2:] @@ -29,8 +28,4 @@ met_in = importlib.util.module_from_spec(spec) spec.loader.exec_module(met_in) -f = open(tmp_filename, 'w') -for line in met_in.point_data: - f.write(str(line) + '\n') - pickle.dump( met_in.point_data, open( pickle_filename, "wb" ) ) diff --git a/met/data/wrappers/write_tmp_dataplane.py b/met/data/wrappers/write_tmp_dataplane.py deleted file mode 100644 index 6af7d115a9..0000000000 --- a/met/data/wrappers/write_tmp_dataplane.py +++ /dev/null @@ -1,68 +0,0 @@ -######################################################################## -# -# Adapted from a script provided by George McCabe -# Adapted by Randy Bullock -# -# usage: /path/to/python write_tmp_dataplane.py \ -# tmp_output_filename .py -# -######################################################################## - -import os -import sys -import importlib.util -import netCDF4 as nc - -print('Python Script:\t', sys.argv[0]) -print('User Command:\t', sys.argv[2:]) - -netcdf_filename = sys.argv[1] - -print('Write NetCDF:\t', netcdf_filename) - -pyembed_module_name = sys.argv[2] -sys.argv = sys.argv[2:] - -# append user script dir to system path -pyembed_dir, pyembed_file = os.path.split(pyembed_module_name) -if pyembed_dir: - os.path.append(pyembed_dir) - -if not pyembed_module_name.endswith('.py'): - pyembed_module_name += '.py' - -user_base = os.path.basename(pyembed_module_name).replace('.py','') - -spec = importlib.util.spec_from_file_location(user_base, pyembed_module_name) -met_in = importlib.util.module_from_spec(spec) -spec.loader.exec_module(met_in) - -met_info = {'met_data': met_in.met_data} -if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs: - attrs = met_in.met_data.attrs -else: - attrs = met_in.attrs -met_info['attrs'] = attrs - -print('write_tmp_dataplane') -print(met_info) - -# write NetCDF file -ds = nc.Dataset(netcdf_filename, 'w') - -nx, ny = met_in.met_data.shape -ds.createDimension('x', nx) -ds.createDimension('y', ny) -dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y')) -dp[:] = met_in.met_data - -for attr, attr_val in met_info['attrs'].items(): - print(attr, attr_val, type(attr_val)) - if attr == 'name': - setattr(ds, 'name_str', attr_val) - if type(attr_val) == str: - setattr(ds, attr, attr_val) - if type(attr_val) == dict: - for key in attr_val: - setattr(ds, attr + '.' 
+ key, attr_val[key]) -ds.close() diff --git a/met/src/libcode/vx_data2d_python/python_dataplane.cc b/met/src/libcode/vx_data2d_python/python_dataplane.cc index 8f70af5109..d5ace046d0 100644 --- a/met/src/libcode/vx_data2d_python/python_dataplane.cc +++ b/met/src/libcode/vx_data2d_python/python_dataplane.cc @@ -31,15 +31,15 @@ GlobalPython GP; // this needs external linkage static const char * user_ppath = 0; -static const char write_tmp_nc [] = "MET_BASE/wrappers/write_tmp_dataplane.py"; +static const char write_pickle [] = "MET_BASE/wrappers/write_pickle_dataplane.py"; -static const char read_tmp_nc [] = "read_tmp_dataplane"; // NO ".py" suffix +static const char read_pickle [] = "read_pickle_dataplane"; // NO ".py" suffix -static const char tmp_nc_base_name [] = "tmp_met_nc"; +static const char pickle_base_name [] = "tmp_met_pickle"; -static const char tmp_nc_var_name [] = "met_info"; +static const char pickle_var_name [] = "met_info"; -static const char tmp_nc_file_var_name [] = "tmp_nc_filename"; +static const char pickle_file_var_name [] = "pickle_filename"; //////////////////////////////////////////////////////////////////////// @@ -51,7 +51,7 @@ static bool straight_python_dataplane(const char * script_name, Grid & met_grid_out, VarInfoPython &vinfo); -static bool tmp_nc_dataplane(const char * script_name, +static bool pickle_dataplane(const char * script_name, int script_argc, char ** script_argv, const bool use_xarray, DataPlane & met_dp_out, Grid & met_grid_out, VarInfoPython &vinfo); @@ -69,9 +69,9 @@ bool python_dataplane(const char * user_script_name, bool status = false; -if ( (user_ppath = getenv(user_python_path_env)) != 0 ) { // do_tmp_nc = true; +if ( (user_ppath = getenv(user_python_path_env)) != 0 ) { // do_pickle = true; - status = tmp_nc_dataplane(user_script_name, + status = pickle_dataplane(user_script_name, user_script_argc, user_script_argv, use_xarray, met_dp_out, met_grid_out, vinfo); @@ -276,7 +276,7 @@ return ( true ); //////////////////////////////////////////////////////////////////////// -bool tmp_nc_dataplane(const char * user_script_name, +bool pickle_dataplane(const char * user_script_name, int user_script_argc, char ** user_script_argv, const bool use_xarray, DataPlane & met_dp_out, Grid & met_grid_out, VarInfoPython &vinfo) @@ -287,7 +287,7 @@ int j; int status; ConcatString command; ConcatString path; -ConcatString tmp_nc_path; +ConcatString pickle_path; const char * tmp_dir = 0; Wchar_Argv wa; @@ -301,14 +301,14 @@ if ( ! tmp_dir ) tmp_dir = default_tmp_dir; path << cs_erase << tmp_dir << '/' - << tmp_nc_base_name; + << pickle_base_name; -tmp_nc_path = make_temp_file_name(path.text(), 0); +pickle_path = make_temp_file_name(path.text(), 0); command << cs_erase << user_ppath << ' ' // user's path to python - << replace_path(write_tmp_nc) << ' ' // write_tmp_nc.py - << tmp_nc_path << ' ' // tmp_nc output filename + << replace_path(write_pickle) << ' ' // write_pickle.py + << pickle_path << ' ' // pickle output filename << user_script_name; // user's script name for (j=1; j " + mlog << Error << "\npickle_dataplane() -> " << "command \"" << command.text() << "\" failed ... 
status = " << status << "\n\n"; @@ -346,15 +346,15 @@ if ( PyErr_Occurred() ) { PyErr_Print(); - mlog << Warning << "\ntmp_nc_dataplane() -> " + mlog << Warning << "\npickle_dataplane() -> " << "an error occurred initializing python\n\n"; return ( false ); } -mlog << Debug(3) << "Reading temporary tmp_nc file: " - << tmp_nc_path << "\n"; +mlog << Debug(3) << "Reading temporary pickle file: " + << pickle_path << "\n"; // // set the arguments @@ -362,9 +362,9 @@ mlog << Debug(3) << "Reading temporary tmp_nc file: " StringArray a; -a.add(read_tmp_nc); +a.add(read_pickle); -a.add(tmp_nc_path); +a.add(pickle_path); wa.set(a); @@ -374,7 +374,7 @@ PySys_SetArgv (wa.wargc(), wa.wargv()); // import the python wrapper script as a module // -path = get_short_name(read_tmp_nc); +path = get_short_name(read_pickle); PyObject * module_obj = PyImport_ImportModule (path.text()); @@ -392,7 +392,7 @@ if ( PyErr_Occurred() ) { PyErr_Print(); - mlog << Warning << "\ntmp_nc_dataplane() -> " + mlog << Warning << "\npickle_dataplane() -> " << "an error occurred importing module " << '\"' << path << "\"\n\n"; @@ -402,7 +402,7 @@ if ( PyErr_Occurred() ) { if ( ! module_obj ) { - mlog << Warning << "\ntmp_nc_dataplane() -> " + mlog << Warning << "\npickle_dataplane() -> " << "error running python script\n\n"; return ( false ); @@ -410,7 +410,7 @@ if ( ! module_obj ) { } // - // read the tmp_nc file + // read the pickle file // // @@ -419,13 +419,13 @@ if ( ! module_obj ) { PyObject * module_dict_obj = PyModule_GetDict (module_obj); -PyObject * key_obj = PyUnicode_FromString (tmp_nc_var_name); +PyObject * key_obj = PyUnicode_FromString (pickle_var_name); PyObject * data_obj = PyDict_GetItem (module_dict_obj, key_obj); if ( ! data_obj || ! PyDict_Check(data_obj) ) { - mlog << Error << "\ntmp_nc_dataplane() -> " + mlog << Error << "\npickle_dataplane() -> " << "bad dict object\n\n"; exit ( 1 ); @@ -450,7 +450,7 @@ dataplane_from_numpy_array(np, attrs_dict_obj, met_dp_out, met_grid_out, vinfo); // cleanup // -remove_temp_file(tmp_nc_path); +remove_temp_file(pickle_path); // // done diff --git a/met/src/libcode/vx_python3_utils/python3_script.cc b/met/src/libcode/vx_python3_utils/python3_script.cc index 7bd567ae2c..56837b65d0 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.cc +++ b/met/src/libcode/vx_python3_utils/python3_script.cc @@ -27,8 +27,6 @@ using namespace std; static const char sq = '\''; // single quote -static const char read_tmp_ascii_py [] = "MET_BASE/wrappers/read_tmp_ascii.py"; - //////////////////////////////////////////////////////////////////////// @@ -77,12 +75,8 @@ void Python3_Script::clear() Module = 0; -ModuleAscii = 0; - Dict = 0; -DictAscii = 0; - Script_Filename.clear(); @@ -165,29 +159,14 @@ return ( var ); } -//////////////////////////////////////////////////////////////////////// - -PyObject * Python3_Script::lookup_ascii(const char * name) const - -{ - -PyObject * var = 0; - -var = PyDict_GetItemString (DictAscii, name); - -return ( var ); - -} //////////////////////////////////////////////////////////////////////// -PyObject * Python3_Script::run(const char * command) const +void Python3_Script::run(const char * command) const { -PyObject * pobj; - if ( empty(command) ) { mlog << Error << "\nPython3_Script::run(const char *) -> " @@ -197,9 +176,7 @@ if ( empty(command) ) { } -pobj = PyRun_String(command, Py_file_input, Dict, Dict); - -if ( ! pobj ) { +if ( ! 
PyRun_String(command, Py_file_input, Dict, Dict) ) { mlog << Error << "\nPython3_Script::run(const char *) -> " << "command \"" << command << "\" failed!\n\n"; @@ -211,7 +188,7 @@ if ( ! pobj ) { fflush(stdout); fflush(stderr); -return pobj; +return; } @@ -257,98 +234,6 @@ return; } -//////////////////////////////////////////////////////////////////////// - -void Python3_Script::import_read_tmp_ascii_py(void) - -{ - -ConcatString module; - -module << cs_erase - << replace_path(read_tmp_ascii_py); - -ConcatString command; - -run_python_string("import sys"); - -command << cs_erase - << "sys.path.append(\"" - << module.dirname().c_str() - << "\")"; - -mlog << Debug(3) << command << "\n"; - -run_python_string(command.text()); - -mlog << Debug(2) << "Importing " << module << "\n"; - -ConcatString path = "read_tmp_ascii"; - -ModuleAscii = PyImport_ImportModule(path.text()); - -if ( ! ModuleAscii ) { - - PyErr_Print(); - mlog << Error << "\nPython3_Script::Python3_Script(const char *) -> " - << "unable to import module \"" << path << "\"\n\n"; - - Py_Finalize(); - - exit ( 1 ); - -} - -DictAscii = PyModule_GetDict(ModuleAscii); - - // - // done - // - -fflush(stdout); -fflush(stderr); - -} - -//////////////////////////////////////////////////////////////////////// - -PyObject* Python3_Script::read_tmp_ascii(const char * tmp_filename) const - -{ - -mlog << Debug(2) << "Reading temporary ascii file: " - << tmp_filename << "\n"; - -ConcatString command; - -command << "read_tmp_ascii(\"" - << tmp_filename - << "\")"; - -mlog << Debug(3) << command << "\n"; - -PyErr_Clear(); - -PyObject * pobj; - -pobj = PyRun_String(command.text(), Py_file_input, DictAscii, DictAscii); - -if ( PyErr_Occurred() ) { - - mlog << Error << "\nPython3_Script::read_tmp_ascii() -> " - << "command \"" << command << "\" failed!\n\n"; - - exit ( 1 ); -} - -PyTypeObject* type = pobj->ob_type; - -const char* p = type->tp_name; - -mlog << Debug(2) << "read_tmp_ascii return type: " << p << "\n"; - -return pobj; -} //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_python3_utils/python3_script.h b/met/src/libcode/vx_python3_utils/python3_script.h index 6930d226a5..20069762f9 100644 --- a/met/src/libcode/vx_python3_utils/python3_script.h +++ b/met/src/libcode/vx_python3_utils/python3_script.h @@ -41,10 +41,6 @@ class Python3_Script { PyObject * Dict; // script dictionary, not allocated - PyObject * ModuleAscii; - - PyObject * DictAscii; - ConcatString Script_Filename; @@ -66,8 +62,6 @@ class Python3_Script { PyObject * module(); PyObject * dict(); - PyObject * module_ascii(); - PyObject * dict_ascii(); // // do stuff @@ -79,15 +73,10 @@ class Python3_Script { PyObject * lookup(const char * name) const; - PyObject * lookup_ascii(const char * name) const; - - PyObject * run(const char * command) const; // runs a command in the namespace of the script + void run(const char * command) const; // runs a command in the namespace of the script void read_pickle (const char * variable_name, const char * pickle_filename) const; - void import_read_tmp_ascii_py (void); - - PyObject * read_tmp_ascii (const char * tmp_filename) const; }; @@ -98,10 +87,6 @@ inline PyObject * Python3_Script::module() { return ( Module ); } inline PyObject * Python3_Script::dict() { return ( Dict ); } -inline PyObject * Python3_Script::module_ascii() { return ( ModuleAscii ); } - -inline PyObject * Python3_Script::dict_ascii() { return ( DictAscii ); } - inline ConcatString Python3_Script::filename() const { return 
( Script_Filename ); } diff --git a/met/src/tools/other/ascii2nc/ascii2nc.cc b/met/src/tools/other/ascii2nc/ascii2nc.cc index 360329659c..4ced5397b1 100644 --- a/met/src/tools/other/ascii2nc/ascii2nc.cc +++ b/met/src/tools/other/ascii2nc/ascii2nc.cc @@ -43,7 +43,6 @@ // 015 03-20-19 Fillmore Add aeronetv2 and aeronetv3 options. // 016 01-30-20 Bullock Add python option. // 017 01-25-21 Halley Gotway MET #1630 Handle zero obs. -// 018 03-01-21 Fillmore Replace pickle files for temporary ascii. // //////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/other/ascii2nc/python_handler.cc b/met/src/tools/other/ascii2nc/python_handler.cc index d894ab6c64..e2733a605e 100644 --- a/met/src/tools/other/ascii2nc/python_handler.cc +++ b/met/src/tools/other/ascii2nc/python_handler.cc @@ -27,12 +27,13 @@ using namespace std; static const char generic_python_wrapper [] = "generic_python"; +static const char generic_pickle_wrapper [] = "generic_pickle"; -static const char write_tmp_ascii_wrapper[] = "MET_BASE/wrappers/write_tmp_point.py"; +static const char write_pickle_wrapper [] = "MET_BASE/wrappers/write_pickle_point.py"; static const char list_name [] = "point_data"; -static const char tmp_base_name [] = "tmp_ascii2nc"; +static const char pickle_base_name [] = "tmp_ascii2nc_pickle"; //////////////////////////////////////////////////////////////////////// @@ -56,7 +57,7 @@ PythonHandler::PythonHandler(const string &program_name) : FileHandler(program_n { -use_tmp_ascii = false; +use_pickle = false; } @@ -81,13 +82,13 @@ for (j=1; j<(a.n()); ++j) { // j starts at one here, not zero } -use_tmp_ascii = false; +use_pickle = false; const char * c = getenv(user_python_path_env); if ( c ) { - use_tmp_ascii = true; + use_pickle = true; user_path_to_python = c; @@ -230,7 +231,7 @@ bool PythonHandler::readAsciiFiles(const vector< ConcatString > &ascii_filename_ bool status = false; -if ( use_tmp_ascii ) status = do_tmp_ascii (); +if ( use_pickle ) status = do_pickle (); else status = do_straight (); return ( status ); @@ -319,10 +320,10 @@ return ( true ); // - // wrapper usage: /path/to/python wrapper.py tmp_output_filename user_script_name [ user_script args ... ] + // wrapper usage: /path/to/python wrapper.py pickle_output_filename user_script_name [ user_script args ... ] // -bool PythonHandler::do_tmp_ascii() +bool PythonHandler::do_pickle() { @@ -330,7 +331,7 @@ int j; const int N = user_script_args.n(); ConcatString command; ConcatString path; -ConcatString tmp_ascii_path; +ConcatString pickle_path; const char * tmp_dir = 0; int status; @@ -344,16 +345,15 @@ if ( ! tmp_dir ) tmp_dir = default_tmp_dir; path << cs_erase << tmp_dir << '/' - << tmp_base_name; + << pickle_base_name; -tmp_ascii_path = make_temp_file_name(path.text(), 0); -tmp_ascii_path << ".txt"; +pickle_path = make_temp_file_name(path.text(), 0); command << cs_erase - << user_path_to_python << ' ' // user's path to python - << replace_path(write_tmp_ascii_wrapper) << ' ' // write_tmp_point.py - << tmp_ascii_path << ' ' // temporary ascii output filename - << user_script_filename; // user's script name + << user_path_to_python << ' ' // user's path to python + << replace_path(write_pickle_wrapper) << ' ' // write_pickle.py + << pickle_path << ' ' // pickle output filename + << user_script_filename; // user's script name for (j=0; j " + mlog << Error << "\nPythonHandler::do_pickle() -> " << "command \"" << command.text() << "\" failed ... 
status = " << status << "\n\n"; @@ -375,20 +375,18 @@ if ( status ) { ConcatString wrapper; -wrapper = generic_python_wrapper; +wrapper = generic_pickle_wrapper; Python3_Script script(wrapper.text()); -script.import_read_tmp_ascii_py(); +script.read_pickle(list_name, pickle_path.text()); -PyObject * dobj = script.read_tmp_ascii(tmp_ascii_path.text()); - -PyObject * obj = script.lookup_ascii(list_name); +PyObject * obj = script.lookup(list_name); if ( ! PyList_Check(obj) ) { - mlog << Error << "\nPythonHandler::do_tmp_ascii() -> " - << "tmp ascii object is not a list!\n\n"; + mlog << Error << "\nPythonHandler::do_pickle() -> " + << "pickle object is not a list!\n\n"; exit ( 1 ); @@ -400,7 +398,7 @@ load_python_obs(obj); // cleanup // -remove_temp_file(tmp_ascii_path); +remove_temp_file(pickle_path); // // done diff --git a/met/src/tools/other/ascii2nc/python_handler.h b/met/src/tools/other/ascii2nc/python_handler.h index b0fb2ef492..abae8ddd5d 100644 --- a/met/src/tools/other/ascii2nc/python_handler.h +++ b/met/src/tools/other/ascii2nc/python_handler.h @@ -50,9 +50,9 @@ class PythonHandler : public FileHandler static string getFormatString() { return "python"; } - bool use_tmp_ascii; + bool use_pickle; - ConcatString user_path_to_python; // if we're using temporary ascii + ConcatString user_path_to_python; // if we're using pickle ConcatString user_script_filename; @@ -68,13 +68,15 @@ class PythonHandler : public FileHandler virtual bool readAsciiFiles(const vector< ConcatString > &ascii_filename_list); - bool do_tmp_ascii(); - bool do_straight (); // straight-up python, no temporary ascii + bool do_pickle (); + bool do_straight (); // straight-up python, no pickle void load_python_obs(PyObject *); // python object is list of lists bool read_obs_from_script (const char * script_name, const char * variable_name); + + bool read_obs_from_pickle (const char * pickle_name, const char * variable_name); }; From 92f0ff07be798a70c05247abba620a44db485e17 Mon Sep 17 00:00:00 2001 From: "Julie.Prestopnik" Date: Wed, 17 Mar 2021 17:41:56 -0600 Subject: [PATCH 72/86] Fixed typos, added content, and modified release date format --- met/docs/conf.py | 2 +- met/docs/index.rst | 85 ++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 83 insertions(+), 4 deletions(-) diff --git a/met/docs/conf.py b/met/docs/conf.py index efa6f948c9..13f65b1b9d 100644 --- a/met/docs/conf.py +++ b/met/docs/conf.py @@ -24,7 +24,7 @@ verinfo = version release = f'{version}' release_year = '2021' -release_date = f'{release_year}0302' +release_date = f'{release_year}-03-31' copyright = f'{release_year}, {author}' # -- General configuration --------------------------------------------------- diff --git a/met/docs/index.rst b/met/docs/index.rst index 4e00f3168d..a66a482aa1 100644 --- a/met/docs/index.rst +++ b/met/docs/index.rst @@ -1,16 +1,95 @@ ===================== MET version |version| ===================== -Developed by the `Developmental Testbed Center `_, Boulder, CO +Developed by the `Developmental Testbed Center `_, +Boulder, CO .. image:: _static/METplus_banner_photo_web.png History ------- -The Model Evaluation Tools (MET) were developed by the Developmental Testbed Center (DTC) and released in January 2008. The goal of the tools was to provide the community with a platform independent and extensible framework for reproducible verification. 
The DTC partners, including NCAR, NOAA, and the USAF, decided to start by replicating the NOAA EMC (see list of acronyms below) Mesoscale Branch verification package, called VSDB. In the first release, MET included several pre-processing, statistical, and analysis tools to provided the primary functionality as the EMC VSDB system, and also included a spatial verification package called MODE. +The Model Evaluation Tools (MET) were developed by the Developmental Testbed +Center (DTC) and released in January 2008. The goal of the tools was to +provide the community with a platform-independent and extensible framework +for reproducible verification. +The DTC partners, including NCAR, NOAA, and the USAF, decided to start by +replicating the NOAA EMC (see list of acronyms below) Mesoscale Branch +verification package, called VSDB. +In the first release, MET included several pre-processing, statistical, +and analysis tools to provide the same primary functionality as the EMC VSDB +system, and also included a spatial verification package called MODE. -Over the years, MET and VSDB packages grew in complexity. Verification capability at other NOAA laboratories, such as ESRL, were also under heavy development. An effort to unify verification capability was first started under the HIWPP project and led by NOAA ESRL. In 2015, the NGGPS Program Office started working groups to focus on several aspects of the next gen system, including the Verification and Validation Working Group. This group made the recommendation to use MET as the foundation for a unified verification capability. In 2016, NCAR and GSD leads visited EMC to gather requirements. At that time, the concept of METplus was developed as it extends beyond the original code base. It was originally called METplus but several constraints have driven the transition to the use of METplus. METplus is now the unified verification, validation, and diagnostics capability for NOAA's UFS and a component of NCAR's SIMA modeling frameworks. It being actively developed by NCAR, ESRL, EMC and is open to community contributions. +Over the years, MET and VSDB packages grew in complexity. Verification +capability at other NOAA laboratories, such as ESRL, were also under heavy +development. An effort to unify verification capability was first started +under the HIWPP project and led by NOAA ESRL. In 2015, the NGGPS +Program Office started working groups to focus on several aspects of the +next gen system, including the Verification and Validation Working Group. +This group made the recommendation to use MET as the foundation for a +unified verification capability. In 2016, NCAR and GSD leads visited EMC +to gather requirements. At that time, the concept of METplus was developed +as it extends beyond the original code base. It was originally MET+ but +several constraints have driven the transition to the use of METplus. +METplus is now the unified verification, validation, and +diagnostics capability for NOAA's UFS and a component of NCAR's SIMA +modeling frameworks. It is being actively developed by NCAR, ESRL, EMC +and is open to community contributions. +METplus Concept +--------------- +METplus is the overarching, or umbrella, repository and hence framework for +the Unified Forecast System verification capability. It is intended to be +extensible through adding additional capability developed by the community. 
+The core components of the framework include MET, the associated database and
+display systems called METviewer and METexpress, and a suite of Python
+wrappers to provide low-level automation and examples, also called use-cases.
+A description of each tool along with some ancillary repositories is as
+follows:
+
+* **MET** - core statistical tool that matches up grids with either gridded
+  analyses or point observations and applies configurable methods to compute
+  statistics and diagnostics
+* **METviewer** - core database and display system intended for deep analysis
+  of MET output
+* **METexpress** - core database and display system intended for quick
+  analysis via pre-defined queries of MET output
+* **METplus wrappers** - suite of Python-based wrappers that provide
+  low-level automation of MET tools and newly developed plotting capability
+* **METplus use-cases** - configuration files and sample data to show how to
+  invoke METplus wrappers to make using MET tools easier and reproducible
+* **METcalcpy** - suite of Python-based scripts to be used by other
+  components of METplus tools for statistical aggregation, event
+  equalization, and other analysis needs
+* **METplotpy** - suite of Python-based scripts to plot MET output,
+  and in some cases provide additional post-processing of output prior
+  to plotting
+* **METdatadb** - database to store MET output and to be used by both
+  METviewer and METexpress
+
+The umbrella repository will be brought together by using a software package
+called `manage_externals `_
+developed by the Community Earth System Model (CESM) team, hosted at NCAR
+and NOAA Earth System Research Laboratory. The manage_externals package
+was developed because CESM is comprised of a number of different components
+that are developed and managed independently. Each component also may have
+additional "external" dependencies that need to be maintained independently.
+
+Acronyms
+--------
+
+* **MET** - Model Evaluation Tools
+* **DTC** - Developmental Testbed Center
+* **NCAR** - National Center for Atmospheric Research
+* **NOAA** - National Oceanic and Atmospheric Administration
+* **EMC** - Environmental Modeling Center
+* **VSDB** - Verification Statistics Data Base
+* **MODE** - Method for Object-Based Diagnostic Evaluation
+* **UFS** - Unified Forecast System
+* **SIMA** - System for Integrated Modeling of the Atmosphere
+* **ESRL** - Earth System Research Laboratory
+* **HIWPP** - High Impact Weather Prediction Project
+* **NGGPS** - Next Generation Global Prediction System
+* **GSD** - Global Systems Division
 .. toctree::
    :hidden:

From 40fce114a3b1cca69b2cf24649f8eb98d9ce74f8 Mon Sep 17 00:00:00 2001
From: Howard Soh
Date: Thu, 18 Mar 2021 08:57:10 -0600
Subject: [PATCH 73/86] #1715 Initial release

---
 test/config/PB2NCConfig_pbl | 163 ++++++++++++++++++++++++++++++++++++
 1 file changed, 163 insertions(+)
 create mode 100644 test/config/PB2NCConfig_pbl

diff --git a/test/config/PB2NCConfig_pbl b/test/config/PB2NCConfig_pbl
new file mode 100644
index 0000000000..80df6014b0
--- /dev/null
+++ b/test/config/PB2NCConfig_pbl
@@ -0,0 +1,163 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// PB2NC configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// PrepBufr message type +// +message_type = ["ONLYSF", "ADPUPA"]; + +// +// Mapping of message type group name to comma-separated list of values +// Derive PRMSL only for SURFACE message types +// +message_type_group_map = [ + { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; }, + { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; }, + { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; }, + { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; } +]; + +// +// Mapping of input PrepBufr message types to output message types +// +message_type_map = []; + +// +// PrepBufr station ID +// +station_id = []; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation time window +// +obs_window = { + beg = -2700; + end = 2700; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation retention regions +// +mask = { + grid = ""; + poly = ""; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observing location elevation +// +elevation_range = { + beg = -1000; + end = 100000; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Observation types +// +pb_report_type = [ 120, 220, 221, 122, 222, 223, 224, 131, 133, 233, 153, 156, 157, 180, 280, 181, 182, 281, 282, 183, 284, 187, 287 ]; + +in_report_type = []; + +instrument_type = []; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Vertical levels to retain +// +level_range = { + beg = 1; + end = 511; +} + +level_category = [0, 1, 4, 5, 6]; + +/////////////////////////////////////////////////////////////////////////////// + +// +// BUFR variable names to retain or derive. +// Use obs_bufr_map to rename variables in the output. +// If empty, process all available variables. +// +obs_bufr_var = ["TOB", "UOB", "VOB", "TOCC", "D_RH", "TDO", "PMO", "HOVI", "CEILING", "MXGS", "D_CAPE", "D_PBL"]; +//obs_bufr_var = ["TOB", "UOB", "VOB", "TOCC", "D_RH", "TDO", "PMO", "HOVI", "CEILING", "MXGS", "D_CAPE"]; +//////////////////////////////////////////////////////////////////////////////// + +// +// Mapping of input BUFR variable names to output variables names. +// The default PREPBUFR map, obs_prepbufr_map, is appended to this map. +// +obs_bufr_map = []; + +// +// Default mapping for PREPBUFR. Replace input BUFR variable names with GRIB +// abbreviations in the output. This default map is appended to obs_bufr_map. +// This should not typically be overridden. 
+// +obs_prefbufr_map = [ + { key = "POB"; val = "PRES"; }, + { key = "QOB"; val = "SPFH"; }, + { key = "TOB"; val = "TMP"; }, + { key = "UOB"; val = "UGRD"; }, + { key = "VOB"; val = "VGRD"; }, + { key = "D_DPT"; val = "DPT"; }, + { key = "D_WDIR"; val = "WDIR"; }, + { key = "D_WIND"; val = "WIND"; }, + { key = "D_RH"; val = "RH"; }, + { key = "D_MIXR"; val = "MIXR"; }, + { key = "D_PBL"; val = "HPBL"; }, + { key = "D_PRMSL"; val = "PRMSL"; }, + { key = "D_CAPE"; val = "CAPE"; }, + { key = "TDO"; val = "DPT"; }, + { key = "PMO"; val = "PRMSL"; }, + { key = "TOCC"; val = "TCDC"; }, + { key = "HOVI"; val = "VIS"; }, + { key = "CEILING"; val = "HGT"; }, + { key = "MXGS"; val = "GUST"; } +]; + +//////////////////////////////////////////////////////////////////////////////// + +quality_mark_thresh = 9; +event_stack_flag = TOP; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Time periods for the summarization +// obs_var (string array) is added and works like grib_code (int array) +// when use_var_id is enabled and variable names are saved. +// +time_summary = { + flag = FALSE; + raw_data = FALSE; + beg = "000000"; + end = "235959"; + step = 300; + width = 600; + grib_code = []; + obs_var = [ "TMP", "WDIR", "RH" ]; + type = [ "min", "max", "range", "mean", "stdev", "median", "p80" ]; + vld_freq = 0; + vld_thresh = 0.0; +} + +//////////////////////////////////////////////////////////////////////////////// + +tmp_dir = "/tmp"; +version = "V10.0"; + +//////////////////////////////////////////////////////////////////////////////// From 1ac92d7ea4e71500b9afe7d83cb797db44c42d2b Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 18 Mar 2021 08:59:15 -0600 Subject: [PATCH 74/86] #1715 Do not combined if there are no overlapping beteewn TQZ and UV records --- met/src/tools/other/pb2nc/pb2nc.cc | 74 ++++++++++++++++++++++-------- 1 file changed, 54 insertions(+), 20 deletions(-) diff --git a/met/src/tools/other/pb2nc/pb2nc.cc b/met/src/tools/other/pb2nc/pb2nc.cc index b2b6eba1fe..97048d444d 100644 --- a/met/src/tools/other/pb2nc/pb2nc.cc +++ b/met/src/tools/other/pb2nc/pb2nc.cc @@ -2920,16 +2920,27 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, float *pqtzuv_tq, *pqtzuv_uv; float *pqtzuv_merged = (float *) 0; float *next_pqtzuv, *prev_pqtzuv; + float tq_pres_max, tq_pres_min, uv_pres_max, uv_pres_min; std::map::iterator it, it_tq, it_uv; // Gets pressure levels for TQZ records - for (it=pqtzuv_map_tq.begin(); it!=pqtzuv_map_tq.end(); ++it) { - tq_levels.add(int(it->first)); + it = pqtzuv_map_tq.begin(); + tq_pres_min = tq_pres_max = it->first; + for (; it!=pqtzuv_map_tq.end(); ++it) { + float pres_v = it->first; + if (tq_pres_min > pres_v) tq_pres_min = pres_v; + if (tq_pres_max < pres_v) tq_pres_max = pres_v; + tq_levels.add(nint(pres_v)); } // Gets pressure levels for common records - for (it=pqtzuv_map_uv.begin(); it!=pqtzuv_map_uv.end(); ++it) { - if (tq_levels.has(int(it->first))) { - common_levels.add(int(it->first)); + it = pqtzuv_map_uv.begin(); + uv_pres_min = uv_pres_max = it->first; + for (; it!=pqtzuv_map_uv.end(); ++it) { + float pres_v = it->first; + if (uv_pres_min > pres_v) uv_pres_min = pres_v; + if (uv_pres_max < pres_v) uv_pres_max = pres_v; + if (tq_levels.has(nint(pres_v))) { + common_levels.add(nint(pres_v)); } } @@ -2937,22 +2948,37 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, log_tqz_and_uv(pqtzuv_map_tq, pqtzuv_map_uv, method_name); } + bool no_overlap = (tq_pres_max < uv_pres_min) || (tq_pres_min > uv_pres_max); + mlog << Debug(6) << 
method_name << "TQZ pressures: " << tq_pres_max + << " to " << tq_pres_min << " UV pressures: " << uv_pres_max + << " to " << uv_pres_min << (no_overlap ? " no overlap!" : " overlapping") << "\n"; + if( no_overlap ) { + mlog << Warning << method_name + << "Can not combine TQ and UV records because of no overlapping.\n"; + mlog << Warning << " TQZ record count: " << tq_count + << ", UV record count: " << uv_count + << " common_levels: " << common_levels.n() << "\n"; + return pqtzuv_map_merged.size(); + } + // Select first record by 1) merging two records with the same pressure // level or 2) interpolate + int tq_pres, uv_pres; next_pqtzuv = (float *)0; it_tq = pqtzuv_map_tq.begin(); it_uv = pqtzuv_map_uv.begin(); pqtzuv_tq = (float *)it_tq->second; pqtzuv_uv = (float *)it_uv->second;; pqtzuv_merged = new float[mxr8vt]; - if (common_levels.has(int(it_tq->first)) - || common_levels.has(int(it_uv->first))) { + tq_pres = nint(it_tq->first); + uv_pres = nint(it_uv->first); + if (common_levels.has(tq_pres) || common_levels.has(uv_pres)) { // Found the records with the same precsure level - if (it_tq->first != it_uv->first) { - if (common_levels.has(int(it_uv->first))) { + if (tq_pres != uv_pres) { + if (common_levels.has(uv_pres)) { pqtzuv_uv = pqtzuv_map_uv[it_uv->first]; } - else if (common_levels.has(int(it_tq->first))) { + else if (common_levels.has(tq_pres)) { pqtzuv_tq = pqtzuv_map_tq[it_tq->first]; } } @@ -2968,7 +2994,7 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, prev_pqtzuv = (float *)it_uv->second; ++it_uv; } - next_pqtzuv = it_uv->second; + next_pqtzuv = (float *)it_uv->second; } else { //Interpolate TQZ into UV @@ -2978,7 +3004,7 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, prev_pqtzuv = (float *)it_tq->second; ++it_tq; } - next_pqtzuv = it_tq->second; + next_pqtzuv = (float *)it_tq->second; } interpolate_pqtzuv(prev_pqtzuv, pqtzuv_merged, next_pqtzuv); } @@ -2996,6 +3022,7 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, if(mlog.verbosity_level() >= PBL_DEBUG_LEVEL) { log_merged_tqz_uv(pqtzuv_map_tq, pqtzuv_map_uv, pqtzuv_map_merged, method_name); } + delete [] pqtzuv_merged; } return pqtzuv_map_merged.size(); @@ -3048,7 +3075,7 @@ float compute_pbl(map pqtzuv_map_tq, pbl_data_vgrd[index] = pqtzuv[5]; if (!is_eq(pbl_data_spfh[index], bad_data_float)) spfh_cnt++; if (!is_eq(pbl_data_hgt[index], bad_data_float)) hgt_cnt++; - selected_levels.add(int(it->first)); + selected_levels.add(nint(it->first)); } index--; @@ -3070,7 +3097,7 @@ float compute_pbl(map pqtzuv_map_tq, if (!is_eq(highest_pressure, bad_data_float)) { index = MAX_PBL_LEVEL - 1; for (; it!=pqtzuv_map_tq.end(); ++it) { - int pres_level = int(it->first); + int pres_level = nint(it->first); if (selected_levels.has(pres_level)) break; float *pqtzuv = pqtzuv_map_merged[it->first]; @@ -3192,9 +3219,14 @@ int interpolate_by_pressure(int length, float *pres_data, float *var_data) { << var_data[idx_start] << " and " << var_data[idx_end] << "\n"; float data_diff = var_data[idx_end] - var_data[idx_start]; for (idx2 = idx_start+1; idx2 pqtzuv_map_pivot, if (first_pres < it_pivot->first) break; } mlog << Debug(8) << method_name << "pivot->first: " << it_pivot->first - << " aux->first " << it_aux->first << " first_pres: " << first_pres - << " prev_pqtzuv[0]" << prev_pqtzuv[0] << "\n"; + << " aux->first: " << it_aux->first << " first_pres: " << first_pres + << " prev_pqtzuv[0]: " << prev_pqtzuv[0] << "\n"; // Find next UV level for (; it_aux!=pqtzuv_map_aux.end(); ++it_aux) { // Skip the records below the first mathcing/interpolated level 
From 9a9713545d232a2230aab7e69ec5f498e1578816 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 18 Mar 2021 08:59:58 -0600 Subject: [PATCH 75/86] #1715 Added pb2nc_compute_pbl_cape --- test/xml/unit_pb2nc.xml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/test/xml/unit_pb2nc.xml b/test/xml/unit_pb2nc.xml index a65f52110d..89ba3c0d30 100644 --- a/test/xml/unit_pb2nc.xml +++ b/test/xml/unit_pb2nc.xml @@ -131,6 +131,25 @@ + + &MET_BIN;/pb2nc + + STATION_ID + MASK_GRID + MASK_POLY + QUALITY_MARK_THRESH 2 + + \ + &DATA_DIR_OBS;/prepbufr/nam.20210311.t00z.prepbufr.tm00 \ + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.nc \ + &CONFIG_DIR;/PB2NCConfig_pbl \ + -v 1 + + + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.nc + + + &MET_BIN;/pb2nc From edb124ba2e74245bf9fed32a062dbcd85451facc Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 18 Mar 2021 09:08:04 -0600 Subject: [PATCH 76/86] #1715 Added pb2nc_compute_pbl_cape --- test/xml/unit_pb2nc.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/xml/unit_pb2nc.xml b/test/xml/unit_pb2nc.xml index 89ba3c0d30..0366d902f9 100644 --- a/test/xml/unit_pb2nc.xml +++ b/test/xml/unit_pb2nc.xml @@ -141,12 +141,12 @@ \ &DATA_DIR_OBS;/prepbufr/nam.20210311.t00z.prepbufr.tm00 \ - &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.nc \ + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.pbl.nc \ &CONFIG_DIR;/PB2NCConfig_pbl \ -v 1 - &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.nc + &OUTPUT_DIR;/pb2nc/nam.20210311.t00z.prepbufr.tm00.pbl.nc From aefabdb867e2741c52a1ee6137dfd02c2fb70c5b Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 18 Mar 2021 13:31:14 -0600 Subject: [PATCH 77/86] #1715 Reduced obs_bufr_var. Removed pb_report_type --- test/config/PB2NCConfig_pbl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/test/config/PB2NCConfig_pbl b/test/config/PB2NCConfig_pbl index 80df6014b0..eeedd7a3e4 100644 --- a/test/config/PB2NCConfig_pbl +++ b/test/config/PB2NCConfig_pbl @@ -67,7 +67,7 @@ elevation_range = { // // Observation types // -pb_report_type = [ 120, 220, 221, 122, 222, 223, 224, 131, 133, 233, 153, 156, 157, 180, 280, 181, 182, 281, 282, 183, 284, 187, 287 ]; +pb_report_type = []; in_report_type = []; @@ -92,8 +92,7 @@ level_category = [0, 1, 4, 5, 6]; // Use obs_bufr_map to rename variables in the output. // If empty, process all available variables. // -obs_bufr_var = ["TOB", "UOB", "VOB", "TOCC", "D_RH", "TDO", "PMO", "HOVI", "CEILING", "MXGS", "D_CAPE", "D_PBL"]; -//obs_bufr_var = ["TOB", "UOB", "VOB", "TOCC", "D_RH", "TDO", "PMO", "HOVI", "CEILING", "MXGS", "D_CAPE"]; +obs_bufr_var = ["D_CAPE", "D_PBL"]; //////////////////////////////////////////////////////////////////////////////// // From b7fb7c1c0386e2c7aa4f44c97d3d3766e4981164 Mon Sep 17 00:00:00 2001 From: Howard Soh Date: Thu, 18 Mar 2021 13:41:32 -0600 Subject: [PATCH 78/86] #1715 Added a blank line for Error/Warning --- met/src/tools/other/pb2nc/pb2nc.cc | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/met/src/tools/other/pb2nc/pb2nc.cc b/met/src/tools/other/pb2nc/pb2nc.cc index 97048d444d..b36095ccf5 100644 --- a/met/src/tools/other/pb2nc/pb2nc.cc +++ b/met/src/tools/other/pb2nc/pb2nc.cc @@ -2428,7 +2428,7 @@ void write_netcdf_hdr_data() { // Check for no messages retained if(dim_count <= 0) { - mlog << Error << method_name << " -> " + mlog << Error << "\n" << method_name << " -> " << "No PrepBufr messages retained. 
Nothing to write.\n\n"; // Delete the NetCDF file remove_temp_file(ncfile); @@ -2950,14 +2950,13 @@ int combine_tqz_and_uv(map pqtzuv_map_tq, bool no_overlap = (tq_pres_max < uv_pres_min) || (tq_pres_min > uv_pres_max); mlog << Debug(6) << method_name << "TQZ pressures: " << tq_pres_max - << " to " << tq_pres_min << " UV pressures: " << uv_pres_max + << " to " << tq_pres_min << " UV pressures: " << uv_pres_max << " to " << uv_pres_min << (no_overlap ? " no overlap!" : " overlapping") << "\n"; if( no_overlap ) { - mlog << Warning << method_name - << "Can not combine TQ and UV records because of no overlapping.\n"; - mlog << Warning << " TQZ record count: " << tq_count - << ", UV record count: " << uv_count - << " common_levels: " << common_levels.n() << "\n"; + mlog << Warning << "\n" << method_name + << "Can not combine TQ and UV records because of no overlapping." + << " TQZ count: " << tq_count << ", UV count: " << uv_count + << " common_levels: " << common_levels.n() << "\n\n"; return pqtzuv_map_merged.size(); } @@ -3061,7 +3060,7 @@ float compute_pbl(map pqtzuv_map_tq, hgt_cnt = spfh_cnt = 0; for (it=pqtzuv_map_merged.begin(); it!=pqtzuv_map_merged.end(); ++it) { if (index < 0) { - mlog << Error << method_name << "negative index: " << index << "\n"; + mlog << Error << "\n" << method_name << "negative index: " << index << "\n\n"; break; } @@ -3081,7 +3080,7 @@ float compute_pbl(map pqtzuv_map_tq, index--; } if (index != -1) { - mlog << Error << method_name << "Missing some levels (" << index << ")\n"; + mlog << Error << "\n" << method_name << "Missing some levels (" << index << ")\n"; } if (pbl_level > MAX_PBL_LEVEL) { @@ -3166,10 +3165,10 @@ void insert_pbl(float *obs_arr, const float pbl_value, const int pbl_code, hdr_info << unix_to_yyyymmdd_hhmmss(hdr_vld_ut) << " " << hdr_typ << " " << hdr_sid; if (is_eq(pbl_value, bad_data_float)) { - mlog << Warning << "Failed to compute PBL " << hdr_info << "\n\n"; + mlog << Warning << "\nFailed to compute PBL " << hdr_info << "\n\n"; } else if (pbl_value < hdr_elv) { - mlog << Warning << "Not saved because the computed PBL (" << pbl_value + mlog << Warning << "\nNot saved because the computed PBL (" << pbl_value << ") is less than the station elevation (" << hdr_elv << "). " << hdr_info << "\n\n"; obs_arr[4] = 0; @@ -3183,7 +3182,7 @@ void insert_pbl(float *obs_arr, const float pbl_value, const int pbl_code, << " lat: " << hdr_lat << ", lon: " << hdr_lon << ", elv: " << hdr_elv << " " << hdr_info << "\n\n"; if (obs_arr[4] > MAX_PBL) { - mlog << Warning << " Computed PBL (" << obs_arr[4] << " from " + mlog << Warning << "\nComputed PBL (" << obs_arr[4] << " from " << pbl_value << ") is too high, Reset to " << MAX_PBL << " " << hdr_info<< "\n\n"; obs_arr[4] = MAX_PBL; @@ -3254,10 +3253,10 @@ void interpolate_pqtzuv(float *prev_pqtzuv, float *cur_pqtzuv, float *next_pqtzu if ((nint(prev_pqtzuv[0]) == nint(cur_pqtzuv[0])) || (nint(next_pqtzuv[0]) == nint(cur_pqtzuv[0])) || (nint(prev_pqtzuv[0]) == nint(next_pqtzuv[0]))) { - mlog << Error << method_name + mlog << Error << "\n" << method_name << " Can't interpolate because of same pressure levels. 
prev: " << prev_pqtzuv[0] << ", cur: " << cur_pqtzuv[0] - << ", next: " << prev_pqtzuv[0] << "\n"; + << ", next: " << prev_pqtzuv[0] << "\n\n"; } else { float p_ratio = (cur_pqtzuv[0] - prev_pqtzuv[0]) / (next_pqtzuv[0] - prev_pqtzuv[0]); From 8dbef78ebac8b2501e6a50ac127a69b836f1a17a Mon Sep 17 00:00:00 2001 From: johnhg Date: Thu, 18 Mar 2021 22:31:45 -0600 Subject: [PATCH 79/86] Per #1725, return good status from TrackInfoArray::add() when using an ATCF line to create a new track. (#1726) --- met/src/libcode/vx_tc_util/track_info.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/met/src/libcode/vx_tc_util/track_info.cc b/met/src/libcode/vx_tc_util/track_info.cc index b7c443c0f3..312d9aa620 100644 --- a/met/src/libcode/vx_tc_util/track_info.cc +++ b/met/src/libcode/vx_tc_util/track_info.cc @@ -801,6 +801,7 @@ bool TrackInfoArray::add(const ATCFTrackLine &l, bool check_dup, bool check_anly TrackInfo t; t.add(l, check_dup, check_anly); Track.push_back(t); + status = true; } return(status); From 5866b2ac1b80dd8f3783e358feba3da2f269d7ff Mon Sep 17 00:00:00 2001 From: johnhg Date: Thu, 18 Mar 2021 23:03:30 -0600 Subject: [PATCH 80/86] Per #1705, update the threshold node heirarchy by adding a climo_prob() function to determine the climatological probability of a CDP-type threshold. Also update derive_climo_prob() in pair_base.cc to call the new climo_prob() function. (#1724) --- met/src/basic/vx_config/threshold.cc | 150 +++++++++++++++++++++ met/src/basic/vx_config/threshold.h | 29 ++-- met/src/libcode/vx_statistics/pair_base.cc | 39 +----- 3 files changed, 177 insertions(+), 41 deletions(-) diff --git a/met/src/basic/vx_config/threshold.cc b/met/src/basic/vx_config/threshold.cc index 7879f7090a..b75ec9784a 100644 --- a/met/src/basic/vx_config/threshold.cc +++ b/met/src/basic/vx_config/threshold.cc @@ -166,6 +166,37 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double Or_Node::climo_prob() const + +{ + +if ( !left_child || !right_child ) { + + mlog << Error << "\nOr_Node::climo_prob() -> " + << "node not populated!\n\n"; + + exit ( 1 ); + +} + +double prob = bad_data_double; +double prob_left = left_child->climo_prob(); +double prob_right = right_child->climo_prob(); + +if ( !is_bad_data(prob_left) && !is_bad_data(prob_right) ) { + + prob = min(prob_left + prob_right, 1.0); + +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Or_Node::need_perc() const { @@ -356,6 +387,55 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double And_Node::climo_prob() const + +{ + +if ( !left_child || !right_child ) { + + mlog << Error << "\nAnd_Node::climo_prob() -> " + << "node not populated!\n\n"; + + exit ( 1 ); + +} + +double prob = bad_data_double; +double prob_left = left_child->climo_prob(); +double prob_right = right_child->climo_prob(); + + // + // For opposing inequalities, compute the difference in percentiles + // + +if ( !is_bad_data(prob_left) && !is_bad_data(prob_right) ) { + + // + // Support complex threshold types >a&&b + // + + if ( ( left_child->type() == thresh_gt || left_child->type() == thresh_ge ) && + ( right_child->type() == thresh_lt || right_child->type() == thresh_le ) ) { + + prob = max( 0.0, prob_right - ( 1.0 - prob_left ) ); + + } + else if ( ( left_child->type() == thresh_lt || left_child->type() == thresh_le ) && + ( right_child->type() == thresh_gt || right_child->type() == thresh_ge ) ) { + + prob = max( 0.0, prob_left - ( 
1.0 - prob_right ) ); + + } +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool And_Node::need_perc() const { @@ -540,6 +620,23 @@ return ( n ); //////////////////////////////////////////////////////////////////////// +double Not_Node::climo_prob() const + +{ + +double prob = bad_data_double; +double prob_child = child->climo_prob(); + +if ( !is_bad_data(prob_child) ) prob = 1.0 - prob_child; + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Not_Node::need_perc() const { @@ -1065,6 +1162,59 @@ return; //////////////////////////////////////////////////////////////////////// +double Simple_Node::climo_prob() const + +{ + +double prob = bad_data_double; + +if ( Ptype == perc_thresh_climo_dist ) { + + // Climo probability varies based on the threshold type + switch ( op ) { + + case thresh_lt: + case thresh_le: + + prob = PT/100.0; + break; + + case thresh_eq: + + prob = 0.0; + break; + + case thresh_ne: + + prob = 1.0; + break; + + case thresh_gt: + case thresh_ge: + + prob = 1.0 - PT/100.0; + break; + + default: + + mlog << Error << "\nSimple_Node::climo_prob() -> " + << "cannot convert climatological distribution percentile " + << "threshold to a probability!\n\n"; + + exit ( 1 ); + break; + + } // switch +} + +return ( prob ); + +} + + +//////////////////////////////////////////////////////////////////////// + + bool Simple_Node::need_perc() const { diff --git a/met/src/basic/vx_config/threshold.h b/met/src/basic/vx_config/threshold.h index ebca96a81c..493173e58d 100644 --- a/met/src/basic/vx_config/threshold.h +++ b/met/src/basic/vx_config/threshold.h @@ -157,6 +157,8 @@ class ThreshNode { virtual double pvalue() const = 0; + virtual double climo_prob() const = 0; + virtual bool need_perc() const = 0; virtual void set_perc(const NumArray *, const NumArray *, const NumArray *) = 0; @@ -197,6 +199,8 @@ class Or_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -217,10 +221,10 @@ class Or_Node : public ThreshNode { //////////////////////////////////////////////////////////////////////// -inline ThreshType Or_Node::type() const { return ( thresh_complex ); } -inline double Or_Node::value() const { return ( bad_data_double ); } -inline PercThreshType Or_Node::ptype() const { return ( no_perc_thresh_type ); } -inline double Or_Node::pvalue() const { return ( bad_data_double ); } +inline ThreshType Or_Node::type() const { return ( thresh_complex ); } +inline double Or_Node::value() const { return ( bad_data_double ); } +inline PercThreshType Or_Node::ptype() const { return ( no_perc_thresh_type ); } +inline double Or_Node::pvalue() const { return ( bad_data_double ); } //////////////////////////////////////////////////////////////////////// @@ -244,6 +248,8 @@ class And_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -293,6 +299,8 @@ class Not_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void set_perc(const NumArray *, const NumArray *, const NumArray *); @@ -363,6 +371,8 @@ class Simple_Node : public ThreshNode { double pvalue() const; + double climo_prob() const; + bool need_perc() const; void get_simple_nodes(vector &) const; @@ -435,6 +445,7 @@ class 
SingleThresh { double get_value() const; PercThreshType get_ptype() const; double get_pvalue() const; + double get_climo_prob() const; void get_simple_nodes(vector &) const; void multiply_by(const double); @@ -451,11 +462,11 @@ class SingleThresh { //////////////////////////////////////////////////////////////////////// -inline ThreshType SingleThresh::get_type() const { return ( node ? node->type() : thresh_na ); } -inline double SingleThresh::get_value() const { return ( node ? node->value() : bad_data_double ); } -inline PercThreshType SingleThresh::get_ptype() const { return ( node ? node->ptype() : no_perc_thresh_type ); } -inline double SingleThresh::get_pvalue() const { return ( node ? node->pvalue() : bad_data_double ); } - +inline ThreshType SingleThresh::get_type() const { return ( node ? node->type() : thresh_na ); } +inline double SingleThresh::get_value() const { return ( node ? node->value() : bad_data_double ); } +inline PercThreshType SingleThresh::get_ptype() const { return ( node ? node->ptype() : no_perc_thresh_type ); } +inline double SingleThresh::get_pvalue() const { return ( node ? node->pvalue() : bad_data_double ); } +inline double SingleThresh::get_climo_prob() const { return ( node ? node->climo_prob() : bad_data_double ); } //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_statistics/pair_base.cc b/met/src/libcode/vx_statistics/pair_base.cc index 8066ed262f..0fe6a1b006 100644 --- a/met/src/libcode/vx_statistics/pair_base.cc +++ b/met/src/libcode/vx_statistics/pair_base.cc @@ -1064,46 +1064,21 @@ NumArray derive_climo_prob(const ClimoCDFInfo &cdf_info, const NumArray &mn_na, const NumArray &sd_na, const SingleThresh &othresh) { int i, n_mn, n_sd; - double prob; NumArray climo_prob, climo_vals; + double prob; // Number of valid climo mean and standard deviation n_mn = mn_na.n_valid(); n_sd = sd_na.n_valid(); - // For CDP threshold types, the climo probability is constant - if(othresh.get_ptype() == perc_thresh_climo_dist) { - - // Climo probability varies based on the threshold type - switch(othresh.get_type()) { - - case thresh_lt: - case thresh_le: - prob = othresh.get_pvalue()/100.0; - break; - - case thresh_eq: - prob = 0.0; - break; - - case thresh_ne: - prob = 1.0; - break; + // Check for constant climo probability + if(!is_bad_data(prob = othresh.get_climo_prob())) { - case thresh_gt: - case thresh_ge: - prob = 1.0 - othresh.get_pvalue()/100.0; - break; - - default: - mlog << Error << "\nderive_climo_prob() -> " - << "climatological threshold \"" << othresh.get_str() - << "\" cannot be converted to a probability!\n\n"; - exit(1); - break; - } + mlog << Debug(4) + << "For threshold " << othresh.get_str() + << ", using a constant climatological probability value of " + << prob << ".\n"; - // Add constant climo probability value climo_prob.add_const(prob, n_mn); } // If both mean and standard deviation were provided, use them to From 1a9f73a97b41565808717a08fec168182dbacccf Mon Sep 17 00:00:00 2001 From: johnhg Date: Thu, 18 Mar 2021 23:04:00 -0600 Subject: [PATCH 81/86] Bugfix 1716 develop perc_thresh (#1722) * Per #1716, committing changes from Randy Bullock to support floating point percentile thresholds. * Per #1716, no code changes, just consistent formatting. * Per #1716, change SFP50 example to SFP33.3 to show an example of using floating point percentile values. 
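As context for the diff that follows, a brief illustration of what floating point percentile threshold support means for users: a threshold string such as ">SFP33.3" may now carry a fractional percentile instead of being limited to integer values like "SFP50". The Python sketch below parses such strings only to show the shape of the feature; the regular expression and the list of percentile families are assumptions, not the grammar actually implemented by is_simple_perc_thresh() and do_simple_perc_thresh() in my_config_scanner.cc.

import re

_PERC_FAMILIES = ("SFP", "SOP", "SCP", "USP", "CDP", "FBIAS")   # assumed list
_PATTERN = re.compile(r"^(<=|>=|==|!=|<|>)("
                      + "|".join(_PERC_FAMILIES)
                      + r")(\d+(?:\.\d+)?)$")

def parse_perc_thresh(text):
    """Return (comparison, family, value) for strings like '>SFP33.3', else None."""
    m = _PATTERN.match(text)
    if not m:
        return None
    op, family, value = m.group(1), m.group(2), float(m.group(3))
    if not 0.0 <= value <= 100.0:
        raise ValueError("percentile out of range: " + text)
    return op, family, value

print(parse_perc_thresh(">SFP33.3"))   # ('>', 'SFP', 33.3)
print(parse_perc_thresh(">SFP50"))     # ('>', 'SFP', 50.0)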
--- met/docs/Users_Guide/config_options.rst | 2 +- met/src/basic/vx_config/my_config_scanner.cc | 78 ++++++++++---------- 2 files changed, 41 insertions(+), 39 deletions(-) diff --git a/met/docs/Users_Guide/config_options.rst b/met/docs/Users_Guide/config_options.rst index 564e222e95..d8acbab286 100644 --- a/met/docs/Users_Guide/config_options.rst +++ b/met/docs/Users_Guide/config_options.rst @@ -81,7 +81,7 @@ The configuration file language supports the following data types: * The following percentile threshold types are supported: * "SFP" for a percentile of the sample forecast values. - e.g. ">SFP50" means greater than the 50-th forecast percentile. + e.g. ">SFP33.3" means greater than the 33.3-rd forecast percentile. * "SOP" for a percentile of the sample observation values. e.g. ">SOP75" means greater than the 75-th observation percentile. diff --git a/met/src/basic/vx_config/my_config_scanner.cc b/met/src/basic/vx_config/my_config_scanner.cc index 57246913cf..1acae0582b 100644 --- a/met/src/basic/vx_config/my_config_scanner.cc +++ b/met/src/basic/vx_config/my_config_scanner.cc @@ -169,6 +169,8 @@ static bool replace_env(ConcatString &); static bool is_fort_thresh_no_spaces(); +static bool is_simple_perc_thresh(); + static int do_simple_perc_thresh(); @@ -370,6 +372,8 @@ if ( is_float_v2() ) { if ( do_float() ) return ( token(FLOAT) ); } if ( is_fort_thresh_no_spaces() ) { return ( do_fort_thresh() ); } +if ( is_simple_perc_thresh() ) { return ( do_simple_perc_thresh() ); } + int t; if ( is_id() ) { t = do_id(); return ( token(t) ); } @@ -533,7 +537,6 @@ if ( is_lhs ) { strncpy(configlval.text, configtext, max_id_length); return ( if ( strcmp(configtext, "print" ) == 0 ) { return ( PRINT ); } - // // boolean? // @@ -554,17 +557,13 @@ for (j=0; jlookup(configtext); if ( e && (e->is_number()) && (! is_lhs) ) { - // cout << "=================== id = \"" << configtext << "\" is_lhs = " << (is_lhs ? "true" : "false") << "\n"; - - // cout << "do_id() -> \n"; - // e->dump(cout); - if ( e->type() == IntegerType ) { set_int(configlval.nval, e->i_value()); @@ -613,28 +607,20 @@ if ( e && (! is_lhs) && (e->type() == UserFunctionType) ) { } - /////////////////////////////////////////////////////////////////////// - - - - // // fortran threshold without spaces? (example: "le150") // -if ( (strncmp(configtext, "lt", 2) == 0) && is_number(configtext + 2, max_id_length - 2) ) { return ( do_fort_thresh() ); } - for (j=0; j " @@ -1482,11 +1493,8 @@ if ( index < 0 ) { } - configlval.pc_info.perc_index = index; -// configlval.pc_info.is_simple = true; -configlval.pc_info.value = value; -// configlval.pc_info.value2 = bad_data_double;; +configlval.pc_info.value = value; return ( SIMPLE_PERC_THRESH ); @@ -1495,9 +1503,3 @@ return ( SIMPLE_PERC_THRESH ); //////////////////////////////////////////////////////////////////////// - - - - - - From 8dfd7c075df6e59cc2714c41f9d35fe04fb3eb13 Mon Sep 17 00:00:00 2001 From: johnhg Date: Fri, 19 Mar 2021 12:32:00 -0600 Subject: [PATCH 82/86] Update pull_request_template.md --- .github/pull_request_template.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b85706f034..a475d51459 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -4,7 +4,9 @@ - [ ] Recommend testing for the reviewer(s) to perform, including the location of input datasets, and any additional instructions:
-- [ ] Do these changes include sufficient documentation and testing updates? **[Yes or No]** +- [ ] Do these changes include sufficient documentation updates, ensuring that no errors or warnings exist in the build of the documentation? **[Yes or No]** + +- [ ] Do these changes include sufficient testing updates? **[Yes or No]** - [ ] Will this PR result in changes to the test suite? **[Yes or No]**
If **yes**, describe the new output and/or changes to the existing output:
From e2f77e44518877aaf75682ad4cfb78a730af0044 Mon Sep 17 00:00:00 2001 From: johnhg Date: Mon, 29 Mar 2021 10:33:31 -0600 Subject: [PATCH 83/86] Feature 1733 exc (#1734) * Per #1733, add column_exc_name, column_exc_val, init_exc_name, and init_exc_val options to the TCStat config files. * Per #1733, enhance tc_stat to support the column_exc and init_exc config file and job command filtering options. * Per #1733, update stat_analysis to support the -column_exc job filtering option. Still need to update docuementation and add unit tests. * Per #1773, update the user's guide with the new config and job command options. * Per #1733, add call to stat_analysis to exercise -column_str and -column_exc options. * Per #1733, I ran into a namespace conflict in tc_stat where -init_exc was used for to filter by time AND my string value. So I switched to using -init_str_exc instead. And made the corresponding change to -column_str_exc in stat_analysis and tc_stat. Also changed internal variable names to use IncMap and ExcMap to keep the logic clear. * Per #1733, tc_stat config file updates to switch from column_exc and init_exc to column_str_exc and init_str_exc. * Per #1733, add tc_stat and stat_analysis jobs to exercise the string filtering options. --- met/data/config/TCStatConfig_default | 12 + met/docs/Users_Guide/config_options.rst | 16 +- met/docs/Users_Guide/config_options_tc.rst | 52 ++- met/docs/Users_Guide/gsi-tools.rst | 4 +- met/docs/Users_Guide/stat-analysis.rst | 15 +- met/docs/Users_Guide/tc-stat.rst | 24 +- met/src/basic/vx_config/config_constants.h | 4 + met/src/libcode/vx_analysis_util/stat_job.cc | 77 +++- met/src/libcode/vx_analysis_util/stat_job.h | 3 +- .../tc_utils/tc_stat/tc_stat_conf_info.cc | 219 +++++----- met/src/tools/tc_utils/tc_stat/tc_stat_job.cc | 379 +++++++++++------- met/src/tools/tc_utils/tc_stat/tc_stat_job.h | 6 +- test/config/TCStatConfig_ALAL2010 | 12 + test/config/TCStatConfig_PROBRIRW | 12 + test/xml/unit_stat_analysis.xml | 15 + test/xml/unit_tc_stat.xml | 15 +- 16 files changed, 562 insertions(+), 303 deletions(-) diff --git a/met/data/config/TCStatConfig_default b/met/data/config/TCStatConfig_default index 3fc882f0f2..d385b01353 100644 --- a/met/data/config/TCStatConfig_default +++ b/met/data/config/TCStatConfig_default @@ -111,6 +111,12 @@ column_thresh_val = []; column_str_name = []; column_str_val = []; +// +// Stratify by excluding strings in non-numeric data columns. +// +column_str_exc_name = []; +column_str_exc_val = []; + // // Similar to the column_thresh options above // @@ -123,6 +129,12 @@ init_thresh_val = []; init_str_name = []; init_str_val = []; +// +// Similar to the column_str_exc options above +// +init_str_exc_name = []; +init_str_exc_val = []; + // // Stratify by the ADECK and BDECK distances to land. // diff --git a/met/docs/Users_Guide/config_options.rst b/met/docs/Users_Guide/config_options.rst index d8acbab286..e1f65cc7b4 100644 --- a/met/docs/Users_Guide/config_options.rst +++ b/met/docs/Users_Guide/config_options.rst @@ -3748,17 +3748,19 @@ Where "job_name" is set to one of the following: Job command FILTERING options that may be used only when -line_type has been listed once. These options take two arguments: the name of the data column to be used and the min, max, or exact value for that column. - If multiple column eq/min/max/str options are listed, the job will be + If multiple column eq/min/max/str/exc options are listed, the job will be performed on their intersection: .. code-block:: none - "-column_min col_name value" e.g. 
-column_min BASER 0.02 - "-column_max col_name value" - "-column_eq col_name value" - "-column_thresh col_name threshold" e.g. -column_thresh FCST '>273' - "-column_str col_name string" separate multiple filtering strings - with commas + "-column_min col_name value" e.g. -column_min BASER 0.02 + "-column_max col_name value" + "-column_eq col_name value" + "-column_thresh col_name threshold" e.g. -column_thresh FCST '>273' + "-column_str col_name string" separate multiple filtering strings + with commas + "-column_str_exc col_name string" separate multiple filtering strings + with commas Job command options to DEFINE the analysis job. Unless otherwise noted, diff --git a/met/docs/Users_Guide/config_options_tc.rst b/met/docs/Users_Guide/config_options_tc.rst index 155a394ed0..11f7330b4b 100644 --- a/met/docs/Users_Guide/config_options_tc.rst +++ b/met/docs/Users_Guide/config_options_tc.rst @@ -517,8 +517,8 @@ For example: Stratify by performing string matching on non-numeric data columns. Specify a comma-separated list of columns names and values -to be checked. May add using the "-column_str name string" job command -options. +to be included in the analysis. +May add using the "-column_str name string" job command options. For example: @@ -531,6 +531,23 @@ For example: column_str_name = []; column_str_val = []; +**column_str_exc_name, column_str_exc_val** + +Stratify by performing string matching on non-numeric data columns. +Specify a comma-separated list of columns names and values +to be excluded from the analysis. +May add using the "-column_str_exc name string" job command options. + +For example: + +| column_str_exc_name = [ "LEVEL" ]; +| column_str_exc_val = [ "TD" ]; +| + +.. code-block:: none + + column_str_exc_name = []; + column_str_exc_val = []; **init_thresh_name, init_thresh_val** @@ -567,6 +584,23 @@ For example: init_str_name = []; init_str_val = []; +**init_str_exc_name, init_str_exc_val** + +Just like the column_str_exc options above, but apply the string matching only +when lead = 0. If lead = 0 string does match, discard the entire track. +May add using the "-init_str_exc name thresh" job command options. + +For example: + +| init_str_exc_name = [ "LEVEL" ]; +| init_str_exc_val = [ "HU" ]; +| + +.. code-block:: none + + init_str_exc_name = []; + init_str_exc_val = []; + **water_only** Stratify by the ADECK and BDECK distances to land. Once either the ADECK or @@ -747,8 +781,10 @@ Where "job_name" is set to one of the following: "-track_watch_warn name" "-column_thresh name thresh" "-column_str name string" + "-column_str_exc name string" "-init_thresh name thresh" "-init_str name string" + "-init_str_exc name string" Additional filtering options that may be used only when -line_type has been listed only once. These options take two arguments: the name @@ -758,11 +794,13 @@ Where "job_name" is set to one of the following: .. 
code-block:: none - "-column_min col_name value" For example: -column_min TK_ERR 100.00 - "-column_max col_name value" - "-column_eq col_name value" - "-column_str col_name string" separate multiple filtering strings - with commas + "-column_min col_name value" For example: -column_min TK_ERR 100.00 + "-column_max col_name value" + "-column_eq col_name value" + "-column_str col_name string" separate multiple filtering strings + with commas + "-column_str_exc col_name string" separate multiple filtering strings + with commas Required Args: -dump_row diff --git a/met/docs/Users_Guide/gsi-tools.rst b/met/docs/Users_Guide/gsi-tools.rst index 0e7a6fb92a..019e5c3f7b 100644 --- a/met/docs/Users_Guide/gsi-tools.rst +++ b/met/docs/Users_Guide/gsi-tools.rst @@ -230,7 +230,7 @@ The GSID2MPR tool writes the same set of MPR output columns for the conventional - PRS_MAX_WGT - Pressure of the maximum weighing function -The gsid2mpr output may be passed to the Stat-Analysis tool to derive additional statistics. In particular, users should consider running the **aggregate_stat** job type to read MPR lines and compute partial sums (SL1L2), continuous statistics (CNT), contingency table counts (CTC), or contingency table statistics (CTS). Stat-Analysis has been enhanced to parse any extra columns found at the end of the input lines. Users can filter the values in those extra columns using the **-column_thresh** and **-column_str** job command options. +The gsid2mpr output may be passed to the Stat-Analysis tool to derive additional statistics. In particular, users should consider running the **aggregate_stat** job type to read MPR lines and compute partial sums (SL1L2), continuous statistics (CNT), contingency table counts (CTC), or contingency table statistics (CTS). Stat-Analysis has been enhanced to parse any extra columns found at the end of the input lines. Users can filter the values in those extra columns using the **-column_thresh**, **-column_str**, and **-column_str_exc** job command options. An example of the Stat-Analysis calling sequence is shown below: @@ -425,7 +425,7 @@ The GSID2MPR tool writes the same set of ORANK output columns for the convention - TZFND - d(Tz)/d(Tr) -The gsidens2orank output may be passed to the Stat-Analysis tool to derive additional statistics. In particular, users should consider running the **aggregate_stat** job type to read ORANK lines and ranked histograms (RHIST), probability integral transform histograms (PHIST), and spread-skill variance output (SSVAR). Stat-Analysis has been enhanced to parse any extra columns found at the end of the input lines. Users can filter the values in those extra columns using the **-column_thresh** and **-column_str** job command options. +The gsidens2orank output may be passed to the Stat-Analysis tool to derive additional statistics. In particular, users should consider running the **aggregate_stat** job type to read ORANK lines and ranked histograms (RHIST), probability integral transform histograms (PHIST), and spread-skill variance output (SSVAR). Stat-Analysis has been enhanced to parse any extra columns found at the end of the input lines. Users can filter the values in those extra columns using the **-column_thresh**, **-column_str**, and **-column_str_exc** job command options. 
An example of the Stat-Analysis calling sequence is shown below: diff --git a/met/docs/Users_Guide/stat-analysis.rst b/met/docs/Users_Guide/stat-analysis.rst index 50655fc573..ce2d8c7654 100644 --- a/met/docs/Users_Guide/stat-analysis.rst +++ b/met/docs/Users_Guide/stat-analysis.rst @@ -522,13 +522,14 @@ This job command option is extremely useful. It can be used multiple times to sp .. code-block:: none - -column_min col_name value - -column_max col_name value - -column_eq col_name value - -column_thresh col_name thresh - -column_str col_name string - -The column filtering options may be used when the **-line_type** has been set to a single value. These options take two arguments, the name of the data column to be used followed by a value, string, or threshold to be applied. If multiple column_min/max/eq/thresh/str options are listed, the job will be performed on their intersection. Each input line is only retained if its value meets the numeric filtering criteria defined or matches one of the strings defined by the **-column_str** option. Multiple filtering strings may be listed using commas. Defining thresholds in MET is described in :numref:`config_options`. + -column_min col_name value + -column_max col_name value + -column_eq col_name value + -column_thresh col_name thresh + -column_str col_name string + -column_str_exc col_name string + +The column filtering options may be used when the **-line_type** has been set to a single value. These options take two arguments, the name of the data column to be used followed by a value, string, or threshold to be applied. If multiple column_min/max/eq/thresh/str options are listed, the job will be performed on their intersection. Each input line is only retained if its value meets the numeric filtering criteria defined, matches one of the strings defined by the **-column_str** option, or does not match any of the string defined by the **-column_str_exc** option. Multiple filtering strings may be listed using commas. Defining thresholds in MET is described in :numref:`config_options`. .. code-block:: none diff --git a/met/docs/Users_Guide/tc-stat.rst b/met/docs/Users_Guide/tc-stat.rst index f1ddfbeb7d..c66a6f6894 100644 --- a/met/docs/Users_Guide/tc-stat.rst +++ b/met/docs/Users_Guide/tc-stat.rst @@ -251,7 +251,16 @@ _________________________ column_str_name = []; column_str_val = []; -The **column_str_name** and **column_str_val** fields stratify by performing string matching on non-numeric data columns. Specify a comma-separated list of columns names and values to be checked. The length of the **column_str_val** should match that of the **column_str_name**. Using the **-column_str name val** option within the job command lines may further refine these selections. +The **column_str_name** and **column_str_val** fields stratify by performing string matching on non-numeric data columns. Specify a comma-separated list of columns names and values to be **included** in the analysis. The length of the **column_str_val** should match that of the **column_str_name**. Using the **-column_str name val** option within the job command lines may further refine these selections. + +_________________________ + +.. code-block:: none + + column_str_exc_name = []; + column_str_exc_val = []; + +The **column_str_exc_name** and **column_str_exc_val** fields stratify by performing string matching on non-numeric data columns. Specify a comma-separated list of columns names and values to be **excluded** from the analysis. 
The length of the **column_str_exc_val** should match that of the **column_str_exc_name**. Using the **-column_str_exc name val** option within the job command lines may further refine these selections. _________________________ @@ -260,7 +269,7 @@ _________________________ init_thresh_name = []; init_thresh_val = []; -The **init_thresh_name** and **init_thresh_val** fields stratify by applying thresholds to numeric data columns only when lead = 0. If lead =0, but the value does not meet the threshold, discard the entire track. The length of the **init_thresh_val** should match that of the **init_thresh_name**. Using the **-init_thresh name val** option within the job command lines may further refine these selections. +The **init_thresh_name** and **init_thresh_val** fields stratify by applying thresholds to numeric data columns only when lead = 0. If lead = 0, but the value does not meet the threshold, discard the entire track. The length of the **init_thresh_val** should match that of the **init_thresh_name**. Using the **-init_thresh name val** option within the job command lines may further refine these selections. _________________________ @@ -269,7 +278,16 @@ _________________________ init_str_name = []; init_str_val = []; -The **init_str_name** and **init_str_val** fields stratify by performing string matching on non-numeric data columns only when lead = 0. If lead =0, but the string does not match, discard the entire track. The length of the **init_str_val** should match that of the **init_str_name**. Using the **-init_str name val** option within the job command lines may further refine these selections. +The **init_str_name** and **init_str_val** fields stratify by performing string matching on non-numeric data columns only when lead = 0. If lead = 0, but the string **does not** match, discard the entire track. The length of the **init_str_val** should match that of the **init_str_name**. Using the **-init_str name val** option within the job command lines may further refine these selections. + +_________________________ + +.. code-block:: none + + init_str_exc_name = []; + init_str_exc_val = []; + +The **init_str_exc_name** and **init_str_exc_val** fields stratify by performing string matching on non-numeric data columns only when lead = 0. If lead = 0, and the string **does** match, discard the entire track. The length of the **init_str_exc_val** should match that of the **init_str_exc_name**. Using the **-init_str_exc name val** option within the job command lines may further refine these selections. 
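To make the include/exclude semantics introduced by these options concrete, the following is a minimal standalone C++ sketch of the keep/reject decision they describe: a line (or track point) is retained only if, for every inclusion column, its value appears in that column's list, and for every exclusion column, its value does not. This is an illustration only, not MET code: it uses plain std::map and std::set in place of the ConcatString/StringArray containers, and the LEVEL and WATCH_WARN sample values are borrowed from the unit_tc_stat.xml test case added later in this patch.

.. code-block:: cpp

   // Illustrative sketch of inclusive (-column_str / -init_str) versus
   // exclusive (-column_str_exc / -init_str_exc) string filtering.
   // Container types and the sample lines are hypothetical stand-ins.
   #include <iostream>
   #include <map>
   #include <set>
   #include <string>

   using StrFilterMap = std::map<std::string, std::set<std::string>>;

   // Keep a line only if every inclusion filter matches its column value
   // and no exclusion filter matches its column value.
   bool is_keeper(const std::map<std::string, std::string>& line,
                  const StrFilterMap& str_inc,
                  const StrFilterMap& str_exc) {
      for (const auto& [col, vals] : str_inc) {
         auto it = line.find(col);
         if (it == line.end() || vals.count(it->second) == 0) return false;
      }
      for (const auto& [col, vals] : str_exc) {
         auto it = line.find(col);
         if (it != line.end() && vals.count(it->second) > 0) return false;
      }
      return true;
   }

   int main() {
      // Keep lines whose LEVEL is TS or HU, but drop any line whose
      // WATCH_WARN column is HUWARN.
      StrFilterMap inc = { { "LEVEL", { "TS", "HU" } } };
      StrFilterMap exc = { { "WATCH_WARN", { "HUWARN" } } };

      std::map<std::string, std::string> keep_line = { { "LEVEL", "HU" }, { "WATCH_WARN", "NA" } };
      std::map<std::string, std::string> drop_line = { { "LEVEL", "HU" }, { "WATCH_WARN", "HUWARN" } };

      std::cout << is_keeper(keep_line, inc, exc) << "\n";  // 1: retained
      std::cout << is_keeper(drop_line, inc, exc) << "\n";  // 0: rejected by the exclusion filter
      return 0;
   }

Note that the actual implementation performs case-insensitive matching (see the set_ignore_case(1) calls in the source changes below), which this sketch does not attempt to reproduce.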
_________________________ diff --git a/met/src/basic/vx_config/config_constants.h b/met/src/basic/vx_config/config_constants.h index 1ae8d5d90d..e63a6935f0 100644 --- a/met/src/basic/vx_config/config_constants.h +++ b/met/src/basic/vx_config/config_constants.h @@ -1037,10 +1037,14 @@ static const char conf_key_column_thresh_name[] = "column_thresh_name"; static const char conf_key_column_thresh_val[] = "column_thresh_val"; static const char conf_key_column_str_name[] = "column_str_name"; static const char conf_key_column_str_val[] = "column_str_val"; +static const char conf_key_column_str_exc_name[] = "column_str_exc_name"; +static const char conf_key_column_str_exc_val[] = "column_str_exc_val"; static const char conf_key_init_thresh_name[] = "init_thresh_name"; static const char conf_key_init_thresh_val[] = "init_thresh_val"; static const char conf_key_init_str_name[] = "init_str_name"; static const char conf_key_init_str_val[] = "init_str_val"; +static const char conf_key_init_str_exc_name[] = "init_str_exc_name"; +static const char conf_key_init_str_exc_val[] = "init_str_exc_val"; static const char conf_key_water_only[] = "water_only"; static const char conf_key_rirw_track[] = "rirw.track"; static const char conf_key_rirw_time_adeck[] = "rirw.adeck.time"; diff --git a/met/src/libcode/vx_analysis_util/stat_job.cc b/met/src/libcode/vx_analysis_util/stat_job.cc index 346ba9e03f..870d5cac1d 100644 --- a/met/src/libcode/vx_analysis_util/stat_job.cc +++ b/met/src/libcode/vx_analysis_util/stat_job.cc @@ -172,7 +172,8 @@ void STATAnalysisJob::clear() { wmo_fisher_stats.clear(); column_thresh_map.clear(); - column_str_map.clear(); + column_str_inc_map.clear(); + column_str_exc_map.clear(); by_column.clear(); @@ -306,7 +307,8 @@ void STATAnalysisJob::assign(const STATAnalysisJob & aj) { wmo_fisher_stats = aj.wmo_fisher_stats; column_thresh_map = aj.column_thresh_map; - column_str_map = aj.column_str_map; + column_str_inc_map = aj.column_str_inc_map; + column_str_exc_map = aj.column_str_exc_map; by_column = aj.by_column; @@ -507,9 +509,16 @@ void STATAnalysisJob::dump(ostream & out, int depth) const { thr_it->second.dump(out, depth + 1); } - out << prefix << "column_str_map ...\n"; - for(map::const_iterator str_it = column_str_map.begin(); - str_it != column_str_map.end(); str_it++) { + out << prefix << "column_str_inc_map ...\n"; + for(map::const_iterator str_it = column_str_inc_map.begin(); + str_it != column_str_inc_map.end(); str_it++) { + out << prefix << str_it->first << ": \n"; + str_it->second.dump(out, depth + 1); + } + + out << prefix << "column_str_exc_map ...\n"; + for(map::const_iterator str_it = column_str_exc_map.begin(); + str_it != column_str_exc_map.end(); str_it++) { out << prefix << str_it->first << ": \n"; str_it->second.dump(out, depth + 1); } @@ -948,8 +957,8 @@ int STATAnalysisJob::is_keeper(const STATLine & L) const { // // column_str // - for(map::const_iterator str_it = column_str_map.begin(); - str_it != column_str_map.end(); str_it++) { + for(map::const_iterator str_it = column_str_inc_map.begin(); + str_it != column_str_inc_map.end(); str_it++) { // // Check if the current value is in the list for the column @@ -957,6 +966,18 @@ int STATAnalysisJob::is_keeper(const STATLine & L) const { if(!str_it->second.has(L.get_item(str_it->first.c_str(), false))) return(0); } + // + // column_str_exc + // + for(map::const_iterator str_it = column_str_exc_map.begin(); + str_it != column_str_exc_map.end(); str_it++) { + + // + // Check if the current value is not in the list for 
the column + // + if(str_it->second.has(L.get_item(str_it->first.c_str(), false))) return(0); + } + // // For MPR lines, check mask_grid, mask_poly, and mask_sid // @@ -1125,7 +1146,10 @@ void STATAnalysisJob::parse_job_command(const char *jobstring) { column_thresh_map.clear(); } else if(jc_array[i] == "-column_str" ) { - column_str_map.clear(); + column_str_inc_map.clear(); + } + else if(jc_array[i] == "-column_str_exc" ) { + column_str_exc_map.clear(); } else if(jc_array[i] == "-set_hdr" ) { hdr_name.clear(); @@ -1376,12 +1400,30 @@ void STATAnalysisJob::parse_job_command(const char *jobstring) { col_value.add_css(jc_array[i+2]); // If the column name is already present in the map, add to it - if(column_str_map.count(col_name) > 0) { - column_str_map[col_name].add(col_value); + if(column_str_inc_map.count(col_name) > 0) { + column_str_inc_map[col_name].add(col_value); } // Otherwise, add a new map entry else { - column_str_map.insert(pair(col_name, col_value)); + column_str_inc_map.insert(pair(col_name, col_value)); + } + i+=2; + } + else if(jc_array[i] == "-column_str_exc") { + + // Parse the column name and value + col_name = to_upper((string)jc_array[i+1]); + col_value.clear(); + col_value.set_ignore_case(1); + col_value.add_css(jc_array[i+2]); + + // If the column name is already present in the map, add to it + if(column_str_exc_map.count(col_name) > 0) { + column_str_exc_map[col_name].add(col_value); + } + // Otherwise, add a new map entry + else { + column_str_exc_map.insert(pair(col_name, col_value)); } i+=2; } @@ -2461,14 +2503,23 @@ ConcatString STATAnalysisJob::get_jobstring() const { } // column_str - for(map::const_iterator str_it = column_str_map.begin(); - str_it != column_str_map.end(); str_it++) { + for(map::const_iterator str_it = column_str_inc_map.begin(); + str_it != column_str_inc_map.end(); str_it++) { for(i=0; isecond.n(); i++) { js << "-column_str " << str_it->first << " " << str_it->second[i] << " "; } } + // column_str_exc + for(map::const_iterator str_it = column_str_exc_map.begin(); + str_it != column_str_exc_map.end(); str_it++) { + + for(i=0; isecond.n(); i++) { + js << "-column_str_exc " << str_it->first << " " << str_it->second[i] << " "; + } + } + // by_column if(by_column.n() > 0) { for(i=0; i column_thresh_map; // ASCII column string matching - map column_str_map; + map column_str_inc_map; + map column_str_exc_map; StringArray hdr_name; StringArray hdr_value; diff --git a/met/src/tools/tc_utils/tc_stat/tc_stat_conf_info.cc b/met/src/tools/tc_utils/tc_stat/tc_stat_conf_info.cc index 3b21161363..1bdc3af262 100644 --- a/met/src/tools/tc_utils/tc_stat/tc_stat_conf_info.cc +++ b/met/src/tools/tc_utils/tc_stat/tc_stat_conf_info.cc @@ -22,6 +22,16 @@ using namespace std; #include "vx_log.h" +//////////////////////////////////////////////////////////////////////// + +// Functions for parsing config entries +static void parse_conf_thresh_map(MetConfig &, + const char *, const char *, + map &); +static void parse_conf_string_map(MetConfig &, + const char *, const char *, + map &); + //////////////////////////////////////////////////////////////////////// // // Code for class TCStatConfInfo @@ -63,7 +73,7 @@ void TCStatConfInfo::clear() { //////////////////////////////////////////////////////////////////////// void TCStatConfInfo::read_config(const char *default_file_name, - const char *user_file_name) { + const char *user_file_name) { // Read the config file constants Conf.read(replace_path(config_const_filename).c_str()); @@ -84,8 +94,7 @@ void 
TCStatConfInfo::read_config(const char *default_file_name, void TCStatConfInfo::process_config() { int i; - StringArray sa, sa_val, sa_new; - ThreshArray ta_val, ta_new; + StringArray sa; ConcatString poly_file; // Conf: Version @@ -119,12 +128,12 @@ void TCStatConfInfo::process_config() { // Conf: TCStatJob::InitInc sa = Conf.lookup_string_array(conf_key_init_inc); - for(i=0; i " - << "the \"column_thresh_name\" and \"column_thresh_val\" " - << "entries must have the same length.\n\n"; - exit(1); - } + parse_conf_thresh_map(Conf, + conf_key_column_thresh_name, conf_key_column_thresh_val, + Filter.ColumnThreshMap); - // Add entries to the map - for(i=0; i 0) { - Filter.ColumnThreshMap[sa[i]].add(ta_val[i]); - } - else { - ta_new.clear(); - ta_new.add(ta_val[i]); - Filter.ColumnThreshMap.insert(pair(sa[i], ta_new)); - } - } // end for i - - // Conf: TCStatJob::ColumnStrName, TCStatJob::ColumnStrVal - sa = Conf.lookup_string_array(conf_key_column_str_name); - sa_val = Conf.lookup_string_array(conf_key_column_str_val); - - // Check that they are the same length - if(sa.n_elements() != sa_val.n_elements()) { - mlog << Error - << "\nTCStatConfInfo::process_config() -> " - << "the \"column_str_name\" and \"column_str_val\" " - << "entries must have the same length.\n\n"; - exit(1); - } + // Conf: TCStatJob::ColumnStrIncName, TCStatJob::ColumnStrIncVal + parse_conf_string_map(Conf, + conf_key_column_str_name, conf_key_column_str_val, + Filter.ColumnStrIncMap); - // Add entries to the map - for(i=0; i 0) { - Filter.ColumnStrMap[sa[i]].add(sa_val[i]); - } - else { - sa_new.clear(); - sa_new.set_ignore_case(1); - sa_new.add(sa_val[i]); - Filter.ColumnStrMap.insert(pair(sa[i], sa_new)); - } - } // end for i + // Conf: TCStatJob::ColumnStrExcName, TCStatJob::ColumnStrExcVal + parse_conf_string_map(Conf, + conf_key_column_str_exc_name, conf_key_column_str_exc_val, + Filter.ColumnStrExcMap); // Conf: TCStatJob::InitThreshName, TCStatJob::InitThreshVal - sa = Conf.lookup_string_array(conf_key_init_thresh_name); - ta_val = Conf.lookup_thresh_array(conf_key_init_thresh_val); + parse_conf_thresh_map(Conf, + conf_key_init_thresh_name, conf_key_init_thresh_val, + Filter.InitThreshMap); - // Check that they are the same length - if(sa.n_elements() != ta_val.n_elements()) { - mlog << Error - << "\nTCStatConfInfo::process_config() -> " - << "the \"init_thresh_name\" and \"init_thresh_val\" " - << "entries must have the same length.\n\n"; - exit(1); - } - - // Add entries to the map - for(i=0; i 0) { - Filter.InitThreshMap[sa[i]].add(ta_val[i]); - } - else { - ta_new.clear(); - ta_new.add(ta_val[i]); - Filter.InitThreshMap.insert(pair(sa[i], ta_new)); - } - } // end for i - - // Conf: TCStatJob::InitStrName, TCStatJob::InitStrVal - sa = Conf.lookup_string_array(conf_key_init_str_name); - sa_val = Conf.lookup_string_array(conf_key_init_str_val); - - // Check that they are the same length - if(sa.n_elements() != sa_val.n_elements()) { - mlog << Error - << "\nTCStatConfInfo::process_config() -> " - << "the \"init_str_name\" and \"init_str_val\" " - << "entries must have the same length.\n\n"; - exit(1); - } + // Conf: TCStatJob::InitStrIncName, TCStatJob::InitStrIncVal + parse_conf_string_map(Conf, + conf_key_init_str_name, conf_key_init_str_val, + Filter.InitStrIncMap); - // Add entries to the map - for(i=0; i 0) { - Filter.InitStrMap[sa[i]].add(sa_val[i]); - } - else { - sa_new.clear(); - sa_new.set_ignore_case(1); - sa_new.add(sa_val[i]); - Filter.InitStrMap.insert(pair(sa[i], sa_new)); - } - } // end for i + // 
Conf: TCStatJob::InitStrExcName, TCStatJob::InitStrExcVal + parse_conf_string_map(Conf, + conf_key_init_str_exc_name, conf_key_init_str_exc_val, + Filter.InitStrExcMap); // Conf: TCStatJob::WaterOnly Filter.WaterOnly = Conf.lookup_bool(conf_key_water_only); @@ -311,7 +248,7 @@ void TCStatConfInfo::process_config() { // Conf: Jobs Jobs = Conf.lookup_string_array(conf_key_jobs); - if(Jobs.n_elements() == 0) { + if(Jobs.n() == 0) { mlog << Error << "\nTCStatConfInfo::process_config() -> " << "must specify at least one entry in \"jobs\".\n\n"; @@ -322,3 +259,73 @@ void TCStatConfInfo::process_config() { } //////////////////////////////////////////////////////////////////////// + +void parse_conf_thresh_map(MetConfig &conf, + const char *conf_key_name, const char *conf_key_val, + map &m) { + StringArray sa; + ThreshArray ta_val, ta_new; + + sa = conf.lookup_string_array(conf_key_name); + ta_val = conf.lookup_thresh_array(conf_key_val); + + // Check that they are the same length + if(sa.n() != ta_val.n()) { + mlog << Error + << "\nTCStatConfInfo::parse_conf_thresh_map() -> " + << "the \"" << conf_key_name << "\" and \"" << conf_key_val << "\" " + << "entries must have the same length.\n\n"; + exit(1); + } + + // Add entries to the map + for(int i=0; i 0) { + m[sa[i]].add(ta_val[i]); + } + else { + ta_new.clear(); + ta_new.add(ta_val[i]); + m.insert(pair(sa[i], ta_new)); + } + } // end for i + + return; +} + +//////////////////////////////////////////////////////////////////////// + +void parse_conf_string_map(MetConfig &conf, + const char *conf_key_name, const char *conf_key_val, + map &m) { + StringArray sa, sa_val, sa_new; + + sa = conf.lookup_string_array(conf_key_name); + sa_val = conf.lookup_string_array(conf_key_val); + + // Check that they are the same length + if(sa.n() != sa_val.n()) { + mlog << Error + << "\nTCStatConfInfo::parse_conf_string_map() -> " + << "the \"" << conf_key_name << "\" and \"" << conf_key_val << "\" " + << "entries must have the same length.\n\n"; + exit(1); + } + + // Add entries to the map + for(int i=0; i 0) { + m[sa[i]].add(sa_val[i]); + } + else { + sa_new.clear(); + sa_new.set_ignore_case(1); + sa_new.add(sa_val[i]); + m.insert(pair(sa[i], sa_new)); + } + } // end for i + + return; +} + +//////////////////////////////////////////////////////////////////////// diff --git a/met/src/tools/tc_utils/tc_stat/tc_stat_job.cc b/met/src/tools/tc_utils/tc_stat/tc_stat_job.cc index e472af405b..6f6f812878 100644 --- a/met/src/tools/tc_utils/tc_stat/tc_stat_job.cc +++ b/met/src/tools/tc_utils/tc_stat/tc_stat_job.cc @@ -112,11 +112,11 @@ TCStatJob *TCStatJobFactory::new_tc_stat_job(const char *jobstring) { a = job->parse_job_command(jobstring); // Check for unused arguments - if(a.n_elements() > 0) { + if(a.n() > 0) { // Build list of unknown args - for(i=0; i " @@ -220,9 +220,11 @@ void TCStatJob::clear() { LineType.clear(); TrackWatchWarn.clear(); ColumnThreshMap.clear(); - ColumnStrMap.clear(); + ColumnStrIncMap.clear(); + ColumnStrExcMap.clear(); InitThreshMap.clear(); - InitStrMap.clear(); + InitStrIncMap.clear(); + InitStrExcMap.clear(); EventEqualLead.clear(); EventEqualCases.clear(); @@ -301,9 +303,11 @@ void TCStatJob::assign(const TCStatJob & j) { LineType = j.LineType; TrackWatchWarn = j.TrackWatchWarn; ColumnThreshMap = j.ColumnThreshMap; - ColumnStrMap = j.ColumnStrMap; + ColumnStrIncMap = j.ColumnStrIncMap; + ColumnStrExcMap = j.ColumnStrExcMap; InitThreshMap = j.InitThreshMap; - InitStrMap = j.InitStrMap; + InitStrIncMap = j.InitStrIncMap; + InitStrExcMap = 
j.InitStrExcMap; DumpFile = j.DumpFile; open_dump_file(); @@ -423,8 +427,14 @@ void TCStatJob::dump(ostream & out, int depth) const { thr_it->second.dump(out, depth + 1); } - out << prefix << "ColumnStrMap ...\n"; - for(str_it=ColumnStrMap.begin(); str_it!= ColumnStrMap.end(); str_it++) { + out << prefix << "ColumnStrIncMap ...\n"; + for(str_it=ColumnStrIncMap.begin(); str_it!= ColumnStrIncMap.end(); str_it++) { + out << prefix << str_it->first << ": \n"; + str_it->second.dump(out, depth + 1); + } + + out << prefix << "ColumnStrExcMap ...\n"; + for(str_it=ColumnStrExcMap.begin(); str_it!= ColumnStrExcMap.end(); str_it++) { out << prefix << str_it->first << ": \n"; str_it->second.dump(out, depth + 1); } @@ -435,8 +445,14 @@ void TCStatJob::dump(ostream & out, int depth) const { thr_it->second.dump(out, depth + 1); } - out << prefix << "InitStrMap ...\n"; - for(str_it=InitStrMap.begin(); str_it!= InitStrMap.end(); str_it++) { + out << prefix << "InitStrIncMap ...\n"; + for(str_it=InitStrIncMap.begin(); str_it!= InitStrIncMap.end(); str_it++) { + out << prefix << str_it->first << ": \n"; + str_it->second.dump(out, depth + 1); + } + + out << prefix << "InitStrExcMap ...\n"; + for(str_it=InitStrExcMap.begin(); str_it!= InitStrExcMap.end(); str_it++) { out << prefix << str_it->first << ": \n"; str_it->second.dump(out, depth + 1); } @@ -501,7 +517,7 @@ bool TCStatJob::is_keeper_track(const TrackPairInfo &pair, map::const_iterator str_it; // Check TrackWatchWarn for each TrackPoint - if(TrackWatchWarn.n_elements() > 0) { + if(TrackWatchWarn.n() > 0) { // Assume track will not be kept keep = false; @@ -539,7 +555,11 @@ bool TCStatJob::is_keeper_track(const TrackPairInfo &pair, keep = false; n.RejInitThresh += pair.n_points(); } - else if(InitStrMap.size() > 0) { + else if(InitStrIncMap.size() > 0) { + keep = false; + n.RejInitStr += pair.n_points(); + } + else if(InitStrExcMap.size() > 0) { keep = false; n.RejInitStr += pair.n_points(); } @@ -567,10 +587,10 @@ bool TCStatJob::is_keeper_track(const TrackPairInfo &pair, } } - // Check InitStr + // Check InitStrInc if(keep == true) { - for(str_it=InitStrMap.begin(); str_it!= InitStrMap.end(); str_it++) { + for(str_it=InitStrIncMap.begin(); str_it!= InitStrIncMap.end(); str_it++) { // Retrieve the column value v_str = pair.line(i_init)->get_item(str_it->first.c_str()); @@ -584,6 +604,23 @@ bool TCStatJob::is_keeper_track(const TrackPairInfo &pair, } } + // Check InitStrExc + if(keep == true) { + + for(str_it=InitStrExcMap.begin(); str_it!= InitStrExcMap.end(); str_it++) { + + // Retrieve the column value + v_str = pair.line(i_init)->get_item(str_it->first.c_str()); + + // Check the string value + if(str_it->second.has(v_str)) { + keep = false; + n.RejInitStr += pair.n_points(); + break; + } + } + } + // Check OutInitMask if(keep == true) { @@ -606,11 +643,11 @@ bool TCStatJob::is_keeper_track(const TrackPairInfo &pair, // MET-667 Check this track for required lead times // If no required lead times were defined, do nothing. - if(keep == true && LeadReq.n_elements() > 0){ + if(keep == true && LeadReq.n() > 0){ // Loop through the points and see if any of the // lead times are in the list of required lead times // defined in the configuration file. 
- for(int j=0; j::const_iterator str_it; // Check TC-STAT header columns - if(AModel.n_elements() > 0 && + if(AModel.n() > 0 && !AModel.has(line.amodel())) { keep = false; n.RejAModel++; } - else if(BModel.n_elements() > 0 && + else if(BModel.n() > 0 && !BModel.has(line.bmodel())) { keep = false; n.RejBModel++; } - else if(Desc.n_elements() > 0 && + else if(Desc.n() > 0 && !Desc.has(line.desc())) { keep = false; n.RejDesc++; } - else if(StormId.n_elements() > 0 && + else if(StormId.n() > 0 && !has_storm_id(StormId, (string)line.basin(), (string)line.cyclone(), line.init())) { keep = false; n.RejStormId++; } - else if(Basin.n_elements() > 0 && + else if(Basin.n() > 0 && !Basin.has(line.basin())) { keep = false; n.RejBasin++; } - else if(Cyclone.n_elements() > 0 && + else if(Cyclone.n() > 0 && !Cyclone.has(line.cyclone())) { keep = false; n.RejCyclone++; } - else if(StormName.n_elements() > 0 && + else if(StormName.n() > 0 && !StormName.has(line.storm_name())) { keep = false; n.RejStormName++; } else if(InitBeg > 0 && line.init() < InitBeg) { keep = false; n.RejInit++; } else if(InitEnd > 0 && line.init() > InitEnd) { keep = false; n.RejInit++; } - else if(InitInc.n_elements() > 0 && + else if(InitInc.n() > 0 && !InitInc.has(line.init())) { keep = false; n.RejInit++; } - else if(InitExc.n_elements() > 0 && + else if(InitExc.n() > 0 && InitExc.has(line.init())) { keep = false; n.RejInit++; } - else if(InitHour.n_elements() > 0 && + else if(InitHour.n() > 0 && !InitHour.has(line.init_hour())) { keep = false; n.RejInitHour++; } - else if(Lead.n_elements() > 0 && + else if(Lead.n() > 0 && !Lead.has(line.lead())) { keep = false; n.RejLead++; } else if(ValidBeg > 0 && line.valid() < ValidBeg) { keep = false; n.RejValid++; } else if(ValidEnd > 0 && line.valid() > ValidEnd) { keep = false; n.RejValid++; } - else if(ValidInc.n_elements() > 0 && + else if(ValidInc.n() > 0 && !ValidInc.has(line.valid())) { keep = false; n.RejValid++; } - else if(ValidExc.n_elements() > 0 && + else if(ValidExc.n() > 0 && ValidExc.has(line.valid())) { keep = false; n.RejValid++; } - else if(ValidHour.n_elements() > 0 && + else if(ValidHour.n() > 0 && !ValidHour.has(line.valid_hour())) { keep = false; n.RejValidHour++; } - else if(InitMask.n_elements() > 0 && + else if(InitMask.n() > 0 && !InitMask.has(line.init_mask())) { keep = false; n.RejInitMask++; } - else if(ValidMask.n_elements() > 0 && + else if(ValidMask.n() > 0 && !ValidMask.has(line.valid_mask())) { keep = false; n.RejValidMask++; } - else if(LineType.n_elements() > 0 && + else if(LineType.n() > 0 && !LineType.has(line.line_type())) { keep = false; n.RejLineType++; } // Check that PROBRIRW lines include the requested probability type @@ -701,27 +738,45 @@ bool TCStatJob::is_keeper_line(const TCStatLine &line, // Check the column threshold if(!thr_it->second.check_dbl(v_dbl)) { - keep = false; - n.RejColumnThresh++; - break; + keep = false; + n.RejColumnThresh++; + break; } } } - // Check ColumnStrMap + // Check ColumnStrIncMap if(keep == true) { // Loop through the column string matching - for(str_it=ColumnStrMap.begin(); str_it!= ColumnStrMap.end(); str_it++) { + for(str_it=ColumnStrIncMap.begin(); str_it!= ColumnStrIncMap.end(); str_it++) { // Retrieve the column value v_str = line.get_item(str_it->first.c_str()); // Check the string value if(!str_it->second.has(v_str)) { - keep = false; - n.RejColumnStr++; - break; + keep = false; + n.RejColumnStr++; + break; + } + } + } + + // Check ColumnStrExcMap + if(keep == true) { + + // Loop through the column string 
matching + for(str_it=ColumnStrExcMap.begin(); str_it!= ColumnStrExcMap.end(); str_it++) { + + // Retrieve the column value + v_str = line.get_item(str_it->first.c_str()); + + // Check the string value + if(str_it->second.has(v_str)) { + keep = false; + n.RejColumnStr++; + break; } } } @@ -805,10 +860,10 @@ double TCStatJob::get_column_double(const TCStatLine &line, v = atof(line.get_item(sa[0].c_str())); // If multiple columns, compute the requested difference - if(sa.n_elements() > 1) { + if(sa.n() > 1) { // Loop through the column - for(i=1; i 0) s << "-init_beg " << unix_to_yyyymmdd_hhmmss(InitBeg) << " "; if(InitEnd > 0) s << "-init_end " << unix_to_yyyymmdd_hhmmss(InitEnd) << " "; - for(i=0; i 0) s << "-valid_beg " << unix_to_yyyymmdd_hhmmss(ValidBeg) << " "; if(ValidEnd > 0) s << "-valid_end " << unix_to_yyyymmdd_hhmmss(ValidEnd) << " "; - for(i=0; isecond.n_elements(); i++) { + for(i=0; isecond.n(); i++) { s << "-column_thresh " << thr_it->first << " " << thr_it->second[i].get_str() << " "; } } - for(str_it=ColumnStrMap.begin(); str_it!= ColumnStrMap.end(); str_it++) { - for(i=0; isecond.n_elements(); i++) { + for(str_it=ColumnStrIncMap.begin(); str_it!= ColumnStrIncMap.end(); str_it++) { + for(i=0; isecond.n(); i++) { s << "-column_str " << str_it->first << " " << str_it->second[i] << " "; } } + for(str_it=ColumnStrExcMap.begin(); str_it!= ColumnStrExcMap.end(); str_it++) { + for(i=0; isecond.n(); i++) { + s << "-column_str_exc " << str_it->first << " " + << str_it->second[i] << " "; + } + } for(thr_it=InitThreshMap.begin(); thr_it!= InitThreshMap.end(); thr_it++) { - for(i=0; isecond.n_elements(); i++) { + for(i=0; isecond.n(); i++) { s << "-init_thresh " << thr_it->first << " " << thr_it->second[i].get_str() << " "; } } - for(str_it=InitStrMap.begin(); str_it!= InitStrMap.end(); str_it++) { - for(i=0; isecond.n_elements(); i++) { + for(str_it=InitStrIncMap.begin(); str_it!= InitStrIncMap.end(); str_it++) { + for(i=0; isecond.n(); i++) { s << "-init_str " << str_it->first << " " << str_it->second[i] << " "; } } + for(str_it=InitStrExcMap.begin(); str_it!= InitStrExcMap.end(); str_it++) { + for(i=0; isecond.n(); i++) { + s << "-init_str_exc " << str_it->first << " " + << str_it->second[i] << " "; + } + } if(WaterOnly != default_water_only) s << "-water_only " << bool_to_string(WaterOnly) << " "; if(RIRWTrack != default_rirw_track) { @@ -1223,7 +1294,7 @@ ConcatString TCStatJob::serialize() const { s << "-match_points " << bool_to_string(MatchPoints) << " "; if(EventEqual != default_event_equal) s << "-event_equal " << bool_to_string(EventEqual) << " "; - for(i=0; i " << "the track-based " << TCStatLineType_TCMPR_Str @@ -1611,7 +1682,7 @@ void TCStatJobFilter::filter_tracks(TCLineCounts &n) { if(EventEqual == true) event_equalize_tracks(); // Check for no common cases - if(EventEqualSet == true && EventEqualCases.n_elements() == 0) { + if(EventEqualSet == true && EventEqualCases.n() == 0) { mlog << Debug(1) << "Event equalization of tracks found no common cases.\n"; } @@ -1650,7 +1721,7 @@ void TCStatJobFilter::filter_lines(TCLineCounts &n) { if(EventEqual == true) event_equalize_lines(); // Check for no common cases - if(EventEqualSet == true && EventEqualCases.n_elements() == 0) { + if(EventEqualSet == true && EventEqualCases.n() == 0) { mlog << Debug(1) << "Event equalization of lines found no common cases.\n"; } @@ -1793,7 +1864,7 @@ StringArray TCStatJobSummary::parse_job_command(const char *jobstring) { a = TCStatJob::parse_job_command(jobstring); // Loop over the StringArray 
elements - for(i=0; i " << "this function may only be called when using the " << "-column option in the job command line:\n" @@ -1904,7 +1975,7 @@ void TCStatJobSummary::do_job(const StringArray &file_list, // // If not specified, assume TCMPR by adding it to the LineType - if(LineType.n_elements() == 0) LineType.add(TCStatLineType_TCMPR_Str); + if(LineType.n() == 0) LineType.add(TCStatLineType_TCMPR_Str); // Add the input file list TCSTFiles.add_files(file_list); @@ -1913,7 +1984,7 @@ void TCStatJobSummary::do_job(const StringArray &file_list, if(LineType.has(TCStatLineType_TCMPR_Str)) { // TCMPR and non-TCMPR LineTypes cannot be mixed - for(i=0; i " << "the track-based " << TCStatLineType_TCMPR_Str @@ -1950,7 +2021,7 @@ void TCStatJobSummary::summarize_tracks(TCLineCounts &n) { if(EventEqual == true) event_equalize_tracks(); // Check for no common cases - if(EventEqualSet == true && EventEqualCases.n_elements() == 0) { + if(EventEqualSet == true && EventEqualCases.n() == 0) { mlog << Debug(1) << "Event equalization of tracks found no common cases.\n"; } @@ -1992,7 +2063,7 @@ void TCStatJobSummary::summarize_lines(TCLineCounts &n) { if(EventEqual == true) event_equalize_lines(); // Check for no common cases - if(EventEqualSet == true && EventEqualCases.n_elements() == 0) { + if(EventEqualSet == true && EventEqualCases.n() == 0) { mlog << Debug(1) << "Event equalization of lines found no common cases.\n"; } @@ -2039,7 +2110,7 @@ void TCStatJobSummary::process_pair(TrackPairInfo &pair) { for(i=0; i&m) { mlog << Debug(5) << "Summary Map Insert (" << it->first << ") " - << it->second.Val.n_elements() << " values: " + << it->second.Val.n() << " values: " << it->second.Val.serialize() << "\n"; // Add the pair to the map @@ -2132,7 +2203,7 @@ void TCStatJobSummary::add_map(map&m) { mlog << Debug(5) << "Summary Map Add (" << it->first << ") " - << it->second.Val.n_elements() << " values: " + << it->second.Val.n() << " values: " << it->second.Val.serialize() << "\n"; // Add the value for the existing key @@ -2165,11 +2236,11 @@ void TCStatJobSummary::do_output(ostream &out) { // Setup the output table out_at.set_size((int) SummaryMap.size() + 1, - ByColumn.n_elements() + 24); + ByColumn.n() + 24); // Left-justify case info and right-justify summary output for(i=0; isecond.Val.n_elements(); i++) { + for(i=0; isecond.Val.n(); i++) { if(!is_bad_data(it->second.Val[i])) { v.add(it->second.Val[i]); init.add(it->second.Init[i]); @@ -2244,7 +2315,7 @@ void TCStatJobSummary::do_output(ostream &out) { // Build index array index.clear(); - for(i=0; isecond.Val.n_elements()); - out_at.set_entry(r, c++, v.n_elements()); + out_at.set_entry(r, c++, it->second.Val.n()); + out_at.set_entry(r, c++, v.n()); out_at.set_entry(r, c++, mean_ci.v); out_at.set_entry(r, c++, mean_ci.v_ncl[0]); out_at.set_entry(r, c++, mean_ci.v_ncu[0]); @@ -2352,11 +2423,11 @@ void TCStatJobSummary::compute_fsp(NumArray &total, NumArray &best, mlog << Debug(4) << "Computing frequency of superior performance for " - << Column.n_elements() << " columns and " - << case_list.n_elements() << " cases.\n"; + << Column.n() << " columns and " + << case_list.n() << " cases.\n"; // Loop over the columns being summarized - for(i=0; isecond.Hdr.n_elements(); k++) { + for(k=0; ksecond.Hdr.n(); k++) { // Check if entry matches the current case if(strncasecmp(Column[i].c_str(), it->first.c_str(), @@ -2498,9 +2569,9 @@ bool is_time_series(const TimeArray &init, const NumArray &lead, dsec = bad_data_int; // The arrays should all be of the same length > 1 - 
if(init.n_elements() != lead.n_elements() || - init.n_elements() != valid.n_elements() || - init.n_elements() < 2) { + if(init.n() != lead.n() || + init.n() != valid.n() || + init.n() < 2) { mlog << Debug(4) << "Skipping time-series computations since the array " << "lengths differ.\n"; @@ -2513,7 +2584,7 @@ bool is_time_series(const TimeArray &init, const NumArray &lead, dvalid = valid[1] - valid[0]; // Loop over the entries to determine the time spacing - for(i=0; i= mean); @@ -2609,8 +2680,8 @@ int compute_time_to_indep(const NumArray &val, int ds) { exp_runs = 1.0 + 2.0*(n_abv * n_bel)/(n_abv + n_bel); // Calculate effective sample size, time to independence - eff_size = val.n_elements()*(n_run_abv + n_run_bel)/exp_runs; - tind = ds*val.n_elements()/eff_size; + eff_size = val.n()*(n_run_abv + n_run_bel)/exp_runs; + tind = ds*val.n()/eff_size; return(nint(tind)); } @@ -2622,7 +2693,7 @@ StringArray intersection(const StringArray &s1, const StringArray &s2) { int i; // Add elements common to both list - for(i=0; isecond.Hdr.n_elements(); + r += it->second.Hdr.n(); } // Format the output table out_at.set_size(r + 1, - 9 + ByColumn.n_elements() + 15); - setup_table(out_at, 9 + ByColumn.n_elements(), get_precision()); + 9 + ByColumn.n() + 15); + setup_table(out_at, 9 + ByColumn.n(), get_precision()); // Initialize row and column indices r = c = 0; @@ -3339,7 +3410,7 @@ void TCStatJobRIRW::do_mpr_output(ostream &out) { out_at.set_entry(r, c++, "WINDOW_END"); // Write case column names - for(i=0; isecond.Hdr.n_elements(); i++,r++) { + for(i=0; isecond.Hdr.n(); i++,r++) { // Initialize column counter c = 0; @@ -3384,14 +3455,14 @@ void TCStatJobRIRW::do_mpr_output(ostream &out) { // Write case column values sa = it->first.split(":"); - for(j=1; jsecond.Hdr[i]; sa = cs.split(":"); - for(j=0; j 0) { if(!mask_poly.latlon_is_inside_dege(lat, lon)) { @@ -3967,7 +4038,7 @@ bool check_masks(const MaskPoly &mask_poly, const Grid &mask_grid, } // - // Check grid masking. + // Check grid masking // if(mask_grid.nx() > 0 || mask_grid.ny() > 0) { mask_grid.latlon_to_xy(lat, -1.0*lon, grid_x, grid_y); @@ -3977,7 +4048,7 @@ bool check_masks(const MaskPoly &mask_poly, const Grid &mask_grid, } // - // Check area mask. + // Check area mask // if(mask_area.nx() > 0 || mask_area.ny() > 0) { if(!mask_area.s_is_on(nint(grid_x), nint(grid_y))) { diff --git a/met/src/tools/tc_utils/tc_stat/tc_stat_job.h b/met/src/tools/tc_utils/tc_stat/tc_stat_job.h index f97d5ae581..4ad98be987 100644 --- a/met/src/tools/tc_utils/tc_stat/tc_stat_job.h +++ b/met/src/tools/tc_utils/tc_stat/tc_stat_job.h @@ -286,13 +286,15 @@ class TCStatJob { map ColumnThreshMap; // ASCII column string matching - map ColumnStrMap; + map ColumnStrIncMap; + map ColumnStrExcMap; // Numeric column thresholds map InitThreshMap; // ASCII column string matching - map InitStrMap; + map InitStrIncMap; + map InitStrExcMap; // Variables to the store the analysis job specification ConcatString DumpFile; // Dump TrackPairInfo used to a file diff --git a/test/config/TCStatConfig_ALAL2010 b/test/config/TCStatConfig_ALAL2010 index 714029745f..8f59d36f5b 100644 --- a/test/config/TCStatConfig_ALAL2010 +++ b/test/config/TCStatConfig_ALAL2010 @@ -112,6 +112,12 @@ column_thresh_val = []; column_str_name = []; column_str_val = []; +// +// Stratify by excluding strings in non-numeric data columns. 
+// +column_str_exc_name = []; +column_str_exc_val = []; + // // Similar to the column_thresh options above // @@ -124,6 +130,12 @@ init_thresh_val = []; init_str_name = []; init_str_val = []; +// +// Similar to the column_str_exc options above +// +init_str_exc_name = []; +init_str_exc_val = []; + // // Stratify by the ADECK and BDECK distances to land. // diff --git a/test/config/TCStatConfig_PROBRIRW b/test/config/TCStatConfig_PROBRIRW index 9843408d2a..bf443a9e66 100644 --- a/test/config/TCStatConfig_PROBRIRW +++ b/test/config/TCStatConfig_PROBRIRW @@ -112,6 +112,12 @@ column_thresh_val = []; column_str_name = []; column_str_val = []; +// +// Stratify by excluding strings in non-numeric data columns. +// +column_str_exc_name = []; +column_str_exc_val = []; + // // Similar to the column_thresh options above // @@ -124,6 +130,12 @@ init_thresh_val = []; init_str_name = []; init_str_val = []; +// +// Similar to the column_str_exc options above +// +init_str_exc_name = []; +init_str_exc_val = []; + // // Stratify by the ADECK and BDECK distances to land. // diff --git a/test/xml/unit_stat_analysis.xml b/test/xml/unit_stat_analysis.xml index 799011f8a6..1558d101fd 100644 --- a/test/xml/unit_stat_analysis.xml +++ b/test/xml/unit_stat_analysis.xml @@ -315,6 +315,21 @@
+ + &MET_BIN;/stat_analysis + \ + -lookin &OUTPUT_DIR;/point_stat/point_stat_GRIB1_NAM_GDAS_120000L_20120409_120000V.stat \ + -job filter -line_type MPR -fcst_var TMP -fcst_lev Z2 -vx_mask DTC165 \ + -column_str OBS_SID KDLN,KDHT,KDEN,KDLS,KDMA,KDMN,KDVT,KDEW \ + -column_str_exc OBS_SID KDLN,KDHT \ + -dump_row &OUTPUT_DIR;/stat_analysis/POINT_STAT_FILTER_OBS_SID.stat \ + -v 1 + + + &OUTPUT_DIR;/stat_analysis/POINT_STAT_FILTER_OBS_SID.stat + + + OUTPUT_DIR &OUTPUT_DIR;/stat_analysis diff --git a/test/xml/unit_tc_stat.xml b/test/xml/unit_tc_stat.xml index 8a7891a2fd..7353d72f68 100644 --- a/test/xml/unit_tc_stat.xml +++ b/test/xml/unit_tc_stat.xml @@ -15,7 +15,6 @@ &TEST_DIR; true - &MET_BIN;/tc_stat \ @@ -33,6 +32,20 @@ + + &MET_BIN;/tc_stat + \ + -lookin &OUTPUT_DIR;/tc_pairs/alal2010.tcst \ + -job filter -dump_row &OUTPUT_DIR;/tc_stat/ALAL2010_FILTER_STRINGS.tcst \ + -init_str LEVEL TS,HU -init_str_exc WATCH_WARN HUWARN \ + -column_str LEVEL HU -column_str_exc WATCH_WARN TSWATCH \ + -v 2 + + + &OUTPUT_DIR;/tc_stat/ALAL2010_FILTER_STRINGS.tcst + + + &MET_BIN;/tc_stat \ From 6055600ea746ff8f3317569cb925d5476c102b32 Mon Sep 17 00:00:00 2001 From: johnhg Date: Tue, 30 Mar 2021 16:55:05 -0600 Subject: [PATCH 84/86] Bugfix 1737 develop little_r (#1739) * Per #1737, migrate the same fix from main_v9.1 over to the develop branch. * Per #1737, add another unit test for running ascii2nc with corrupt littl_r records. --- .../tools/other/ascii2nc/little_r_handler.cc | 73 ++++++++++++------- test/xml/unit_ascii2nc.xml | 12 +++ 2 files changed, 60 insertions(+), 25 deletions(-) diff --git a/met/src/tools/other/ascii2nc/little_r_handler.cc b/met/src/tools/other/ascii2nc/little_r_handler.cc index b3d49787ea..a604a96355 100644 --- a/met/src/tools/other/ascii2nc/little_r_handler.cc +++ b/met/src/tools/other/ascii2nc/little_r_handler.cc @@ -66,6 +66,8 @@ static const string lr_grib_names[] = { // Little-R regular expression used to determine file type static const char *lr_rpt_reg_exp = "FM-[0-9]"; +static const char *lr_dtg_reg_exp = "[0-9]\\{14\\}"; + //////////////////////////////////////////////////////////////////////// @@ -137,46 +139,49 @@ bool LittleRHandler::_readObservations(LineDataFile &ascii_file) int n_data_hdr; StringArray mappedTypes; StringArray unmappedTypes; + bool is_bad_header = false; while (ascii_file.read_fwf_line(data_line, lr_rpt_wdth, n_lr_rpt_wdth)) { + // Check for expected header line if (!check_reg_exp(lr_rpt_reg_exp, data_line[4])) { mlog << Error << "\nLittleRHandler::_readObservations() -> " << "the fifth entry of the little_r report on line " - << data_line.line_number() << " does not match \"" + << data_line.line_number() + << " does not match the regular expression \"" << lr_rpt_reg_exp << "\":\n\"" << data_line[4] << "\"\n\n"; return false; } // Store the message type - ConcatString concat_string = (string)data_line[4]; - concat_string.ws_strip(); + ConcatString cs = (string)data_line[4]; + cs.ws_strip(); ConcatString hdr_typ; - if (_messageTypeMap[concat_string] != "") + if (_messageTypeMap.count(cs) > 0) { - hdr_typ = _messageTypeMap[concat_string]; - if (!mappedTypes.has(concat_string)) { + hdr_typ = _messageTypeMap[cs]; + if (!mappedTypes.has(cs)) { mlog << Debug(5) - << "Switching little_r report type \"" << concat_string + << "Switching little_r report type \"" << cs << "\" to message type \"" << hdr_typ << "\".\n"; - mappedTypes.add(concat_string); + mappedTypes.add(cs); } } else { - hdr_typ = concat_string; + hdr_typ = cs; hdr_typ.replace(" ", "_", false); - - 
if (!unmappedTypes.has(concat_string)) { - mlog << Warning << "\nLittleRHandler::_processObs() -> " + + if (!unmappedTypes.has(cs)) { + mlog << Warning << "\nLittleRHandler::_readObservations() -> " << "Storing message type as \"" << hdr_typ - << "\" for unexpected report type \"" << concat_string << "\".\n\n"; - unmappedTypes.add(concat_string); + << "\" for unexpected report type \"" << cs << "\".\n\n"; + unmappedTypes.add(cs); } } @@ -188,16 +193,29 @@ bool LittleRHandler::_readObservations(LineDataFile &ascii_file) // Store the valid time in YYYYMMDD_HHMMSS format - ConcatString hdr_vld_str; - - concat_string = data_line[17]; - concat_string.ws_strip(); - hdr_vld_str << cs_erase; - hdr_vld_str.format("%.8s_%.6s", - concat_string.text(), concat_string.text()+8); + time_t hdr_vld = 0; - time_t hdr_vld = _getValidTime(hdr_vld_str.text()); + if (check_reg_exp(lr_dtg_reg_exp, data_line[17])) + { + ConcatString hdr_vld_str; + cs = data_line[17]; + cs.ws_strip(); + hdr_vld_str << cs_erase; + hdr_vld_str.format("%.8s_%.6s", cs.text(), cs.text()+8); + hdr_vld = _getValidTime(hdr_vld_str.text()); + is_bad_header = false; + + } else + { + mlog << Warning << "\nLittleRHandler::_readObservations() -> " + << "the 18 entry of the little_r report on line " + << data_line.line_number() + << " does not match the timestring regular expression \"" + << lr_dtg_reg_exp << "\":\n\"" << data_line[17] << "\"\n\n"; + is_bad_header = true; + } + // Store the station location double hdr_lat = atof(data_line[0]); @@ -211,7 +229,8 @@ bool LittleRHandler::_readObservations(LineDataFile &ascii_file) // Observation of sea level pressure in pascals. - if (!is_eq(atof(data_line[18]), lr_missing_value)) + if (!is_eq(atof(data_line[18]), lr_missing_value) && + !is_bad_header) { ConcatString obs_qty = (is_eq(atof(data_line[19]), lr_missing_value) ? na_string : (string)data_line[19]); @@ -237,12 +256,16 @@ bool LittleRHandler::_readObservations(LineDataFile &ascii_file) int i_data = 0; while (ascii_file.read_fwf_line(data_line, lr_meas_wdth, n_lr_meas_wdth)) { + // Check for the end of report if (is_eq(atof(data_line[0]), lr_end_value) && - is_eq(atof(data_line[2]), lr_end_value)) + is_eq(atof(data_line[2]), lr_end_value)) break; + // Skip data lines if the header line is bad + if (is_bad_header) continue; + // Retrieve pressure and height double obs_prs = (is_eq(atof(data_line[0]), lr_missing_value) ? 
@@ -305,7 +328,7 @@ bool LittleRHandler::_readObservations(LineDataFile &ascii_file) if (n_data_hdr != i_data) { - mlog << Warning << "\nprocess_little_r_obs() -> " + mlog << Warning << "\nLittleRHandler::_readObservations() -> " << "the number of data lines specified in the header (" << n_data_hdr << ") does not match the number found in the data (" diff --git a/test/xml/unit_ascii2nc.xml b/test/xml/unit_ascii2nc.xml index fc1d1c4b86..2edb2bf9b7 100644 --- a/test/xml/unit_ascii2nc.xml +++ b/test/xml/unit_ascii2nc.xml @@ -76,6 +76,18 @@ + + &MET_BIN;/ascii2nc + \ + &DATA_DIR_OBS;/ascii/OBS:2015080700_bad_record \ + &OUTPUT_DIR;/ascii2nc/OBS:2015080700_bad_record.nc \ + -v 1 + + + &OUTPUT_DIR;/ascii2nc/OBS:2015080700_bad_record.nc + + + &MET_BIN;/ascii2nc \ From 804b1ac397ccc78bb19d39e855dbde27dcfaedc7 Mon Sep 17 00:00:00 2001 From: jprestop Date: Thu, 1 Apr 2021 12:14:29 -0600 Subject: [PATCH 85/86] Feature GitHub actions (#1742) * Adding files to build documenation via GitHub Actions * Removing html_theme_options * Removed warnings.log from help section --- .github/workflows/main.yml | 50 ++++++++++++++++++++++++++++++++++++++ met/docs/Makefile | 3 ++- met/docs/conf.py | 2 -- 3 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/main.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000000..d959c9e411 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,50 @@ +name: MET CI/CD Workflow +on: + push: + branches: + - develop + - develop-ref + - feature_* + - main_* + - bugfix_* + pull_request: + types: [opened, reopened, synchronize] + +jobs: + documentation: + name: Build Documentation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.6' + - name: Install dependencies + run: | + python -m pip install --upgrade python-dateutil requests sphinx \ + sphinx-gallery Pillow sphinx_rtd_theme + - name: Build docs + continue-on-error: true + run: | + DOCS_DIR=${GITHUB_WORKSPACE}/met/docs + cd ${DOCS_DIR} + make clean html + cd ${GITHUB_WORKSPACE} + warning_file=${DOCS_DIR}/_build/warnings.log + mkdir -p artifact/documentation + cp -r ${DOCS_DIR}/_build/html/* artifact/documentation + if [ -s $warning_file ]; then + cp -r ${DOCS_DIR}/_build/warnings.log artifact/doc_warnings.log + cp artifact/doc_warnings.log artifact/documentation + else + rm ${warning_file} + fi + - uses: actions/upload-artifact@v2 + with: + name: documentation + path: artifact/documentation + - uses: actions/upload-artifact@v2 + with: + name: documentation_warnings.log + path: artifact/doc_warnings.log + if-no-files-found: ignore diff --git a/met/docs/Makefile b/met/docs/Makefile index 6d88c8d309..81e7849441 100644 --- a/met/docs/Makefile +++ b/met/docs/Makefile @@ -21,4 +21,5 @@ clean: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + [ -d $(BUILDDIR) ] || mkdir -p $(BUILDDIR) + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -w "$(BUILDDIR)/warnings.log" diff --git a/met/docs/conf.py b/met/docs/conf.py index 13f65b1b9d..2a51afe4d7 100644 --- a/met/docs/conf.py +++ b/met/docs/conf.py @@ -53,8 +53,6 @@ html_theme = 'sphinx_rtd_theme' html_theme_path = ["_themes", ] html_js_files = ['pop_ver.js'] -html_theme_options = {'canonical_url': 'https://dtcenter.github.io/MET/latest/'} -html_theme_options['versions'] = {'latest': '../latest', 'develop': '../develop'} html_css_files = ['theme_override.css'] # Add any paths that contain custom static files (such as style sheets) here, From 853ad34b7f8f74bc8a73d2787f3a4da49c59ca71 Mon Sep 17 00:00:00 2001 From: johnhg Date: Thu, 1 Apr 2021 16:56:03 -0600 Subject: [PATCH 86/86] Feature 1575 large_diffs (#1741) * Per #1575, add mpr_column and mpr_thresh entries to all of the Grid-Stat and Point-Stat config files. * Per #1575, define config strings to be parsed from the config files. * Per #1575, store col_name_ptr and col_thresh_ptr in PairBase. They are being used for PairDataPoint to do MPR filtering in Grid-Stat and Point-Stat. But they could be eventually be extended to filter ORANK columns for Ensemble-Stat. * Per #1575, add MPR filtering logic to pair_data_point.cc. Include filtering logic in PairDataPoint instead of VxPairDataPoint since Grid-Stat uses PairDataPoint. * Per #1575, update point_stat to parse the mpr_column and mpr_thresh config file options. Include the MPR rejection reason code counts in the log output. * Per #1575, updated Grid-Stat to parse mpr_column and mpr_thresh options. * Per #1575, update Point-Stat to store mpr_sa and mpr_ta locally and then call set_mpr_filt() after the VxPairDataPoint object has been sized and allocated. * Per #1575, renamed PairDataEnsemble::subset_pairs() to subset_pairs_obs_thresh() to be a little more explicit about things. I'll do the same for PairDataPoint using names subset_pairs_cnt_thresh() and subset_pairs_mpr_thresh(). * Per #1575, some cleanup, moving check_fo_thresh() utility function from vx_config to vx_statistics library. * Per #1575, when implementing this for Grid-Stat, I realized that there isn't much benefit in storing col_name_ptr and col_name_thresh in PairBase. These changes remove that. * Per #1575, updating pair_data_point.h/.cc to handle the subsetting of data based on the MPR thresh. * Per #1575, rename subset_pairs() to subset_pairs_cnt_thresh() to be a bit more explicit with the naming conventions. * Per #1575, no real changes here. Just reorganizing the location of the mpr_sa and mpr_ta members. * Per #1575, make the subset_pairs() utility function a member function of the PairDataPoint class named subset_pairs_cnt_thresh() and update the application code to call it. * Per #1575, need to actually set the mpr_thresh! * Per #1575, update subset_pairs_mpr_thresh() to make sure the StringArray and ThreshArray lengths are the same. * Per #1575, replace PairDataPoint::subset_pairs_mpr_thresh() with a utility function named apply_mpr_thresh_mask(). This is for Grid-Stat to apply the mpr_thresh settings after the DataPlane pairs have been created but prior to applying any smoothing operations. * Per #1575, add documentation about mpr_column and mpr_thresh. * Per #1575, mpr_columns can also include CLIMO_CDF. 
* Per #1575, add tests for Grid-Stat and Point-Stat to exercise the mpr_column and mpr_thresh config file options. --- met/data/config/GridStatConfig_default | 2 + met/data/config/PointStatConfig_default | 2 + met/docs/Users_Guide/config_options.rst | 25 +- met/docs/Users_Guide/grid-stat.rst | 4 +- met/docs/Users_Guide/point-stat.rst | 4 +- met/scripts/config/GridStatConfig_APCP_12 | 3 +- met/scripts/config/GridStatConfig_APCP_24 | 3 + met/scripts/config/GridStatConfig_POP_12 | 2 + met/scripts/config/GridStatConfig_all | 2 + met/scripts/config/PointStatConfig | 2 + met/src/basic/vx_config/config_constants.h | 6 +- met/src/basic/vx_config/config_util.cc | 40 -- met/src/basic/vx_config/config_util.h | 4 - met/src/libcode/vx_statistics/met_stats.cc | 2 +- met/src/libcode/vx_statistics/pair_base.h | 2 + .../vx_statistics/pair_data_ensemble.cc | 2 +- .../vx_statistics/pair_data_ensemble.h | 2 +- .../libcode/vx_statistics/pair_data_point.cc | 344 +++++++++++++++--- .../libcode/vx_statistics/pair_data_point.h | 32 +- .../tools/core/ensemble_stat/ensemble_stat.cc | 4 +- met/src/tools/core/grid_stat/grid_stat.cc | 14 +- .../core/grid_stat/grid_stat_conf_info.cc | 12 +- .../core/grid_stat/grid_stat_conf_info.h | 3 + met/src/tools/core/point_stat/point_stat.cc | 8 +- .../core/point_stat/point_stat_conf_info.cc | 12 + .../core/point_stat/point_stat_conf_info.h | 3 + .../core/series_analysis/series_analysis.cc | 4 +- test/config/GridStatConfig_APCP_regrid | 2 + test/config/GridStatConfig_GRIB_lvl_typ_val | 2 + test/config/GridStatConfig_GRIB_set_attr | 2 + test/config/GridStatConfig_GTG_latlon | 2 + test/config/GridStatConfig_GTG_lc | 2 + test/config/GridStatConfig_apply_mask | 2 + test/config/GridStatConfig_climo_WMO | 2 + test/config/GridStatConfig_climo_prob | 2 + test/config/GridStatConfig_fourier | 2 + test/config/GridStatConfig_grid_weight | 2 + test/config/GridStatConfig_interp_shape | 2 + test/config/GridStatConfig_mpr_thresh | 274 ++++++++++++++ test/config/GridStatConfig_no_leap | 2 + test/config/GridStatConfig_prob_as_scalar | 2 + test/config/GridStatConfig_python | 2 + test/config/GridStatConfig_python_mixed | 2 + test/config/GridStatConfig_rtma | 2 + test/config/GridStatConfig_rtma_perc_thresh | 2 + test/config/GridStatConfig_st4 | 2 + test/config/GridStatConfig_st4_censor | 2 + test/config/PointStatConfig_APCP | 2 + test/config/PointStatConfig_APCP_HIRA | 2 + test/config/PointStatConfig_GTG_latlon | 2 + test/config/PointStatConfig_GTG_lc | 2 + test/config/PointStatConfig_INTERP_OPTS | 2 + test/config/PointStatConfig_LAND_TOPO_MASK | 2 + test/config/PointStatConfig_MASK_SID | 2 + test/config/PointStatConfig_PHYS | 2 + test/config/PointStatConfig_PHYS_pint | 2 + test/config/PointStatConfig_WINDS | 2 + test/config/PointStatConfig_aeronet | 2 + test/config/PointStatConfig_airnow | 2 + test/config/PointStatConfig_climo | 2 + test/config/PointStatConfig_climo_WMO | 2 + test/config/PointStatConfig_climo_prob | 2 + test/config/PointStatConfig_dup | 2 + test/config/PointStatConfig_mpr_thresh | 221 +++++++++++ test/config/PointStatConfig_obs_summary | 2 + test/config/PointStatConfig_obs_summary_all | 2 + test/config/PointStatConfig_prob | 2 + test/config/PointStatConfig_python | 2 + test/config/PointStatConfig_sid_inc_exc | 2 + test/config/ref_config/GridStatConfig_03h | 2 + test/config/ref_config/GridStatConfig_24h | 3 + test/config/ref_config/PointStatConfig_ADPUPA | 2 + test/config/ref_config/PointStatConfig_ONLYSF | 2 + test/config/ref_config/PointStatConfig_WINDS | 2 + 
test/xml/unit_grid_stat.xml | 27 ++ test/xml/unit_point_stat.xml | 27 ++ 76 files changed, 1065 insertions(+), 118 deletions(-) create mode 100644 test/config/GridStatConfig_mpr_thresh create mode 100644 test/config/PointStatConfig_mpr_thresh diff --git a/met/data/config/GridStatConfig_default b/met/data/config/GridStatConfig_default index acd3a71051..c32872783a 100644 --- a/met/data/config/GridStatConfig_default +++ b/met/data/config/GridStatConfig_default @@ -43,6 +43,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/met/data/config/PointStatConfig_default b/met/data/config/PointStatConfig_default index ae05370fbc..b0a4981c62 100644 --- a/met/data/config/PointStatConfig_default +++ b/met/data/config/PointStatConfig_default @@ -38,6 +38,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/met/docs/Users_Guide/config_options.rst b/met/docs/Users_Guide/config_options.rst index e1f65cc7b4..1277564934 100644 --- a/met/docs/Users_Guide/config_options.rst +++ b/met/docs/Users_Guide/config_options.rst @@ -640,12 +640,12 @@ to be verified. This dictionary may include the following entries: metadata of any output files, but the user can set the "desc" entry accordingly. - Examples of user-defined conversion functions include: + Examples of user-defined data censoring operations include: .. code-block:: none censor_thresh = [ >12000 ]; - censor_val = [ 12000 ]; + censor_val = [ 12000 ]; * Several configuration options are provided to override and correct the metadata read from the input file. The supported options are listed @@ -678,6 +678,25 @@ to be verified. This dictionary may include the following entries: is_wind_direction = boolean; is_prob = boolean; + * The "mpr_column" and "mpr_thresh" entries are arrays of strings and + thresholds to specify which matched pairs should be included in the + statistics. These options apply to the Point-Stat and Grid-Stat tools. + They are parsed seperately for each "obs.field" array entry. + The "mpr_column" strings specify MPR column names ("FCST", "OBS", + "CLIMO_MEAN", "CLIMO_STDEV", or "CLIMO_CDF"), differences of columns + ("FCST-OBS"), or the absolute value of those differences ("ABS(FCST-OBS)"). + The number of "mpr_thresh" thresholds must match the number of "mpr_column" + entries, and the n-th threshold is applied to the n-th column. Any matched + pairs which do not meet any of the specified thresholds are excluded from + the analysis. For example, the following settings exclude matched pairs + where the observation value differs from the forecast or climatological + mean values by more than 10: + + .. code-block:: none + + mpr_column = [ "ABS(OBS-FCST)", "ABS(OBS-CLIMO_MEAN)" ]; + mpr_thresh = [ <=10, <=10 ]; + * The "cat_thresh" entry is an array of thresholds to be used when computing categorical statistics. 
@@ -981,6 +1000,8 @@ or obs = { censor_thresh = []; censor_val = []; + mpr_column = []; + mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/met/docs/Users_Guide/grid-stat.rst b/met/docs/Users_Guide/grid-stat.rst index a3aac48240..ff5808df9c 100644 --- a/met/docs/Users_Guide/grid-stat.rst +++ b/met/docs/Users_Guide/grid-stat.rst @@ -221,13 +221,15 @@ __________________________ type = [ { method = NEAREST; width = 1; } ]; } censor_thresh = []; censor_val = []; + mpr_column = []; + mpr_thresh = []; eclv_points = 0.05; rank_corr_flag = TRUE; tmp_dir = "/tmp"; output_prefix = ""; version = "VN.N"; -The configuration options listed above are common to many MET tools and are described in :numref:`config_options`. +The configuration options listed above are common to multiple MET tools and are described in :numref:`config_options`. ___________________________ diff --git a/met/docs/Users_Guide/point-stat.rst b/met/docs/Users_Guide/point-stat.rst index 1ec6fdacb9..bfbbc53fba 100644 --- a/met/docs/Users_Guide/point-stat.rst +++ b/met/docs/Users_Guide/point-stat.rst @@ -334,6 +334,8 @@ ________________________ type = [ { method = NEAREST; width = 1; } ]; } censor_thresh = []; censor_val = []; + mpr_column = []; + mpr_thresh = []; eclv_points = 0.05; rank_corr_flag = TRUE; sid_inc = []; @@ -347,7 +349,7 @@ ________________________ output_prefix = ""; version = "VN.N"; -The configuration options listed above are common to many MET tools and are described in :numref:`config_options`. +The configuration options listed above are common to multiple MET tools and are described in :numref:`config_options`. _________________________ diff --git a/met/scripts/config/GridStatConfig_APCP_12 b/met/scripts/config/GridStatConfig_APCP_12 index 9ea99e03bc..f4308d3bca 100644 --- a/met/scripts/config/GridStatConfig_APCP_12 +++ b/met/scripts/config/GridStatConfig_APCP_12 @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; @@ -52,7 +54,6 @@ nc_pairs_var_name = ""; nc_pairs_var_suffix = ""; rank_corr_flag = FALSE; - // // Forecast and observation fields to be verified // diff --git a/met/scripts/config/GridStatConfig_APCP_24 b/met/scripts/config/GridStatConfig_APCP_24 index 9d73b6b21c..af2cf4f003 100644 --- a/met/scripts/config/GridStatConfig_APCP_24 +++ b/met/scripts/config/GridStatConfig_APCP_24 @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; @@ -51,6 +53,7 @@ eclv_points = 0.05; nc_pairs_var_name = ""; nc_pairs_var_suffix = ""; rank_corr_flag = FALSE; + // // Forecast and observation fields to be verified // diff --git a/met/scripts/config/GridStatConfig_POP_12 b/met/scripts/config/GridStatConfig_POP_12 index faff0be7e3..c46f639b94 100644 --- a/met/scripts/config/GridStatConfig_POP_12 +++ b/met/scripts/config/GridStatConfig_POP_12 @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/met/scripts/config/GridStatConfig_all b/met/scripts/config/GridStatConfig_all index 1050ab5920..9360adca0a 100644 --- a/met/scripts/config/GridStatConfig_all +++ b/met/scripts/config/GridStatConfig_all @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; 
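As a rough illustration of the mpr_column/mpr_thresh filtering documented above, the sketch below applies the example settings from config_options.rst (mpr_column = [ "ABS(OBS-FCST)", "ABS(OBS-CLIMO_MEAN)" ]; mpr_thresh = [ <=10, <=10 ]) to a handful of matched pairs. It is not the MET implementation (which lives in pair_data_point.cc, is parsed per obs.field entry, and for Grid-Stat runs before any smoothing): the Pair struct, the column evaluator, and the lambda thresholds are simplified, hypothetical stand-ins.

.. code-block:: cpp

   // Sketch of matched-pair (MPR) filtering: each requested column is
   // evaluated per pair and the pair is kept only if every threshold passes.
   #include <cmath>
   #include <functional>
   #include <iostream>
   #include <string>
   #include <vector>

   struct Pair { double fcst, obs, climo_mean; };

   // Evaluate a requested MPR "column" for one pair.
   double mpr_column_value(const Pair& p, const std::string& col) {
      if (col == "FCST")                return p.fcst;
      if (col == "OBS")                 return p.obs;
      if (col == "CLIMO_MEAN")          return p.climo_mean;
      if (col == "ABS(OBS-FCST)")       return std::fabs(p.obs - p.fcst);
      if (col == "ABS(OBS-CLIMO_MEAN)") return std::fabs(p.obs - p.climo_mean);
      return 0.0;  // other columns omitted from this sketch
   }

   // Keep only pairs satisfying every (column, threshold) entry.
   // As in the documentation, cols and thresh must have the same length.
   std::vector<Pair> apply_mpr_thresh(const std::vector<Pair>& pairs,
                                      const std::vector<std::string>& cols,
                                      const std::vector<std::function<bool(double)>>& thresh) {
      std::vector<Pair> keep;
      for (const Pair& p : pairs) {
         bool ok = true;
         for (size_t i = 0; i < cols.size() && ok; i++) {
            ok = thresh[i](mpr_column_value(p, cols[i]));
         }
         if (ok) keep.push_back(p);
      }
      return keep;
   }

   int main() {
      // mpr_column = [ "ABS(OBS-FCST)", "ABS(OBS-CLIMO_MEAN)" ];
      // mpr_thresh = [ <=10, <=10 ];
      std::vector<std::string> cols = { "ABS(OBS-FCST)", "ABS(OBS-CLIMO_MEAN)" };
      std::vector<std::function<bool(double)>> thresh = {
         [](double v) { return v <= 10.0; },
         [](double v) { return v <= 10.0; }
      };

      std::vector<Pair> pairs = { { 25.0, 23.0, 24.0 },    // kept
                                  { 25.0, 40.0, 24.0 } };  // dropped: |OBS-FCST| = 15
      std::cout << apply_mpr_thresh(pairs, cols, thresh).size() << " of "
                << pairs.size() << " pairs retained\n";
      return 0;
   }

Pairs failing any one of the listed thresholds are dropped before statistics are computed, matching the "excluded from the analysis" wording in the documentation above.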
diff --git a/met/scripts/config/PointStatConfig b/met/scripts/config/PointStatConfig index fe007f5f9e..159e9ae1b8 100644 --- a/met/scripts/config/PointStatConfig +++ b/met/scripts/config/PointStatConfig @@ -33,6 +33,8 @@ regrid = { censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/met/src/basic/vx_config/config_constants.h b/met/src/basic/vx_config/config_constants.h index e63a6935f0..bc82d55625 100644 --- a/met/src/basic/vx_config/config_constants.h +++ b/met/src/basic/vx_config/config_constants.h @@ -380,7 +380,7 @@ struct MaskLatLon { // enum DuplicateType { - DuplicateType_None, // Apply no logic for duplicate point obs + DuplicateType_None, // Apply no logic for duplicate point obs DuplicateType_Unique // Filter out duplicate observation values }; @@ -394,7 +394,7 @@ enum ObsSummary { ObsSummary_None, // Keep all observations, no statistics ObsSummary_Nearest, // Keep only the observation closest in time ObsSummary_Min, // Keep only smallest value - ObsSummary_Max, // Keep only largest valueXS + ObsSummary_Max, // Keep only largest value ObsSummary_UW_Mean, // Calculate un-weighted mean ObsSummary_DW_Mean, // Calculate time weighted mean ObsSummary_Median, // Calculate median @@ -536,6 +536,8 @@ static const char conf_key_obs_qty[] = "obs_quality"; static const char conf_key_convert[] = "convert"; static const char conf_key_censor_thresh[] = "censor_thresh"; static const char conf_key_censor_val[] = "censor_val"; +static const char conf_key_mpr_column[] = "mpr_column"; +static const char conf_key_mpr_thresh[] = "mpr_thresh"; static const char conf_key_cnt_thresh[] = "cnt_thresh"; static const char conf_key_cnt_logic[] = "cnt_logic"; static const char conf_key_cat_thresh[] = "cat_thresh"; diff --git a/met/src/basic/vx_config/config_util.cc b/met/src/basic/vx_config/config_util.cc index 349eb06081..6eb13ef7a5 100644 --- a/met/src/basic/vx_config/config_util.cc +++ b/met/src/basic/vx_config/config_util.cc @@ -2285,46 +2285,6 @@ void check_mctc_thresh(const ThreshArray &ta) { /////////////////////////////////////////////////////////////////////////////// -bool check_fo_thresh(const double f, const double o, - const double cmn, const double csd, - const SingleThresh &ft, const SingleThresh &ot, - const SetLogic type) { - bool status = true; - bool fcheck = ft.check(f, cmn, csd); - bool ocheck = ot.check(o, cmn, csd); - SetLogic t = type; - - // If either of the thresholds is NA, reset the logic to intersection - // because an NA threshold is always true. 
- if(ft.get_type() == thresh_na || ot.get_type() == thresh_na) { - t = SetLogic_Intersection; - } - - switch(t) { - case(SetLogic_Union): - if(!fcheck && !ocheck) status = false; - break; - - case(SetLogic_Intersection): - if(!fcheck || !ocheck) status = false; - break; - - case(SetLogic_SymDiff): - if(fcheck == ocheck) status = false; - break; - - default: - mlog << Error << "\ncheck_fo_thresh() -> " - << "Unexpected SetLogic value of " << type << ".\n\n"; - exit(1); - break; - } - - return(status); -} - -/////////////////////////////////////////////////////////////////////////////// - const char * statlinetype_to_string(const STATLineType t) { const char *s = (const char *) 0; diff --git a/met/src/basic/vx_config/config_util.h b/met/src/basic/vx_config/config_util.h index d09a154c4c..ced39afb2d 100644 --- a/met/src/basic/vx_config/config_util.h +++ b/met/src/basic/vx_config/config_util.h @@ -80,10 +80,6 @@ extern void check_climo_n_vx(Dictionary *dict, const int); extern InterpMthd int_to_interpmthd(int); extern void check_mctc_thresh(const ThreshArray &); -extern bool check_fo_thresh(const double, const double, const double, const double, - const SingleThresh &, const SingleThresh &, - const SetLogic); - extern const char * statlinetype_to_string(const STATLineType); extern void statlinetype_to_string(const STATLineType, char *); extern STATLineType string_to_statlinetype(const char *); diff --git a/met/src/libcode/vx_statistics/met_stats.cc b/met/src/libcode/vx_statistics/met_stats.cc index e4074a79a6..27904462d1 100644 --- a/met/src/libcode/vx_statistics/met_stats.cc +++ b/met/src/libcode/vx_statistics/met_stats.cc @@ -1207,7 +1207,7 @@ void SL1L2Info::set(const PairDataPoint &pd_all) { zero_out(); // Apply continuous filtering thresholds to subset pairs - pd = subset_pairs(pd_all, fthresh, othresh, logic); + pd = pd_all.subset_pairs_cnt_thresh(fthresh, othresh, logic); // Check for no matched pairs to process if(pd.n_obs == 0) return; diff --git a/met/src/libcode/vx_statistics/pair_base.h b/met/src/libcode/vx_statistics/pair_base.h index 0c1bec3bd0..db7b63297f 100644 --- a/met/src/libcode/vx_statistics/pair_base.h +++ b/met/src/libcode/vx_statistics/pair_base.h @@ -71,6 +71,8 @@ class PairBase { MaskLatLon *mask_llpnt_ptr; // Pointer to Lat/Lon thresholds // which is not allocated + ////////////////////////////////////////////////////////////////// + ConcatString msg_typ; // Name of the verifying message type StringArray msg_typ_vals; // Message type values to be included diff --git a/met/src/libcode/vx_statistics/pair_data_ensemble.cc b/met/src/libcode/vx_statistics/pair_data_ensemble.cc index bc27df220c..5f21f17fe9 100644 --- a/met/src/libcode/vx_statistics/pair_data_ensemble.cc +++ b/met/src/libcode/vx_statistics/pair_data_ensemble.cc @@ -758,7 +758,7 @@ void PairDataEnsemble::compute_ssvar() { // //////////////////////////////////////////////////////////////////////// -PairDataEnsemble PairDataEnsemble::subset_pairs(const SingleThresh &ot) const { +PairDataEnsemble PairDataEnsemble::subset_pairs_obs_thresh(const SingleThresh &ot) const { // Check for no work to be done if(ot.get_type() == thresh_na) return(*this); diff --git a/met/src/libcode/vx_statistics/pair_data_ensemble.h b/met/src/libcode/vx_statistics/pair_data_ensemble.h index 5bfd4d4300..3d71af984b 100644 --- a/met/src/libcode/vx_statistics/pair_data_ensemble.h +++ b/met/src/libcode/vx_statistics/pair_data_ensemble.h @@ -142,7 +142,7 @@ class PairDataEnsemble : public PairBase { void compute_phist(); void 
compute_ssvar(); - PairDataEnsemble subset_pairs(const SingleThresh &ot) const; + PairDataEnsemble subset_pairs_obs_thresh(const SingleThresh &ot) const; }; //////////////////////////////////////////////////////////////////////// diff --git a/met/src/libcode/vx_statistics/pair_data_point.cc b/met/src/libcode/vx_statistics/pair_data_point.cc index d728b242cb..0ca19804dd 100644 --- a/met/src/libcode/vx_statistics/pair_data_point.cc +++ b/met/src/libcode/vx_statistics/pair_data_point.cc @@ -174,8 +174,7 @@ void PairDataPoint::set_point_pair(int i_obs, const char *sid, if(i_obs < 0 || i_obs >= n_obs) { mlog << Error << "\nPairDataPoint::set_point_pair() -> " << "range check error: " << i_obs << " not in (0, " - << n_obs << ").\n\n" - ; + << n_obs << ").\n\n"; exit(1); } @@ -232,6 +231,67 @@ bool PairDataPoint::add_grid_pair(const NumArray &f_in, const NumArray &o_in, return(true); } +//////////////////////////////////////////////////////////////////////// + +PairDataPoint PairDataPoint::subset_pairs_cnt_thresh( + const SingleThresh &ft, const SingleThresh &ot, + const SetLogic type) const { + + // Check for no work to be done + if(ft.get_type() == thresh_na && ot.get_type() == thresh_na) { + return(*this); + } + + int i; + PairDataPoint out_pd; + + // Allocate memory for output pairs + out_pd.extend(n_obs); + out_pd.set_climo_cdf_info(cdf_info); + + bool cmn_flag = set_climo_flag(f_na, cmn_na); + bool csd_flag = set_climo_flag(f_na, csd_na); + bool wgt_flag = set_climo_flag(f_na, wgt_na); + + // Loop over the pairs + for(i=0; i " + << "the \"" << conf_key_mpr_column << "\" (" + << write_css(sa) << ") and \"" << conf_key_mpr_thresh + << "\" (" << write_css(ta) + << ") config file entries must have the same length!\n\n"; + exit(1); + } + + mpr_column = sa; + mpr_thresh = ta; + + return; +} + +//////////////////////////////////////////////////////////////////////// + void VxPairDataPoint::set_climo_cdf_info(const ClimoCDFInfo &info) { for(int i=0; imagic_str() << " versus " + << obs_info->magic_str() + << ", skipping observation due to matched pair filter since " + << reason_cs << ":\n" + << point_obs_to_string(hdr_arr, hdr_typ_str, hdr_sid_str, + hdr_ut, obs_qty, obs_arr, var_name) + << "\n"; + inc_count(rej_mpr, i, j, k); + continue; + } + // Compute weight for current point wgt_v = (wgt_dp == (DataPlane *) 0 ? default_grid_weight : wgt_dp->get(x, y)); @@ -1336,64 +1445,199 @@ void VxPairDataPoint::inc_count(int ***&rej, int i, int j, int k) { // //////////////////////////////////////////////////////////////////////// -PairDataPoint subset_pairs(const PairDataPoint &pd, - const SingleThresh &ft, const SingleThresh &ot, - const SetLogic type) { +bool check_fo_thresh(double f, double o, double cmn, double csd, + const SingleThresh &ft, const SingleThresh &ot, + const SetLogic type) { + bool status = true; + bool fcheck = ft.check(f, cmn, csd); + bool ocheck = ot.check(o, cmn, csd); + SetLogic t = type; - // Check for no work to be done - if(ft.get_type() == thresh_na && ot.get_type() == thresh_na) { - return(pd); + // If either of the thresholds is NA, reset the logic to intersection + // because an NA threshold is always true. 
+ if(ft.get_type() == thresh_na || ot.get_type() == thresh_na) { + t = SetLogic_Intersection; } - int i; - PairDataPoint out_pd; + switch(t) { + case(SetLogic_Union): + if(!fcheck && !ocheck) status = false; + break; - // Allocate memory for output pairs - out_pd.extend(pd.n_obs); - out_pd.set_climo_cdf_info(pd.cdf_info); + case(SetLogic_Intersection): + if(!fcheck || !ocheck) status = false; + break; - bool cmn_flag = set_climo_flag(pd.f_na, pd.cmn_na); - bool csd_flag = set_climo_flag(pd.f_na, pd.csd_na); - bool wgt_flag = set_climo_flag(pd.f_na, pd.wgt_na); + case(SetLogic_SymDiff): + if(fcheck == ocheck) status = false; + break; - // Loop over the pairs - for(i=0; i " + << "Unexpected SetLogic value of " << type << ".\n\n"; + exit(1); + break; + } - // Check for bad data - if(is_bad_data(pd.f_na[i]) || - is_bad_data(pd.o_na[i]) || - (cmn_flag && is_bad_data(pd.cmn_na[i])) || - (csd_flag && is_bad_data(pd.csd_na[i])) || - (wgt_flag && is_bad_data(pd.wgt_na[i]))) continue; + return(status); +} - // Keep pairs which meet the threshold criteria - if(check_fo_thresh(pd.f_na[i], pd.o_na[i], - pd.cmn_na[i], pd.csd_na[i], - ft, ot, type)) { +//////////////////////////////////////////////////////////////////////// - // Handle point data - if(pd.is_point_vx()) { - out_pd.add_point_pair(pd.sid_sa[i].c_str(), pd.lat_na[i], - pd.lon_na[i], pd.x_na[i], pd.y_na[i], - pd.vld_ta[i], pd.lvl_na[i], pd.elv_na[i], - pd.f_na[i], pd.o_na[i], pd.o_qc_sa[i].c_str(), - pd.cmn_na[i], pd.csd_na[i], pd.wgt_na[i]); - } - // Handle gridded data - else { - out_pd.add_grid_pair(pd.f_na[i], pd.o_na[i], pd.cmn_na[i], - pd.csd_na[i], pd.wgt_na[i]); +bool check_mpr_thresh(double f, double o, double cmn, double csd, + const StringArray &col_sa, const ThreshArray &col_ta, + ConcatString *reason_ptr) { + // Initialize + if(reason_ptr) reason_ptr->erase(); + + // Check arrays + if(col_sa.n() == 0 || col_ta.n() == 0) return(true); + + bool keep = true; + bool absv = false; + StringArray sa; + ConcatString cs; + double v, v_cur; + int i, j; + + // Loop over all the column filter names + for(i=0; i 1) { + + // Loop through the columns + for(j=1; j " + << "unsupported matched pair column name requested in \"" + << conf_key_mpr_column << "\" (" << s << ")!\n\n"; + exit(1); + } + + return(v); +} + +//////////////////////////////////////////////////////////////////////// + +void apply_mpr_thresh_mask(DataPlane &fcst_dp, DataPlane &obs_dp, + DataPlane &cmn_dp, DataPlane &csd_dp, + const StringArray &col_sa, const ThreshArray &col_ta) { + + // Check for no work to be done + if(col_sa.n() == 0 && col_ta.n() == 0) return; + + // Check for constant length + if(col_sa.n() != col_ta.n()) { + mlog << Error << "\napply_mpr_thresh_mask() -> " + << "the \"" << conf_key_mpr_column << "\" (" + << write_css(col_sa) << ") and \"" << conf_key_mpr_thresh + << "\" (" << write_css(col_ta) + << ") config file entries must have the same length!\n\n"; + exit(1); + } + + int nxy = fcst_dp.nx() * fcst_dp.ny(); + int n_skip = 0; + bool cmn_flag = !(cmn_dp.is_empty()); + bool csd_flag = !(csd_dp.is_empty()); + + // Loop over the pairs + for(int i=0; isubset_pairs(conf_info.vx_opt[i].othr_ta[m]); + pd = pd_ptr->subset_pairs_obs_thresh(conf_info.vx_opt[i].othr_ta[m]); // Continue if there are no points if(pd.n_obs == 0) continue; @@ -1779,7 +1779,7 @@ void process_grid_vx() { shc.set_obs_thresh(conf_info.vx_opt[i].othr_ta[l]); // Subset pairs using the current obs_thresh - pd = pd_all.subset_pairs(conf_info.vx_opt[i].othr_ta[l]); + pd = 
pd_all.subset_pairs_obs_thresh(conf_info.vx_opt[i].othr_ta[l]); // Continue if there are no points if(pd.n_obs == 0) continue; diff --git a/met/src/tools/core/grid_stat/grid_stat.cc b/met/src/tools/core/grid_stat/grid_stat.cc index 14b09c6d5a..cf30bd282f 100644 --- a/met/src/tools/core/grid_stat/grid_stat.cc +++ b/met/src/tools/core/grid_stat/grid_stat.cc @@ -106,6 +106,8 @@ // continuous and probabilistic statistics. // 050 03/02/20 Halley Gotway Add nc_pairs_var_name and rename // nc_pairs_var_str to nc_pairs_var_suffix. +// 051 03/28/21 Halley Gotway Add mpr_column and mpr_thresh +// filtering options. // //////////////////////////////////////////////////////////////////////// @@ -711,6 +713,13 @@ void process_scores() { << " climatology standard deviation field(s) for forecast " << conf_info.vx_opt[i].fcst_info->magic_str() << ".\n"; + // Apply MPR threshold filters + if(conf_info.vx_opt[i].mpr_sa.n() > 0) { + apply_mpr_thresh_mask(fcst_dp, obs_dp, cmn_dp, csd_dp, + conf_info.vx_opt[i].mpr_sa, + conf_info.vx_opt[i].mpr_ta); + } + // Setup the first pass through the data if(is_first_pass) setup_first_pass(fcst_dp); @@ -1961,8 +1970,9 @@ void do_cnt_sl1l2(const GridStatVxOpt &vx_opt, const PairDataPoint *pd_ptr) { for(i=0; isubset_pairs_cnt_thresh(vx_opt.fcnt_ta[i], + vx_opt.ocnt_ta[i], + vx_opt.cnt_logic); // Check for no matched pairs to process if(pd_thr.n_obs == 0) continue; diff --git a/met/src/tools/core/grid_stat/grid_stat_conf_info.cc b/met/src/tools/core/grid_stat/grid_stat_conf_info.cc index a5702ca06f..8b2b572eca 100644 --- a/met/src/tools/core/grid_stat/grid_stat_conf_info.cc +++ b/met/src/tools/core/grid_stat/grid_stat_conf_info.cc @@ -275,7 +275,7 @@ void GridStatConfInfo::process_flags() { // Check for at least one output data type if(!output_ascii_flag && !output_nc_flag) { - mlog << Error << "\nGridStatVxOpt::process_config() -> " + mlog << Error << "\nGridStatConfInfo::process_flags() -> " << "At least one output STAT or NetCDF type must be " << " requested in \"" << conf_key_output_flag << "\" or \"" << conf_key_nc_pairs_flag << "\".\n\n"; @@ -495,6 +495,9 @@ void GridStatVxOpt::clear() { var_name.clear(); var_suffix.clear(); + mpr_sa.clear(); + mpr_ta.clear(); + fcat_ta.clear(); ocat_ta.clear(); @@ -614,6 +617,10 @@ void GridStatVxOpt::process_config( // Populate the output_flag array with map values for(i=0; i= 5) { mlog << Debug(5) << "Parsed thresholds:\n" + << "Matched pair filter columns: " << write_css(mpr_sa) << "\n" + << "Matched pair filter thresholds: " << mpr_ta.get_str() << "\n" << "Forecast categorical thresholds: " << fcat_ta.get_str() << "\n" << "Observed categorical thresholds: " << ocat_ta.get_str() << "\n" << "Forecast continuous thresholds: " << fcnt_ta.get_str() << "\n" @@ -875,6 +884,7 @@ bool GridStatVxOpt::is_uv_match(const GridStatVxOpt &v) const { // // The following do not impact matched pairs: // desc, var_name, var_suffix, + // mpr_sa, mpr_ta, // fcat_ta, ocat_ta, // fcnt_ta, ocnt_ta, cnt_logic, // fwind_ta, owind_ta, wind_logic, diff --git a/met/src/tools/core/grid_stat/grid_stat_conf_info.h b/met/src/tools/core/grid_stat/grid_stat_conf_info.h index 3b92c5be06..e03b657488 100644 --- a/met/src/tools/core/grid_stat/grid_stat_conf_info.h +++ b/met/src/tools/core/grid_stat/grid_stat_conf_info.h @@ -145,6 +145,9 @@ class GridStatVxOpt { ConcatString var_suffix; // nc_pairs_var_suffix string // nc_pairs_var_str is deprecated + StringArray mpr_sa; // MPR filtering columns + ThreshArray mpr_ta; // MPR filtering thresholds + ThreshArray fcat_ta; // 
fcst categorical thresholds ThreshArray ocat_ta; // obs categorical thresholds diff --git a/met/src/tools/core/point_stat/point_stat.cc b/met/src/tools/core/point_stat/point_stat.cc index d1edfdb935..ba28097e6a 100644 --- a/met/src/tools/core/point_stat/point_stat.cc +++ b/met/src/tools/core/point_stat/point_stat.cc @@ -92,6 +92,8 @@ // 043 11/15/19 Halley Gotway Apply climatology bins to // continuous and probabilistic statistics. // 044 01/24/20 Halley Gotway Add HiRA RPS output. +// 045 03/28/21 Halley Gotway Add mpr_column and mpr_thresh +// filtering options. // //////////////////////////////////////////////////////////////////////// @@ -1016,6 +1018,7 @@ void process_scores() { << "Rejected: bad fcst value = " << conf_info.vx_opt[i].vx_pd.rej_fcst[j][k][l] << "\n" << "Rejected: bad climo mean = " << conf_info.vx_opt[i].vx_pd.rej_cmn[j][k][l] << "\n" << "Rejected: bad climo stdev = " << conf_info.vx_opt[i].vx_pd.rej_csd[j][k][l] << "\n" + << "Rejected: mpr filter = " << conf_info.vx_opt[i].vx_pd.rej_mpr[j][k][l] << "\n" << "Rejected: duplicates = " << conf_info.vx_opt[i].vx_pd.rej_dup[j][k][l] << "\n"; // Print report based on the number of matched pairs @@ -1421,8 +1424,9 @@ void do_cnt_sl1l2(const PointStatVxOpt &vx_opt, const PairDataPoint *pd_ptr) { for(i=0; isubset_pairs_cnt_thresh(vx_opt.fcnt_ta[i], + vx_opt.ocnt_ta[i], + vx_opt.cnt_logic); // Check for no matched pairs to process if(pd_thr.n_obs == 0) continue; diff --git a/met/src/tools/core/point_stat/point_stat_conf_info.cc b/met/src/tools/core/point_stat/point_stat_conf_info.cc index ecd6b8b3dc..5a039e1bae 100644 --- a/met/src/tools/core/point_stat/point_stat_conf_info.cc +++ b/met/src/tools/core/point_stat/point_stat_conf_info.cc @@ -606,6 +606,9 @@ void PointStatVxOpt::clear() { mask_sid.clear(); mask_llpnt.clear(); + mpr_sa.clear(); + mpr_ta.clear(); + mask_name.clear(); eclv_points.clear(); @@ -774,10 +777,16 @@ void PointStatVxOpt::process_config(GrdFileType ftype, int_to_setlogic(fdict.lookup_int(conf_key_wind_logic)), int_to_setlogic(odict.lookup_int(conf_key_wind_logic))); + // Conf: mpr_column and mpr_thresh + mpr_sa = odict.lookup_string_array(conf_key_mpr_column); + mpr_ta = odict.lookup_thresh_array(conf_key_mpr_thresh); + // Dump the contents of the current thresholds if(mlog.verbosity_level() >= 5) { mlog << Debug(5) << "Parsed thresholds:\n" + << "Matched pair filter columns: " << write_css(mpr_sa) << "\n" + << "Matched pair filter thresholds: " << mpr_ta.get_str() << "\n" << "Forecast categorical thresholds: " << fcat_ta.get_str() << "\n" << "Observed categorical thresholds: " << ocat_ta.get_str() << "\n" << "Forecast continuous thresholds: " << fcnt_ta.get_str() << "\n" @@ -932,6 +941,9 @@ void PointStatVxOpt::set_vx_pd(PointStatConfInfo *conf_info) { // Define the dimensions vx_pd.set_pd_size(n_msg_typ, n_mask, n_interp); + // Store the MPR filter threshold + vx_pd.set_mpr_thresh(mpr_sa, mpr_ta); + // Store the climo CDF info vx_pd.set_climo_cdf_info(cdf_info); diff --git a/met/src/tools/core/point_stat/point_stat_conf_info.h b/met/src/tools/core/point_stat/point_stat_conf_info.h index d849dcc113..dd1d787dfa 100644 --- a/met/src/tools/core/point_stat/point_stat_conf_info.h +++ b/met/src/tools/core/point_stat/point_stat_conf_info.h @@ -123,6 +123,9 @@ class PointStatVxOpt { StringArray mask_poly; // Masking polyline strings StringArray mask_sid; // Masking station ID's + StringArray mpr_sa; // MPR column names + ThreshArray mpr_ta; // MPR column thresholds + // Vector of MaskLatLon objects defining Lat/Lon 
Point masks vector mask_llpnt; diff --git a/met/src/tools/core/series_analysis/series_analysis.cc b/met/src/tools/core/series_analysis/series_analysis.cc index 5a17f62ff7..acc347aff0 100644 --- a/met/src/tools/core/series_analysis/series_analysis.cc +++ b/met/src/tools/core/series_analysis/series_analysis.cc @@ -1008,8 +1008,8 @@ void do_cnt(int n, const PairDataPoint *pd_ptr) { } // Apply continuous filtering thresholds to subset pairs - pd = subset_pairs(*pd_ptr, cnt_info.fthresh, cnt_info.othresh, - cnt_info.logic); + pd = pd_ptr->subset_pairs_cnt_thresh(cnt_info.fthresh, cnt_info.othresh, + cnt_info.logic); // Check for no matched pairs to process if(pd.n_obs == 0) continue; diff --git a/test/config/GridStatConfig_APCP_regrid b/test/config/GridStatConfig_APCP_regrid index 445ff414e9..7696febce9 100644 --- a/test/config/GridStatConfig_APCP_regrid +++ b/test/config/GridStatConfig_APCP_regrid @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_GRIB_lvl_typ_val b/test/config/GridStatConfig_GRIB_lvl_typ_val index ceadb05264..5e0f64d6a7 100644 --- a/test/config/GridStatConfig_GRIB_lvl_typ_val +++ b/test/config/GridStatConfig_GRIB_lvl_typ_val @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_GRIB_set_attr b/test/config/GridStatConfig_GRIB_set_attr index 88703198c1..d1d5dbc30d 100644 --- a/test/config/GridStatConfig_GRIB_set_attr +++ b/test/config/GridStatConfig_GRIB_set_attr @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_GTG_latlon b/test/config/GridStatConfig_GTG_latlon index 86419f863c..648863688e 100644 --- a/test/config/GridStatConfig_GTG_latlon +++ b/test/config/GridStatConfig_GTG_latlon @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_GTG_lc b/test/config/GridStatConfig_GTG_lc index 290756e91e..846f5a2e6e 100644 --- a/test/config/GridStatConfig_GTG_lc +++ b/test/config/GridStatConfig_GTG_lc @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_apply_mask b/test/config/GridStatConfig_apply_mask index fef335f064..1bb34bb9f1 100644 --- a/test/config/GridStatConfig_apply_mask +++ b/test/config/GridStatConfig_apply_mask @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_climo_WMO b/test/config/GridStatConfig_climo_WMO index 6974d71937..a9f4c120cf 100644 --- a/test/config/GridStatConfig_climo_WMO +++ b/test/config/GridStatConfig_climo_WMO @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_climo_prob b/test/config/GridStatConfig_climo_prob index 4d652daa12..7b91e8da0f 100644 --- a/test/config/GridStatConfig_climo_prob +++ 
b/test/config/GridStatConfig_climo_prob @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_fourier b/test/config/GridStatConfig_fourier index e978c99f8c..a441acd51f 100644 --- a/test/config/GridStatConfig_fourier +++ b/test/config/GridStatConfig_fourier @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_grid_weight b/test/config/GridStatConfig_grid_weight index c5cf23cef6..5ea4b6df87 100644 --- a/test/config/GridStatConfig_grid_weight +++ b/test/config/GridStatConfig_grid_weight @@ -41,6 +41,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_interp_shape b/test/config/GridStatConfig_interp_shape index cc212d77f8..af303ec165 100644 --- a/test/config/GridStatConfig_interp_shape +++ b/test/config/GridStatConfig_interp_shape @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_mpr_thresh b/test/config/GridStatConfig_mpr_thresh new file mode 100644 index 0000000000..bd28d883f2 --- /dev/null +++ b/test/config/GridStatConfig_mpr_thresh @@ -0,0 +1,274 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// Grid-Stat configuration file. +// +// For additional information, please see the MET User's Guide. +// +//////////////////////////////////////////////////////////////////////////////// + +// +// Output model name to be written +// +model = "GFS"; + +// +// Output description to be written +// May be set separately in each "obs.field" entry +// +desc = "NA"; + +// +// Output observation type to be written +// +obtype = "GFSANL"; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification grid +// +regrid = { + to_grid = NONE; + method = NEAREST; + width = 1; + vld_thresh = 0.5; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// May be set separately in each "field" entry +// +censor_thresh = []; +censor_val = []; +mpr_column = []; +mpr_thresh = []; +cat_thresh = []; +cnt_thresh = [ NA ]; +cnt_logic = UNION; +wind_thresh = [ NA ]; +wind_logic = UNION; +eclv_points = 0.05; +nc_pairs_var_name = ""; +nc_pairs_var_suffix = ""; +rank_corr_flag = FALSE; + +// +// Forecast and observation fields to be verified +// +fcst = { + + name = "TMP"; + level = "Z2"; + + field = [ + { + desc = "NO_MPR_THRESH"; + nc_pairs_var_suffix = desc; + }, + { + mpr_column = [ "OBS-FCST" ]; + mpr_thresh = [ >=-5&&<=5 ]; + desc = "OBS_FCST_DIFF"; + nc_pairs_var_suffix = desc; + }, + { + mpr_column = [ "ABS(OBS-FCST)" ]; + mpr_thresh = [ <=5 ]; + desc = "ABS_OBS_FCST_DIFF"; + nc_pairs_var_suffix = desc; + }, + { + mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_thresh = [ <=5 ]; + desc = "ABS_OBS_CLIMO_MEAN_DIFF"; + nc_pairs_var_suffix = desc; + }, + { + mpr_column = [ "CLIMO_CDF" ]; + mpr_thresh = [ >=0.25&&<=0.75 ]; + desc = "CLIMO_CDF_IQR"; + nc_pairs_var_suffix = desc; + } + ]; +} +obs = fcst; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Climatology mean data +// 
+climo_mean = fcst; +climo_mean = { + + file_name = [ ${CLIMO_MEAN_FILE_LIST} ]; + + regrid = { + method = BILIN; + width = 2; + vld_thresh = 0.5; + } + + time_interp_method = DW_MEAN; + day_interval = ${DAY_INTERVAL}; + hour_interval = ${HOUR_INTERVAL}; +} + +climo_stdev = climo_mean; +climo_stdev = { + file_name = [ ${CLIMO_STDEV_FILE_LIST} ]; +} + +// +// May be set separately in each "obs.field" entry +// +climo_cdf = { + cdf_bins = 1; + center_bins = FALSE; + write_bins = TRUE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification masking regions +// +mask = { + grid = [ "FULL" ]; + poly = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Confidence interval settings +// +ci_alpha = [ 0.05 ]; + +boot = { + interval = PCTILE; + rep_prop = 1.0; + n_rep = 0; + rng = "mt19937"; + seed = ""; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Data smoothing methods +// +interp = { + field = BOTH; + vld_thresh = 1.0; + shape = SQUARE; + + type = [ + { + method = NEAREST; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Neighborhood methods +// +nbrhd = { + width = [ 1 ]; + cov_thresh = [ >=0.5 ]; + vld_thresh = 1.0; + shape = SQUARE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Fourier decomposition +// +fourier = { + wave_1d_beg = []; + wave_1d_end = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Gradient statistics +// May be set separately in each "obs.field" entry +// +gradient = { + dx = [ 1 ]; + dy = [ 1 ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Distance Map statistics +// May be set separately in each "obs.field" entry +// +distance_map = { + baddeley_p = 2; + baddeley_max_dist = NA; + fom_alpha = 0.1; + zhu_weight = 0.5; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Statistical output types +// +output_flag = { + fho = NONE; + ctc = NONE; + cts = NONE; + mctc = NONE; + mcts = NONE; + cnt = NONE; + sl1l2 = STAT; + sal1l2 = NONE; + vl1l2 = NONE; + val1l2 = NONE; + vcnt = NONE; + pct = NONE; + pstd = NONE; + pjc = NONE; + prc = NONE; + eclv = NONE; + nbrctc = NONE; + nbrcts = NONE; + nbrcnt = NONE; + grad = NONE; + dmap = NONE; +} + +// +// NetCDF matched pairs output file +// +nc_pairs_flag = { + latlon = FALSE; + raw = FALSE; + diff = TRUE; + climo = FALSE; + climo_cdp = FALSE; + weight = FALSE; + nbrhd = FALSE; + fourier = FALSE; + gradient = FALSE; + distance_map = FALSE; + apply_mask = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// + +grid_weight_flag = COS_LAT; +tmp_dir = "/tmp"; +output_prefix = "${OUTPUT_PREFIX}"; +version = "V10.0.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/test/config/GridStatConfig_no_leap b/test/config/GridStatConfig_no_leap index e415640c07..47ab1f474b 100644 --- a/test/config/GridStatConfig_no_leap +++ b/test/config/GridStatConfig_no_leap @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_prob_as_scalar b/test/config/GridStatConfig_prob_as_scalar index 2c63950004..13c6143438 
100644 --- a/test/config/GridStatConfig_prob_as_scalar +++ b/test/config/GridStatConfig_prob_as_scalar @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_python b/test/config/GridStatConfig_python index 85dd871c5e..0d5e908266 100644 --- a/test/config/GridStatConfig_python +++ b/test/config/GridStatConfig_python @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_python_mixed b/test/config/GridStatConfig_python_mixed index 367c0e1118..b3a6c2ea2b 100644 --- a/test/config/GridStatConfig_python_mixed +++ b/test/config/GridStatConfig_python_mixed @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_rtma b/test/config/GridStatConfig_rtma index 4d88b8e6c7..77d491e5b5 100644 --- a/test/config/GridStatConfig_rtma +++ b/test/config/GridStatConfig_rtma @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_rtma_perc_thresh b/test/config/GridStatConfig_rtma_perc_thresh index 0f96a179f4..cabb9c13df 100644 --- a/test/config/GridStatConfig_rtma_perc_thresh +++ b/test/config/GridStatConfig_rtma_perc_thresh @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_st4 b/test/config/GridStatConfig_st4 index abb1c4079f..7ad113c13f 100644 --- a/test/config/GridStatConfig_st4 +++ b/test/config/GridStatConfig_st4 @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/GridStatConfig_st4_censor b/test/config/GridStatConfig_st4_censor index fd9debdcdf..8f088b7a6d 100644 --- a/test/config/GridStatConfig_st4_censor +++ b/test/config/GridStatConfig_st4_censor @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_APCP b/test/config/PointStatConfig_APCP index 3923198689..920034bc27 100644 --- a/test/config/PointStatConfig_APCP +++ b/test/config/PointStatConfig_APCP @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_APCP_HIRA b/test/config/PointStatConfig_APCP_HIRA index 4941473a82..e39d21863c 100644 --- a/test/config/PointStatConfig_APCP_HIRA +++ b/test/config/PointStatConfig_APCP_HIRA @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_GTG_latlon b/test/config/PointStatConfig_GTG_latlon index 4a33c23102..fc5fa1eef9 100644 --- a/test/config/PointStatConfig_GTG_latlon +++ b/test/config/PointStatConfig_GTG_latlon @@ -31,6 
+31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_GTG_lc b/test/config/PointStatConfig_GTG_lc index 0a08acdf19..e1c5f89ab2 100644 --- a/test/config/PointStatConfig_GTG_lc +++ b/test/config/PointStatConfig_GTG_lc @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_INTERP_OPTS b/test/config/PointStatConfig_INTERP_OPTS index 902138d916..1538ce4bf9 100644 --- a/test/config/PointStatConfig_INTERP_OPTS +++ b/test/config/PointStatConfig_INTERP_OPTS @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_LAND_TOPO_MASK b/test/config/PointStatConfig_LAND_TOPO_MASK index a98c163ff1..9d9e77564e 100644 --- a/test/config/PointStatConfig_LAND_TOPO_MASK +++ b/test/config/PointStatConfig_LAND_TOPO_MASK @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_MASK_SID b/test/config/PointStatConfig_MASK_SID index 6333358fac..b9afca5389 100644 --- a/test/config/PointStatConfig_MASK_SID +++ b/test/config/PointStatConfig_MASK_SID @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_PHYS b/test/config/PointStatConfig_PHYS index 03a67ca8b4..aea79c4b19 100644 --- a/test/config/PointStatConfig_PHYS +++ b/test/config/PointStatConfig_PHYS @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_PHYS_pint b/test/config/PointStatConfig_PHYS_pint index 951d936320..be5abaaf6f 100644 --- a/test/config/PointStatConfig_PHYS_pint +++ b/test/config/PointStatConfig_PHYS_pint @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_WINDS b/test/config/PointStatConfig_WINDS index 66257da5fa..3dc709d48c 100644 --- a/test/config/PointStatConfig_WINDS +++ b/test/config/PointStatConfig_WINDS @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_aeronet b/test/config/PointStatConfig_aeronet index 7423d57bba..a1405e9424 100644 --- a/test/config/PointStatConfig_aeronet +++ b/test/config/PointStatConfig_aeronet @@ -31,6 +31,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = 
[ NA ]; //cnt_logic = UNION; diff --git a/test/config/PointStatConfig_airnow b/test/config/PointStatConfig_airnow index 89a1b22252..eb18e2000f 100644 --- a/test/config/PointStatConfig_airnow +++ b/test/config/PointStatConfig_airnow @@ -38,6 +38,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_climo b/test/config/PointStatConfig_climo index 843c927614..17005f9979 100644 --- a/test/config/PointStatConfig_climo +++ b/test/config/PointStatConfig_climo @@ -31,6 +31,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_climo_WMO b/test/config/PointStatConfig_climo_WMO index fe2eedd6e9..722edd4881 100644 --- a/test/config/PointStatConfig_climo_WMO +++ b/test/config/PointStatConfig_climo_WMO @@ -31,6 +31,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_climo_prob b/test/config/PointStatConfig_climo_prob index 2d59e5712b..53a754b87c 100644 --- a/test/config/PointStatConfig_climo_prob +++ b/test/config/PointStatConfig_climo_prob @@ -32,6 +32,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_dup b/test/config/PointStatConfig_dup index 9f0c2992ad..e67fb84089 100644 --- a/test/config/PointStatConfig_dup +++ b/test/config/PointStatConfig_dup @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_mpr_thresh b/test/config/PointStatConfig_mpr_thresh new file mode 100644 index 0000000000..6a33eebf2a --- /dev/null +++ b/test/config/PointStatConfig_mpr_thresh @@ -0,0 +1,221 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// Point-Stat configuration file. +// +// For additional information, please see the MET User's Guide. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// Output model name to be written +// +model = "GFS"; + +// +// Output description to be written +// May be set separately in each "obs.field" entry +// +desc = "NA"; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification grid +// +regrid = { + to_grid = NONE; + method = NEAREST; + width = 1; + vld_thresh = 0.5; +} + +//////////////////////////////////////////////////////////////////////////////// + +mpr_column = []; +mpr_thresh = []; +cat_thresh = [ NA ]; +cnt_thresh = [ NA ]; +cnt_logic = UNION; +wind_thresh = [ NA ]; +wind_logic = UNION; +eclv_points = 0.05; + +// +// Forecast and observation fields to be verified +// +fcst = { + sid_inc = []; + sid_exc = []; + cat_thresh = []; + message_type = [ "ADPSFC" ]; + + name = "TMP"; + level = "Z2"; + + field = [ + { + desc = "NO_MPR_THRESH"; + }, + { + mpr_column = [ "OBS-FCST" ]; + mpr_thresh = [ >=-5&&<=5 ]; + desc = "OBS_FCST_DIFF"; + }, + { + mpr_column = [ "ABS(OBS-FCST)" ]; + mpr_thresh = [ <=5 ]; + desc = "ABS_OBS_FCST_DIFF"; + }, + { + mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_thresh = [ <=5 ]; + desc = "ABS_OBS_CLIMO_MEAN_DIFF"; + }, + { + mpr_column = [ "CLIMO_CDF" ]; + mpr_thresh = [ >=0.25&&<=0.75 ]; + desc = "CLIMO_CDF_IQR"; + } + ]; +} +obs = fcst; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Climatology mean data +// +climo_mean = fcst; +climo_mean = { + + file_name = [ ${CLIMO_MEAN_FILE_LIST} ]; + + regrid = { + method = BILIN; + width = 2; + vld_thresh = 0.5; + } + + time_interp_method = DW_MEAN; + day_interval = ${DAY_INTERVAL}; + hour_interval = ${HOUR_INTERVAL}; +} + +climo_stdev = climo_mean; +climo_stdev = { + file_name = [ ${CLIMO_STDEV_FILE_LIST} ]; +} + +// +// May be set separately in each "obs.field" entry +// +climo_cdf = { + cdf_bins = 1; + center_bins = FALSE; + write_bins = TRUE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Point observation time window +// +obs_window = { + beg = -5400; + end = 5400; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification masking regions +// +mask = { + grid = [ "FULL" ]; + poly = []; + sid = []; + llpnt = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Confidence interval settings +// +ci_alpha = [ 0.05 ]; + +boot = { + interval = PCTILE; + rep_prop = 1.0; + n_rep = 1000; + rng = "mt19937"; + seed = "1"; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Interpolation methods +// +interp = { + vld_thresh = 1.0; + + type = [ + { + method = NEAREST; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// HiRA verification method +// +hira = { + flag = FALSE; + width = [ 2, 3, 4, 5 ]; + vld_thresh = 1.0; + cov_thresh = [ ==0.25 ]; + shape = SQUARE; + prob_cat_thresh = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Statistical output types +// +output_flag = { + fho = NONE; + ctc = NONE; + cts = NONE; + mctc = NONE; + mcts = NONE; + cnt = NONE; + sl1l2 = STAT; + sal1l2 = NONE; + vl1l2 = NONE; + val1l2 = NONE; + vcnt = NONE; + pct = NONE; + pstd = NONE; + pjc = NONE; + prc = NONE; + ecnt = NONE; + rps = NONE; + eclv = NONE; + mpr = NONE; +} + 
+//////////////////////////////////////////////////////////////////////////////// + +obs_quality = []; +duplicate_flag = NONE; +rank_corr_flag = TRUE; +tmp_dir = "/tmp"; +output_prefix = "${OUTPUT_PREFIX}"; +version = "V10.0.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/test/config/PointStatConfig_obs_summary b/test/config/PointStatConfig_obs_summary index 1bce341f5e..03b00e3438 100644 --- a/test/config/PointStatConfig_obs_summary +++ b/test/config/PointStatConfig_obs_summary @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_obs_summary_all b/test/config/PointStatConfig_obs_summary_all index 4bbe7821b3..329a3bd05f 100644 --- a/test/config/PointStatConfig_obs_summary_all +++ b/test/config/PointStatConfig_obs_summary_all @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_prob b/test/config/PointStatConfig_prob index 3c26b54ec0..c1d7f8d58f 100644 --- a/test/config/PointStatConfig_prob +++ b/test/config/PointStatConfig_prob @@ -31,6 +31,8 @@ obs_window = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/PointStatConfig_python b/test/config/PointStatConfig_python index e975b01b9e..2b073fff61 100644 --- a/test/config/PointStatConfig_python +++ b/test/config/PointStatConfig_python @@ -37,6 +37,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = [ NA ]; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/PointStatConfig_sid_inc_exc b/test/config/PointStatConfig_sid_inc_exc index a7766ff324..70f17d7943 100644 --- a/test/config/PointStatConfig_sid_inc_exc +++ b/test/config/PointStatConfig_sid_inc_exc @@ -33,6 +33,8 @@ obs_window = { censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/ref_config/GridStatConfig_03h b/test/config/ref_config/GridStatConfig_03h index dc93b76496..ce8b0f982c 100644 --- a/test/config/ref_config/GridStatConfig_03h +++ b/test/config/ref_config/GridStatConfig_03h @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; diff --git a/test/config/ref_config/GridStatConfig_24h b/test/config/ref_config/GridStatConfig_24h index 260e1e901d..1f7fb01cda 100644 --- a/test/config/ref_config/GridStatConfig_24h +++ b/test/config/ref_config/GridStatConfig_24h @@ -42,6 +42,8 @@ regrid = { // censor_thresh = []; censor_val = []; +mpr_column = []; +mpr_thresh = []; cat_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; @@ -51,6 +53,7 @@ eclv_points = 0.05; nc_pairs_var_name = ""; nc_pairs_var_suffix = ""; rank_corr_flag = FALSE; + // // Forecast and observation fields to be verified // diff --git a/test/config/ref_config/PointStatConfig_ADPUPA b/test/config/ref_config/PointStatConfig_ADPUPA index 9976d42778..a458683711 100644 --- a/test/config/ref_config/PointStatConfig_ADPUPA +++ 
b/test/config/ref_config/PointStatConfig_ADPUPA @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_thresh = [ NA ]; diff --git a/test/config/ref_config/PointStatConfig_ONLYSF b/test/config/ref_config/PointStatConfig_ONLYSF index 8c969f69f0..9276f52a45 100644 --- a/test/config/ref_config/PointStatConfig_ONLYSF +++ b/test/config/ref_config/PointStatConfig_ONLYSF @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_logic = INTERSECTION; diff --git a/test/config/ref_config/PointStatConfig_WINDS b/test/config/ref_config/PointStatConfig_WINDS index 82ba02f5e4..5e18b2f1dc 100644 --- a/test/config/ref_config/PointStatConfig_WINDS +++ b/test/config/ref_config/PointStatConfig_WINDS @@ -30,6 +30,8 @@ regrid = { //////////////////////////////////////////////////////////////////////////////// +mpr_column = []; +mpr_thresh = []; cnt_thresh = [ NA ]; cnt_logic = UNION; wind_logic = INTERSECTION; diff --git a/test/xml/unit_grid_stat.xml b/test/xml/unit_grid_stat.xml index 729761c71d..1d562778f7 100644 --- a/test/xml/unit_grid_stat.xml +++ b/test/xml/unit_grid_stat.xml @@ -234,4 +234,31 @@ + + &MET_BIN;/grid_stat + + OUTPUT_PREFIX MPR_THRESH + DAY_INTERVAL 1 + HOUR_INTERVAL 6 + CLIMO_MEAN_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590409" + + + CLIMO_STDEV_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590409" + + + + \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \ + &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1200_000.grb2 \ + &CONFIG_DIR;/GridStatConfig_mpr_thresh \ + -outdir &OUTPUT_DIR;/grid_stat -v 3 + + + &OUTPUT_DIR;/grid_stat/grid_stat_MPR_THRESH_120000L_20120409_120000V.stat + &OUTPUT_DIR;/grid_stat/grid_stat_MPR_THRESH_120000L_20120409_120000V_pairs.nc + + + diff --git a/test/xml/unit_point_stat.xml b/test/xml/unit_point_stat.xml index c369108a08..8750e1ee68 100644 --- a/test/xml/unit_point_stat.xml +++ b/test/xml/unit_point_stat.xml @@ -8,6 +8,7 @@ + ]> @@ -454,4 +455,30 @@ + + &MET_BIN;/point_stat + + OUTPUT_PREFIX MPR_THRESH + DAY_INTERVAL 1 + HOUR_INTERVAL 6 + CLIMO_MEAN_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590409" + + + CLIMO_STDEV_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590409" + + + + \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \ + &OUTPUT_DIR;/pb2nc/ndas.20120409.t12z.prepbufr.tm00.nc \ + &CONFIG_DIR;/PointStatConfig_mpr_thresh \ + -outdir &OUTPUT_DIR;/point_stat -v 3 + + + &OUTPUT_DIR;/point_stat/point_stat_MPR_THRESH_120000L_20120409_120000V.stat + + +
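The unit_grid_stat.xml and unit_point_stat.xml additions above run Grid-Stat and Point-Stat with the new *_mpr_thresh configurations to exercise the filtering options. As a small worked sketch of the intended behavior, assuming the column names and thresholds shown in those test configurations:

    // Keep only pairs whose absolute forecast error is at most 5
    mpr_column = [ "ABS(OBS-FCST)" ];
    mpr_thresh = [ <=5 ];
    // For example, FCST = 300.0 and OBS = 292.5 gives ABS(OBS-FCST) = 7.5,
    // which fails <=5, so that pair is excluded from the statistics and
    // counted in the "Rejected: mpr filter" log line added to Point-Stat above.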