FIX: Fix for numpy nan calls for 2.0. (#1597)
* FIX: Fix for numpy nan calls for 2.0.

* FIX: Change numpy inf calls.

* FIX: Update more nan calls.

* CI: Update pyproject for deprecation warnings.

* FIX: Fix for indexing issue.

* FIX: Fix changing apis in xarray + numpy

* ADD: Add pyproject fix again

* Put the linting back in

* FIX: Fix cfradial linting

* ADD: Add numpy 2.0 in ci env

* FIX: Fix percent issues with scripts

* FIX: Fix check on maximum data point in array

* FIX: Fix the numpy 2.0 pin

* FIX: Fix lower bound of array

* DEL: Remove erroneous test

* FIX: Fix test for sigmet within 2 sigfig

* Be within one decimal

* FIX: Ensure check is for a float32 for sigmet freq

---------

Co-authored-by: mgrover1 <mgroverwx@gmail.com>
zssherman and mgrover1 authored Jun 20, 2024
1 parent 94e8ba4 commit 99e13b1
Showing 16 changed files with 45 additions and 44 deletions.
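Most of the changes listed in the commit message swap NumPy aliases that NumPy 2.0 removed for their surviving spellings. A minimal sketch of the rename pattern assumed throughout the diff (illustrative only, not Py-ART code):

# NumPy 2.0 alias renames applied throughout this commit:
# np.NaN / np.NAN -> np.nan, np.PINF -> np.inf, np.NINF -> -np.inf.
# The removed aliases raise AttributeError under NumPy >= 2.0.
import numpy as np

missing = np.zeros((2, 3)) + np.nan   # was: np.zeros(...) + np.NaN
fill = np.nan                         # was: np.NaN
pos_inf, neg_inf = np.inf, -np.inf    # were: np.PINF, np.NINF
print(missing, fill, pos_inf, neg_inf)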
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -24,7 +24,7 @@ repos:
- id: black-jupyter

- repo: https://github.com/charliermarsh/ruff-pre-commit
-rev: 'v0.0.239'
+rev: 'v0.4.9'
hooks:
- id: ruff
args: [ "--fix" ]
4 changes: 2 additions & 2 deletions pyart/aux_io/odim_h5.py
@@ -374,10 +374,10 @@ def read_odim_h5(
try:
sweep_data = _get_odim_h5_sweep_data(hfile[dset][h_field_key])
except KeyError:
-sweep_data = np.zeros((rays_in_sweep, max_nbins)) + np.NaN
+sweep_data = np.zeros((rays_in_sweep, max_nbins)) + np.nan
sweep_nbins = sweep_data.shape[1]
fdata[start : start + rays_in_sweep, :sweep_nbins] = sweep_data[:]
-# set data to NaN if its beyond the range of this sweep
+# set data to nan if its beyond the range of this sweep
fdata[start : start + rays_in_sweep, sweep_nbins:max_nbins] = np.nan
start += rays_in_sweep
# create field dictionary
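The reader above pads gates beyond a sweep's range with NaN; a standalone sketch of that padding pattern, using made-up sizes rather than real ODIM dimensions:

# Hypothetical sizes; illustrates padding a short sweep into the full-width
# array with NaN, as the odim_h5 reader does for gates beyond the sweep.
import numpy as np

rays_in_sweep, sweep_nbins, max_nbins = 4, 6, 10
sweep_data = np.zeros((rays_in_sweep, sweep_nbins))   # stand-in for decoded data
fdata = np.empty((rays_in_sweep, max_nbins))
fdata[:, :sweep_nbins] = sweep_data
fdata[:, sweep_nbins:max_nbins] = np.nan              # missing gates stay NaN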
8 changes: 4 additions & 4 deletions pyart/graph/gridmapdisplay.py
@@ -274,14 +274,14 @@ def plot_grid(
if add_grid_lines:
if lon_lines is None:
lon_lines = np.linspace(
-np.around(ds.lon.min() - 0.1, decimals=2),
-np.around(ds.lon.max() + 0.1, decimals=2),
+np.around(ds.lon.min() - 0.1, decimals=2).values,
+np.around(ds.lon.max() + 0.1, decimals=2).values,
5,
)
if lat_lines is None:
lat_lines = np.linspace(
-np.around(ds.lat.min() - 0.1, decimals=2),
-np.around(ds.lat.max() + 0.1, decimals=2),
+np.around(ds.lat.min() - 0.1, decimals=2).values,
+np.around(ds.lat.max() + 0.1, decimals=2).values,
5,
)

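For context, ds.lon.min() yields a 0-d xarray DataArray, so np.around(...) stays wrapped in xarray; the added .values pulls out a plain scalar before it reaches np.linspace. A rough sketch with a made-up dataset (ds here is an invented stand-in, not the grid object plot_grid receives):

# Minimal sketch: 0-d DataArray -> plain scalar via .values before np.linspace.
import numpy as np
import xarray as xr

ds = xr.Dataset({"lon": ("x", np.array([-105.3, -104.7, -104.1]))})
lo = np.around(ds.lon.min() - 0.1, decimals=2).values   # 0-d DataArray -> scalar
hi = np.around(ds.lon.max() + 0.1, decimals=2).values
lon_lines = np.linspace(lo, hi, 5)
print(lon_lines)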
2 changes: 1 addition & 1 deletion pyart/io/cfradial.py
@@ -922,7 +922,7 @@ def _calculate_scale_and_offset(dic, dtype, minimum=None, maximum=None):
if "_FillValue" in dic:
fillvalue = dic["_FillValue"]
else:
-fillvalue = np.NaN
+fillvalue = np.nan

data = dic["data"].copy()
data = np.ma.array(data, mask=(~np.isfinite(data) | (data == fillvalue)))
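_calculate_scale_and_offset derives packing parameters after masking invalid gates; a toy sketch of that masking step with the np.nan fill value (invented data, not CF/Radial output):

# Illustrative only: mask non-finite gates and the fill value before taking
# the min/max used for scale-and-offset packing, mirroring the lines above.
import numpy as np

fillvalue = np.nan                      # default when no _FillValue is present
data = np.array([1.0, np.nan, -np.inf, 7.5, 3.25])
masked = np.ma.array(data, mask=(~np.isfinite(data) | (data == fillvalue)))
print(masked.min(), masked.max())       # 1.0 7.5 -- finite gates only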
2 changes: 1 addition & 1 deletion pyart/io/mdv_common.py
@@ -597,7 +597,7 @@ def read_a_field(self, fnum, debug=False):
sw_data = np.frombuffer(decompr_data, np_form).astype("float32")
sw_data.shape = (ny, nx)
mask = sw_data == field_header["bad_data_value"]
-np.putmask(sw_data, mask, [np.NaN])
+np.putmask(sw_data, mask, [np.nan])

# scale and offset the data, store in field_data
scale = field_header["scale"]
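np.putmask replaces flagged gates in place; a self-contained sketch of the bad-data substitution in read_a_field (the sentinel value is invented):

# -9999.0 is a hypothetical sentinel, standing in for MDV's bad_data_value.
import numpy as np

sw_data = np.array([[1.0, -9999.0], [3.5, 2.0]], dtype="float32")
mask = sw_data == -9999.0
np.putmask(sw_data, mask, [np.nan])     # flagged gates become NaN in place
print(sw_data)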
2 changes: 1 addition & 1 deletion pyart/retrieve/echo_class.py
@@ -96,7 +96,7 @@ def steiner_conv_strat(

# Get reflectivity data
ze = np.ma.copy(grid.fields[refl_field]["data"])
-ze = ze.filled(np.NaN)
+ze = ze.filled(np.nan)

eclass = steiner_class_buff(
ze,
2 changes: 1 addition & 1 deletion pyart/retrieve/gate_id.py
@@ -54,7 +54,7 @@ def map_profile_to_gates(

# Check that z is not a MaskedArray
if isinstance(z, np.ma.MaskedArray):
-z = z.filled(np.NaN)
+z = z.filled(np.nan)

# find toa is not provided
if toa is None:
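This change and the echo_class.py one above both call MaskedArray.filled, which drops the mask and returns a plain ndarray with masked gates set to the given fill value; a tiny sketch:

# filled(np.nan): plain ndarray out, NaN wherever the input was masked.
import numpy as np

z = np.ma.masked_array([10.0, 20.0, 30.0], mask=[False, True, False])
print(z.filled(np.nan))                 # [10. nan 30.]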
1 change: 1 addition & 0 deletions pyproject.toml
@@ -18,6 +18,7 @@ exclude = [
"doc",
]

+[tool.ruff.lint]
# E402: module level import not at top of file
# E501: line too long - let black worry about that
# E731: do not assign a lambda expression, use a def
2 changes: 1 addition & 1 deletion scripts/anytocfradial
@@ -68,7 +68,7 @@ if __name__ == "__main__":
"-v",
"--version",
action="version",
-version="Py-ART version %s" % (pyart.__version__),
+version=f"Py-ART version {pyart.__version__}",
)
args = parser.parse_args()

49 changes: 25 additions & 24 deletions scripts/check_cfradial
@@ -67,13 +67,13 @@ class AttributeTable:
# check for incorrect type
if attr_obj.type_bad(type(attr)):
tup = (self.text, attr_name, type(attr), attr_obj._type)
-t = "%s '%s' has incorrect type: %s should be %s." % tup
+t = "{} '{}' has incorrect type: {} should be {}.".format(*tup)
log_error(self.section, t)

# check for incorrect value
if attr_obj.value_bad(attr):
tup = (self.text, attr_name, attr, attr_obj.value)
-t = "%s '%s' has incorrect value: %s should be %s." % tup
+t = "{} '{}' has incorrect value: {} should be {}.".format(*tup)
log_error(self.section, t)

def check(self, test_var, verb=False):
@@ -170,12 +170,12 @@ class VariableTable:
# check for incorrect type
if var_obj.dtype_bad(var.dtype):
tup = (self.text, var_name, var.dtype, var_obj.dtype)
-t = "%s '%s' has incorrect type: %s should be %s" % tup
+t = "{} '{}' has incorrect type: {} should be {}".format(*tup)
log_error(self.section, t)
# check for incorrect dim
if var_obj.dim_bad(var.dimensions):
tup = (self.text, var_name, var.dimensions, var_obj.dim)
-t = "%s '%s' has incorrect dimensions: %s should be %s" % tup
+t = "{} '{}' has incorrect dimensions: {} should be {}".format(*tup)
log_error(self.section, t)
# check for bad units
if "units" not in var.ncattrs() and var_obj.units is not None:
@@ -184,7 +184,7 @@
return
if var_obj.units is not None and var_obj.units_bad(var.units):
tup = (self.text, var_name, var.units, var_obj.units)
-t = "%s '%s' has incorrect units: %s should be %s" % tup
+t = "{} '{}' has incorrect units: {} should be {}".format(*tup)
log_error(self.section, t)

def check(self, dataset, verb=False):
@@ -267,7 +267,7 @@ def check_attribute(section, obj, text, attr_name, valid_choices):
attr = getattr(obj, attr_name)
if attr not in valid_choices:
tup = (text, attr_name, attr, " ".join(valid_choices))
-t = "%s '%s' has an invalid value: %s must be one of %s" % tup
+t = "{} '{}' has an invalid value: {} must be one of {}".format(*tup)
log_error(section, t)
return

@@ -279,7 +279,7 @@ def check_char_variable(section, dataset, text, var_name, valid_options):
value = var.tostring().strip("\x00").strip()
if value not in valid_options:
tup = (text, var_name, value, " ".join(valid_options))
-t = "%s '%s' has an invalid value: %s must be one of %s" % tup
+t = "{} '{}' has an invalid value: {} must be one of {}".format(*tup)
log_error(section, t)
return

@@ -309,7 +309,7 @@ def check_valid_time_format(section, dataset, text, var_name):
time.strptime(s, "%Y-%m-%dT%H:%M:%SZ")
except:
tup = (text, var_name, s, "yyyy-mm-ddThh:mm:ssZ")
-t = "%s '%s' has an invalid format: %s should be %s" % (tup)
+t = "{} '{}' has an invalid format: {} should be {}".format(*tup)
log_error(section, t)


@@ -322,14 +322,14 @@ def check_metagroup(section, dataset, meta_group_name, valid_meta_group_vars):
var = dataset.variables[var_name]
if "meta_group" not in var.ncattrs():
tup = (meta_group_name, var_name)
-text = "%s %s does not have a `meta_group` attribute" % (tup)
+text = "{} {} does not have a `meta_group` attribute".format(*tup)
log_error(section, text)
else:
if var.meta_group != meta_group_name:
tup = (meta_group_name, var_name, var.meta_group, meta_group_name)
text = (
"%s %s 'meta_group' attribute has incorrect "
"value: %s should be %s" % (tup)
"{} {} 'meta_group' attribute has incorrect "
"value: {} should be {}".format(*tup)
)
log_error(section, text)

@@ -339,8 +339,8 @@ def check_metagroup(section, dataset, meta_group_name, valid_meta_group_vars):
for var_name in find_all_meta_group_vars(dataset, meta_group_name):
if var_name not in valid_meta_group_vars:
text = (
"variable {} should not have its meta_group attribute "
"set to '{}'".format(var_name, meta_group_name)
f"variable {var_name} should not have its meta_group attribute "
f"set to '{meta_group_name}'"
)
log_error(section, text)

@@ -518,7 +518,7 @@ def check_cfradial_compliance(dataset, verb=False):
tup = (time_str, "yyyy-mm-ddThh:mm:ssZ")
t = (
"'time' attribute 'units' has an invalid formatted time"
"value: %s should be %s" % (tup)
"value: {} should be {}".format(*tup)
)
log_error("4.4.1", t)

@@ -530,7 +530,7 @@
tup = (time_str, s)
t = (
"time attribute 'units' does not match time in "
"time_reference variable: %s verses %s" % (tup)
"time_reference variable: {} verses {}".format(*tup)
)
log_error("4.4.1", t)
elif "time_coverage_start" in dataset.variables:
@@ -540,7 +540,7 @@
tup = (time_str, s)
t = (
"time attribute 'units' does not match time in "
"time_coverage_start variable: %s verses %s" % (tup)
"time_coverage_start variable: {} verses {}".format(*tup)
)
log_error("4.4.1", t)

@@ -657,7 +657,7 @@ def check_cfradial_compliance(dataset, verb=False):
else: # fixed platfrom
for v in ["heading", "roll", "pitch", "drift", "rotation", "tilt"]:
if v in dataset.variables:
-t = "variable '%s' must be omitted for fixed platforms" % (v)
+t = f"variable '{v}' must be omitted for fixed platforms"
log_error("4.9", t)

# 4.10 Moments field data variables
@@ -669,7 +669,7 @@
# check the data type
if var.dtype not in [BYTE, SHORT, INT, FLOAT, DOUBLE]:
tup = (var_name, var.dtype)
-t = "field variable '%s' has invalid type: %s" % (tup)
+t = "field variable '{}' has invalid type: {}".format(*tup)
log_error("4.10", t)

# check attributes
@@ -682,7 +682,7 @@

# TODO check standard_name, against variable name
# TODO check units correct for given standard_name
-text = "field variable %s" % var_name
+text = f"field variable {var_name}"
field_attrs = AttributeTable(text, "4.10")
field_attrs.opt_attr("long_name", STRING)
field_attrs.req_attr("standard_name", STRING)
@@ -719,8 +719,8 @@ def check_cfradial_compliance(dataset, verb=False):
dim_0 = dataset.variables[v].dimensions[0]
if dim_0 != "sweep":
text = (
"instrument_parameters {} must have a first "
"dimension of sweep, not {}".format(v, dim_0)
f"instrument_parameters {v} must have a first "
f"dimension of sweep, not {dim_0}"
)
log_error("5.1", text)

@@ -917,7 +917,7 @@ def check_cfradial_compliance(dataset, verb=False):
if dim_0 != "r_calib":
text = (
"radar_calibration r_calib_time must have first "
"dimension of 'r_calib' not '%s'" % (dim_0)
f"dimension of 'r_calib' not '{dim_0}'"
)
log_error("5.4.2", text)
else:
@@ -967,8 +967,9 @@ def check_cfradial_compliance(dataset, verb=False):
else:
for var_name in valid_pv_vars:
if var_name in dataset.variables:
-t = "variable %s should not exist as the platform is" "stationary" % (
-var_name
+t = (
+f"variable {var_name} should not exist as the platform is"
+"stationary"
)
log_error("5.6", t)

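The script edits above are formatting migrations: %-interpolation against a tuple becomes str.format(*tup) or an f-string with identical output. A quick check of that equivalence (the example values are invented):

# Invented values; shows the %-to-format/f-string rewrites are output-equivalent.
tup = ("global attribute", "Conventions", "CF-1.6", "CF/Radial")

old = "%s '%s' has incorrect value: %s should be %s." % tup
new = "{} '{}' has incorrect value: {} should be {}.".format(*tup)
assert old == new

text, attr_name = tup[0], tup[1]
assert f"{text} '{attr_name}'" == "%s '%s'" % (text, attr_name)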
2 changes: 1 addition & 1 deletion scripts/convert_legacy_grid
@@ -89,7 +89,7 @@ def main():
print("Field:", field)
legacy_var = dset_legacy.variables[field]
if legacy_var.shape != field_shape_with_time:
-warnings.warn("Field %s skipped due to incorrect shape" % (field))
+warnings.warn(f"Field {field} skipped due to incorrect shape")
continue
_transfer_var(dset_modern, field, legacy_var, ("time", "z", "y", "x"))

2 changes: 1 addition & 1 deletion scripts/radar_info
@@ -61,7 +61,7 @@ if __name__ == "__main__":
"-v",
"--version",
action="version",
-version="Py-ART version %s" % (pyart.__version__),
+version=f"Py-ART version {pyart.__version__}",
)
args = parser.parse_args()

2 changes: 1 addition & 1 deletion scripts/radar_plot
@@ -74,7 +74,7 @@ if __name__ == "__main__":
"-v",
"--version",
action="version",
-version="Py-ART version %s" % (pyart.__version__),
+version=f"Py-ART version {pyart.__version__}",
)

# ingest arguments
6 changes: 3 additions & 3 deletions tests/filters/test_gatefilter.py
@@ -15,9 +15,9 @@
# more
fdata2 = np.ma.masked_array(fdata, copy=True)
fdata2[2, 2] = np.ma.masked
-fdata2[3, 3] = np.NAN
-fdata2[4, 4] = np.PINF
-fdata2[5, 5] = np.NINF
+fdata2[3, 3] = np.nan
+fdata2[4, 4] = np.inf
+fdata2[5, 5] = -np.inf
radar.add_field("test_field2", {"data": fdata2})


1 change: 0 additions & 1 deletion tests/io/test_mdv_grid.py
@@ -242,7 +242,6 @@ def test_mdv_degree_grid():
fdata = grid.fields["refl"]["data"]
assert fdata.shape == (1, 1837, 3661)
assert np.ma.is_masked(fdata[0, 0, 0])
-assert_almost_equal(fdata[0, 130, 2536], 20.0, 1)

assert grid.x["units"] == "degree_E"
assert_almost_equal(grid.x["data"][0], -129.99, 2)
2 changes: 1 addition & 1 deletion tests/io/test_sigmet.py
@@ -392,4 +392,4 @@ def test_1byte_datatype():
def test_frequency():
frequency = radar.instrument_parameters["frequency"]
frequency["units"] == "s-1"
-assert_almost_equal(frequency["data"], 9.670725e09)
+assert_almost_equal(frequency["data"][0], np.float32(9.670725e09))
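The Sigmet data are stored as float32, so the element is compared against a float32 literal; the float32 representation of 9.670725e9 differs from the float64 literal by a few hundred Hz, which is far outside assert_almost_equal's default tolerance. A standalone illustration (not the Py-ART test fixture):

# float32 rounding at ~9.7 GHz is on the order of hundreds of Hz, so the
# expected value is cast to float32 before the comparison.
import numpy as np
from numpy.testing import assert_almost_equal

freq = np.array([9.670725e09], dtype="float32")
print(float(freq[0]) - 9.670725e09)     # nonzero: float32 representation error
assert_almost_equal(freq[0], np.float32(9.670725e09))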
