From 5329a50e6e10244261cb8a9a4b25275e8abcc90c Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 23 Mar 2021 20:15:01 +0000 Subject: [PATCH 01/53] dev --- cf/aggregate.py | 7 +- cf/bounds.py | 5 +- cf/constructs.py | 48 +++-- cf/coordinatereference.py | 6 +- cf/count.py | 3 - cf/data/cachedarray.py | 16 +- cf/data/data.py | 4 +- cf/dimensioncoordinate.py | 12 +- cf/field.py | 157 ++++++++-------- cf/mixin/coordinate.py | 229 ++++++++++++++++++----- cf/mixin/properties.py | 53 +++--- cf/mixin/propertiesdata.py | 214 +++++++++++----------- cf/mixin/propertiesdatabounds.py | 299 +++++++++++++++---------------- cf/read_write/read.py | 2 +- cf/read_write/um/umread.py | 26 +-- 15 files changed, 598 insertions(+), 483 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 52bde3b8ef..4794bc97b9 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -3293,7 +3293,7 @@ def _aggregate_2_fields( if getattr(field0, "id", None) is not None: standard_name = field1.get_property("standard_name", None) if standard_name is not None: - field0.set_property("standard_name", standard_name) + field0.set_property("standard_name", standard_name, copy=False) del field0.id # --- End: if @@ -3323,10 +3323,11 @@ def _aggregate_2_fields( if value1 is not None: if value0 is not None: field0.set_property( - prop, "%s :AGGREGATED: %s" % (value0, value1) + prop, f"{value0} :AGGREGATED: {value1}", copy=False ) else: - field0.set_property(prop, " :AGGREGATED: %s" % value1) + field0.set_property(prop, " :AGGREGATED: {value1}", + copy=False) else: if value0 is not None: field0.del_property(prop) diff --git a/cf/bounds.py b/cf/bounds.py index e86997b57a..0e4e9761e2 100644 --- a/cf/bounds.py +++ b/cf/bounds.py @@ -137,7 +137,7 @@ def contiguous(self, overlap=True, direction=None, period=None, verbose=1): False """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is None: return False @@ -320,6 +320,3 @@ def identity( return super().identity( default=default, strict=strict, relaxed=relaxed, nc_only=nc_only ) - - -# --- End: class diff --git a/cf/constructs.py b/cf/constructs.py index 7cb159c3f8..858cb204a2 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -45,7 +45,8 @@ def __repr__(self): """ return super().__repr__().replace("<", ">> d = c.filter_by_identity('ncvar%time') """ - # field_data_axes = self._field_data_axes - # - # if field_data_axes is not None: - # # Allows integer data domain axis positions, do we want this? 
TODO - # new_identities = [] - # for i in identities: - # try: - # _ = field_data_axes[i] - # except IndexError: - # new_identities.append(i) - # else: - # if isinstance(_, str): - # new_identities.append('key%'+_) - # else: - # new_identities.extend(['key%'+axis for axis in _]) - # else: - # new_identities = identities - # - # Allow keys without the 'key%' prefix identities = list(identities) for n, identity in enumerate(identities): if identity in self: identities[n] = "key%" + identity - # --- End: for - - return super().filter_by_identity(*identities) - -# --- End: class + ctype = [i for i in "XYZT" if i in identities] + + return super().filter_by_identity(*identities, ctype=ctype) diff --git a/cf/coordinatereference.py b/cf/coordinatereference.py index b16d5db62d..b111a8bf54 100644 --- a/cf/coordinatereference.py +++ b/cf/coordinatereference.py @@ -149,7 +149,7 @@ def __repr__(self): # ---------------------------------------------------------------- # Private methods # ---------------------------------------------------------------- - def _matching_values(self, value0, value1): + def _matching_values(self, value0, value1, basic=False): """Whether two coordinate reference construct identity values match. @@ -174,7 +174,7 @@ def _matching_values(self, value0, value1): # re.compile object return value0.search(value1) except (AttributeError, TypeError): - return self._equals(value1, value0) + return self._equals(value1, value0, basic=basic) # ---------------------------------------------------------------- # Private attributes @@ -560,7 +560,7 @@ def match_by_identity(self, *identities): ok = False for value0 in identities: for value1 in self_identities: - ok = self._matching_values(value0, value1) + ok = self._matching_values(value0, value1, basic=True) if ok: break # --- End: for diff --git a/cf/count.py b/cf/count.py index 61745c63c1..c2db119324 100644 --- a/cf/count.py +++ b/cf/count.py @@ -43,6 +43,3 @@ def __repr__(self): """ return super().__repr__().replace("<", " 1: @@ -138,7 +138,7 @@ def _infer_direction(self): # --- End: if # Still here? 
- data = self.get_bounds_data(None) + data = self.get_bounds_data(None, _fill_value=False) if data is not None: # Infer the direction from the bounds b = data[(0,) * (data.ndim - 1)].array @@ -856,11 +856,11 @@ def roll(self, axis, shift, inplace=False, i=False): c.dtype = numpy_result_type(c.dtype, period.dtype) b = c.get_bounds(None) - bounds_data = c.get_bounds_data(None) + bounds_data = c.get_bounds_data(None, _fill_value=False) if bounds_data is not None: b.dtype = numpy_result_type(bounds_data.dtype, period.dtype) - bounds_data = b.get_data(None) + bounds_data = b.get_data(None, set_fill_value=None) if direction: # Increasing @@ -882,7 +882,6 @@ def roll(self, axis, shift, inplace=False, i=False): c -= period if bounds_data is not None: b -= period - # --- End: if c._custom["direction"] = direction @@ -898,6 +897,3 @@ def role(self): _DEPRECATION_ERROR_ATTRIBUTE( self, "role", "Use attribute 'construct_type' instead" ) # pragma: no cover - - -# --- End: class diff --git a/cf/field.py b/cf/field.py index 9ffb1824ee..766cef1097 100644 --- a/cf/field.py +++ b/cf/field.py @@ -607,7 +607,7 @@ def __setitem__(self, indices, value): # self.__class__.__name__)) try: - data = value.get_data(None) + data = value.get_data(None, set_fill_value=False) except AttributeError: pass else: @@ -620,7 +620,8 @@ def __setitem__(self, indices, value): value = data - self.data[indices] = value + data = self.get_data(set_fill_value=False) + data[indices] = value def analyse_items(self, relaxed_identities=None): """Analyse a domain. @@ -1848,13 +1849,13 @@ def _binary_operation_old(self, other, method): field0.del_property("standard_name", None) field0.del_property("long_name", None) elif other_sn is not None: - field0.set_property("standard_name", other_sn) + field0.set_property("standard_name", other_sn, copy=False) if other_ln is None: field0.del_property("long_name", None) else: - field0.set_property("long_name", other_ln) + field0.set_property("long_name", other_ln, copy=False) elif ln is None and other_ln is not None: - field0.set_property("long_name", other_ln) + field0.set_property("long_name", other_ln, copy=False) # Warning: This code is replicated in PropertiesData new_units = field0.Units @@ -2407,13 +2408,13 @@ def _binary_operation(self, other, method): field0.del_property("standard_name", None) field0.del_property("long_name", None) elif other_sn is not None: - field0.set_property("standard_name", other_sn) + field0.set_property("standard_name", other_sn, copy=False) if other_ln is None: field0.del_property("long_name", None) else: - field0.set_property("long_name", other_ln) + field0.set_property("long_name", other_ln, copy=False) elif ln is None and other_ln is not None: - field0.set_property("long_name", other_ln) + field0.set_property("long_name", other_ln, copy=False) # Warning: This block of code is replicated in PropertiesData new_units = field0.Units @@ -2625,7 +2626,7 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): `list` """ - data = item.get_data(None) + data = item.get_data(None, set_fill_value=False) if axes is None: # -------------------------------------------------------- @@ -4800,7 +4801,7 @@ def _weights_geometry_area( if measure and spherical and aux_Z is not None: # Multiply by radius squared, accounting for any Z # coordinates, to get the actual area - z = aux_Z.get_data(None) + z = aux_Z.get_data(None, set_fill_value=False) if z is None: r = radius else: @@ -5320,7 +5321,7 @@ def _weights_measure( ) # --- End: for - clm = clm.get_data().copy() 
+ clm = clm.get_data(set_fill_value=False).copy() if clm_axes != clm_axes0: iaxes = [clm_axes0.index(axis) for axis in clm_axes] clm.squeeze(iaxes, inplace=True) @@ -5843,7 +5844,7 @@ def Conventions(self): @Conventions.setter def Conventions(self, value): - self.set_property("Conventions", value) + self.set_property("Conventions", value, copy=False) @Conventions.deleter def Conventions(self): @@ -5876,7 +5877,7 @@ def featureType(self): @featureType.setter def featureType(self, value): - self.set_property("featureType", value) + self.set_property("featureType", value, copy=False) @featureType.deleter def featureType(self): @@ -5906,7 +5907,7 @@ def institution(self): @institution.setter def institution(self, value): - self.set_property("institution", value) + self.set_property("institution", value, copy=False) @institution.deleter def institution(self): @@ -5937,7 +5938,7 @@ def references(self): @references.setter def references(self, value): - self.set_property("references", value) + self.set_property("references", value, copy=False) @references.deleter def references(self): @@ -6003,7 +6004,7 @@ def source(self): @source.setter def source(self, value): - self.set_property("source", value) + self.set_property("source", value, copy=False) @source.deleter def source(self): @@ -6034,7 +6035,7 @@ def title(self): @title.setter def title(self, value): - self.set_property("title", value) + self.set_property("title", value, copy=False) @title.deleter def title(self): @@ -6146,7 +6147,7 @@ def cell_area( # w.override_units(radius.Units, inplace=True) # # --- End: if - w.set_property("standard_name", "cell_area") + w.set_property("standard_name", "cell_area", copy=False) return w @@ -6409,7 +6410,8 @@ def concatenate(cls, fields, axis=0, _preserve=True): return out new_data = Data.concatenate( - [f.get_data() for f in fields], axis=axis, _preserve=_preserve + [f.get_data(set_fill_value=False) + for f in fields], axis=axis, _preserve=_preserve ) # Change the domain axis size @@ -6537,7 +6539,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): self, "cyclic", kwargs ) # pragma: no cover - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is None: return set() @@ -7675,43 +7677,42 @@ def digitize( # ------------------------------------------------------------ f.set_property( "long_name", - "Bin index to which each {!r} value belongs".format( - self.identity() - ), + f"Bin index to which each {self.identity()!r} value belongs", + copy=False ) - f.set_property("bin_bounds", bins.array.flatten()) + f.set_property("bin_bounds", bins.array.flatten(), copy=False) bin_count = bins.shape[0] if open_ends: bin_count += 2 - f.set_property("bin_count", bin_count) + f.set_property("bin_count", bin_count, copy=False) if upper: bin_interval_type = "lower: open upper: closed" else: bin_interval_type = "lower: closed upper: open" - f.set_property("bin_interval_type", bin_interval_type) + f.set_property("bin_interval_type", bin_interval_type, copy=False) standard_name = f.del_property("standard_name", None) if standard_name is not None: - f.set_property("bin_standard_name", standard_name) + f.set_property("bin_standard_name", standard_name, copy=False) else: long_name = f.del_property("long_name", None) if long_name is not None: - f.set_property("bin_long_name", long_name) + f.set_property("bin_long_name", long_name, copy=False) # --- End: if bin_units = bins.Units units = getattr(bin_units, "units", None) if units is not None: - f.set_property("bin_units", 
units) + f.set_property("bin_units", units, copy=False) calendar = getattr(bin_units, "calendar", None) if calendar is not None: - f.set_property("bin_calendar", calendar) + f.set_property("bin_calendar", calendar, copy=False) if return_bins: return f, bins @@ -8178,7 +8179,7 @@ def bin( True """ - logger.info(" Method: {}".format(method)) # pragma: no cover + logger.info(f" Method: {method}") # pragma: no cover if method == "integral": if weights is None: @@ -8234,12 +8235,12 @@ def bin( for f in digitized[::-1]: logger.info( - " Digitized field input : {!r}".format(f) + f" Digitized field input : {f!r}" # DCH ) # pragma: no cover f = self._conform_for_data_broadcasting(f) logger.info( - " conformed: {!r}".format(f) + f" conformed: {f!r}" # DCH ) # pragma: no cover if not self._is_broadcastable(f.shape): @@ -8285,7 +8286,8 @@ def bin( dim.long_name = bin_long_name if bin_interval_type is not None: - dim.set_property("bin_interval_type", bin_interval_type) + dim.set_property("bin_interval_type", + bin_interval_type, copy=False) # Create units for the bins units = Units(bin_units, bin_calendar) @@ -8301,9 +8303,7 @@ def bin( dim.set_bounds(self._Bounds(data=bounds_data)) logger.info( - " bins : {} {!r}".format( - dim.identity(), bounds_data - ) + f" bins : {dim.identity()} {bounds_data!r}" # DCH ) # pragma: no cover # Set domain axis and dimension coordinate for bins @@ -8358,19 +8358,19 @@ def bin( del f del y - logger.info(" Weights: {}".format(weights)) # pragma: no cover + # DCH + logger.info(f" Weights: {weights}") # pragma: no cover logger.info( - " Number of indexed ({}) bins: {}".format( - ", ".join(names), unique_indices.shape[1] - ) + f" Number of indexed ({', '.join(names)}) bins: " + f"{unique_indices.shape[1]}" ) # pragma: no cover logger.info( - " ({}) bin indices:".format(", ".join(names)) + f" ({', '.join(names)}) bin indices:" # DCH ) # pragma: no cover # Loop round unique collections of bin indices for i in zip(*unique_indices): - logger.info("{}".format(" ".join(str(i)))) + logger.info(f"{' '.join(str(i))}") b = bin_indices[0] == i[0] for a, n in zip(bin_indices[1:], i[1:]): @@ -8708,7 +8708,7 @@ def del_coordinate_reference( if key is None: return self._default( default, - "Can't identify construct from {!r}".format(identity), + f"Can't identify construct from {identity!r}", ) ref = self.del_construct(key) @@ -8727,7 +8727,7 @@ def del_coordinate_reference( c_key = self.construct(construct, key=True, default=None) if c_key is None: return self._default( - default, "Can't identify construct from {!r}".format(construct) + default, f"Can't identify construct from {construct!r}" ) for key, ref in tuple(self.coordinate_references.items()): @@ -8885,7 +8885,7 @@ def del_domain_axis( self.squeeze(dakey, inplace=True) for ckey, construct in self.constructs.filter_by_data().items(): - data = construct.get_data(None) + data = construct.get_data(None, set_fill_value=False) if data is None: continue @@ -8901,7 +8901,6 @@ def del_domain_axis( if not construct_axes: self.del_construct(ckey) - # --- End: for return domain_axis @@ -9002,7 +9001,7 @@ def get_coordinate_reference( c_key = self.construct(construct, key=True, default=None) if c_key is None: return self._default( - default, "Can't identify construct from {!r}".format(construct) + default, f"Can't identify construct from {construct!r}" ) for cr_key, ref in tuple(self.coordinate_references.items()): @@ -12415,7 +12414,7 @@ def _group_weights(weights, iaxis, index): raise ValueError( "Can't collapse: Need an unambiguous 1-d " 
"coordinate construct when " - "group_span={!r}".format(group_span) + f"group_span={group_span!r}" ) bounds = coord.get_bounds(None) @@ -12423,11 +12422,11 @@ def _group_weights(weights, iaxis, index): raise ValueError( "Can't collapse: Need unambiguous 1-d " "coordinate cell bounds when " - "group_span={!r}".format(group_span) + f"group_span={group_span!r}" ) - lb = bounds[0, 0].get_data() - ub = bounds[-1, 1].get_data() + lb = bounds[0, 0].get_data(set_fill_value=False) + ub = bounds[-1, 1].get_data(set_fill_value=False) if coord.T: lb = lb.datetime_array.item() ub = ub.datetime_array.item() @@ -15255,7 +15254,7 @@ def cumsum( bounds = coord.get_bounds() bounds[:, 0] = bounds[0, 0] - data = coord.get_data(None) + data = coord.get_data(None, set_fill_value=False) if coordinate is not None and data is not None: if coordinate == "mid_range": @@ -15272,13 +15271,11 @@ def cumsum( "(None, 'mid_range', 'minimum', 'maximum'). " "Got {!r}".format(coordinate) ) - # --- End: if # Add a cell method f._update_cell_methods( method="sum", domain_axes=f.domain_axes(axis_key) ) - # --- End: if return f @@ -15354,7 +15351,6 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): construct_axes.index(axis) for axis in construct_flip_axes ] construct.flip(iaxes, inplace=True) - # --- End: for return f @@ -15522,7 +15518,7 @@ def anchor( return f # --- End: if - c = dim.get_data() + c = dim.get_data(set_fill_value=False) if dim.increasing: # Adjust value so it's in the range [c[0], c[0]+period) @@ -15718,7 +15714,7 @@ def autocyclic(self, verbose=None): logger.debug(2) # pragma: no cover return False - bounds_data = bounds.get_data(None) + bounds_data = bounds.get_data(None, set_fill_value=False) if bounds_data is None: self.cyclic(key, iscyclic=False) logger.debug(3) # pragma: no cover @@ -17203,7 +17199,6 @@ def dimension_coordinate( c = self.dimension_coordinates.filter_by_axis( "exact", da_key ) - # --- End: if if key: return c.key(default=default) @@ -17733,41 +17728,42 @@ def get_data_axes(self, identity=None, default=ValueError()): :Parameters: identity: optional - Select the construct for which to return the domain axis - constructs spanned by its data. By default the field - construct is selected. May be: + Select the construct for which to return the domain + axis constructs spanned by its data. By default the + field construct is selected. May be: * The identity or key of a metadata construct. A construct identity is specified by a string (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + ``'ncvar%lat'``, etc.); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match + via `re.search`. - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + Each construct has a number of identities, and is + selected if any of them match any of those provided. A + construct's identities are those returned by its + `!identities` method. In the following example, the + construct ``x`` has six identities: >>> x.identities() ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] A construct key may optionally have the ``'key%'`` prefix. 
For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + ``'key%dimensioncoordinate2'`` are both acceptable + keys. Note that in the output of a `print` call or `!dump` method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + identities, and so this description may always be used + as an *identity* argument. default: optional - Return the value of the *default* parameter if the data - axes have not been set. If set to an `Exception` instance - then it will be raised instead. + Return the value of the *default* parameter if the + data axes have not been set. If set to an `Exception` + instance then it will be raised instead. :Returns: @@ -18387,7 +18383,10 @@ def percentile( c = c.copy() - bounds = c.get_bounds_data(c.get_data(None)) + bounds = c.get_bounds_data( + c.get_data(None, set_fill_value=False), + _fill_value=False + ) if bounds is not None and bounds.shape[0] > 1: bounds = Data( [bounds.min().datum(), bounds.max().datum()], @@ -19332,7 +19331,7 @@ def where( data_axes = g.get_data_axes() - construct_data = construct.get_data(None) + construct_data = construct.get_data(None, set_fill_value=False) if construct_data is None: raise ValueError("{!r} has no data".format(construct)) diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 01ef49c2fb..f2a220b698 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -1,7 +1,10 @@ +from itertools import chain + from ..decorators import ( _inplace_enabled, _inplace_enabled_define_and_cleanup, -) + _deprecated_kwarg_check, +) from ..data.data import Data @@ -21,7 +24,8 @@ def ctype(self): """The CF coordinate type. One of ``'T'``, ``'X'``, ``'Y'`` or ``'Z'`` if the coordinate - construct is for the respective CF axis type, otherwise `None`. + construct is for the respective CF axis type, otherwise + `None`. .. seealso:: `T`, `X`, `~cf.Coordinate.Y`, `Z` @@ -38,17 +42,24 @@ def ctype(self): 'T' """ - for t in ("T", "X", "Y", "Z"): - if getattr(self, t): - return t + if self.X: + return "X" + + if self.T: + return "T" + + if self.Y: + return "Y" + + if self.Z: + return "Z" @property def T(self): - """True if and only if the data are coordinates for a CF 'T' - axis. + """True if and only if the data are coordinates for a CF 'T' axis. - CF 'T' axis coordinates are defined by having one or more of the - following: + CF 'T' axis coordinates are defined by having one or more of + the following: * The `axis` property has the value ``'T'`` * Units of latitude @@ -73,17 +84,15 @@ def T(self): bounds = self.get_bounds(None) if bounds is not None: return bounds.T - # --- End: if return False @property def X(self): - """True if and only if the data are coordinates for a CF 'X' - axis. + """True if and only if the data are coordinates for a CF 'X' axis. - CF 'X' axis coordinates are defined by having one or more of the - following: + CF 'X' axis coordinates are defined by having one or more of + the following: * The `axis` property has the value ``'X'`` * Units of longitude @@ -138,17 +147,15 @@ def X(self): bounds = self.get_bounds(None) if bounds is not None: return bounds.X - # --- End: if return False @property def Y(self): - """True if and only if the data are coordinates for a CF 'Y' - axis. + """True if and only if the data are coordinates for a CF 'Y' axis. 
- CF 'Y' axis coordinates are defined by having one or more of the - following: + CF 'Y' axis coordinates are defined by having one or more of + the following: * The `axis` property has the value ``'Y'`` * Units of latitude @@ -190,17 +197,15 @@ def Y(self): bounds = self.get_bounds(None) if bounds is not None: return bounds.Y - # --- End: if return False @property def Z(self): - """True if and only if the data are coordinates for a CF 'Z' - axis. + """True if and only if the data are coordinates for a CF 'Z' axis. - CF 'Z' axis coordinates are defined by having one or more of the - following: + CF 'Z' axis coordinates are defined by having one or more of + the following: * The `axis` property has the value ``'Z'`` * Units of pressure, level, layer or sigma_level @@ -278,7 +283,6 @@ def Z(self): bounds = self.get_bounds(None) if bounds is not None: return bounds.Z - # --- End: if return False @@ -313,7 +317,7 @@ def axis(self): @axis.setter def axis(self, value): - self.set_property("axis", value) + self.set_property("axis", value, copy=False) @axis.deleter def axis(self): @@ -324,14 +328,14 @@ def positive(self): """The positive CF property. The direction of positive (i.e., the direction in which the - coordinate values are increasing), whether up or down, cannot in - all cases be inferred from the `units`. The direction of positive - is useful for applications displaying the data. The `positive` - attribute may have the value ``'up'`` or ``'down'`` (case - insensitive). - - For example, if ocean depth coordinates encode the depth of the - surface as 0 and the depth of 1000 meters as 1000 then the + coordinate values are increasing), whether up or down, cannot + in all cases be inferred from the `units`. The direction of + positive is useful for applications displaying the data. The + `positive` attribute may have the value ``'up'`` or ``'down'`` + (case insensitive). + + For example, if ocean depth coordinates encode the depth of + the surface as 0 and the depth of 1000 meters as 1000 then the `postive` property will have the value `'down'`. **Examples:** @@ -351,7 +355,7 @@ def positive(self): @positive.setter def positive(self, value): - self.set_property("positive", value) + self.set_property("positive", value, copy=False) self._direction = None @positive.deleter @@ -366,10 +370,10 @@ def positive(self): def autoperiod(self, inplace=False): """TODO Set dimensions to be cyclic. - TODO A dimension is set to be cyclic if it has a unique longitude (or - grid longitude) dimension coordinate construct with bounds and the - first and last bounds values differ by 360 degrees (or an - equivalent amount in other units). + TODO A dimension is set to be cyclic if it has a unique + longitude (or grid longitude) dimension coordinate construct + with bounds and the first and last bounds values differ by 360 + degrees (or an equivalent amount in other units). .. versionadded:: 3.5.0 @@ -403,5 +407,150 @@ def autoperiod(self, inplace=False): return c + @_deprecated_kwarg_check("relaxed_identity") + def identity( + self, + default="", + strict=False, + relaxed=False, + nc_only=False, + relaxed_identity=None, + _ctype=True, + ): + """Return the canonical identity. + + By default the identity is the first found of the following: + + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. + * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. 
+ * The netCDF variable name, preceded by ``'ncvar%'``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The value of the *default* parameter. + + .. versionadded:: 3.0.0 + + .. seealso:: `id`, `identities` + + :Parameters: + + default: optional + If no identity can be found then return the value of the + default parameter. + + strict: `bool`, optional + If True then the identity is the first found of only the + "standard_name" property or the "id" attribute. + + relaxed: `bool`, optional + If True then the identity is the first found of only the + "standard_name" property, the "id" attribute, the + "long_name" property or the netCDF variable name. + + nc_only: `bool`, optional + If True then only take the identity from the netCDF + variable name. + + relaxed_identity: deprecated at version 3.0.0 + + :Returns: -# --- End: class + The identity. + + **Examples:** + + >>> f.properties() + {'foo': 'bar', + 'long_name': 'Air Temperature', + 'standard_name': 'air_temperature'} + >>> f.nc_get_variable() + 'tas' + >>> f.identity() + 'air_temperature' + >>> f.del_property('standard_name') + 'air_temperature' + >>> f.identity(default='no identity') + 'air_temperature' + >>> f.identity() + 'long_name=Air Temperature' + >>> f.del_property('long_name') + >>> f.identity() + 'ncvar%tas' + >>> f.nc_del_variable() + 'tas' + >>> f.identity() + 'ncvar%tas' + >>> f.identity() + '' + >>> f.identity(default='no identity') + 'no identity' + + """ + out = super().identity(default=None) + + if out is None: + ctype = self.ctype + if ctype is not None: + return ctype + + return default + + def identities(self, generator=False, ctype="XTYZ"): + """Return all possible identities. + + The identities comprise: + + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. + * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. + * All other properties (including "standard_name"), preceded by + the property name and an ``'='``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The netCDF variable name, preceded by ``'ncvar%'``. + + .. versionadded:: 3.0.0 + + .. seealso:: `id`, `identity` +TODO + :Returns: + + `list` + The identities. + + **Examples:** + + >>> f.properties() + {'foo': 'bar', + 'long_name': 'Air Temperature', + 'standard_name': 'air_temperature'} + >>> f.nc_get_variable() + 'tas' + >>> f.identities() + ['air_temperature', + 'long_name=Air Temperature', + 'foo=bar', + 'standard_name=air_temperature', + 'ncvar%tas'] + + """ + def _ctype_iter(self, ctype): + stop = False + for c in ctype: + if stop: + break + + if getattr(self, c): + stop = True + yield c + + identities = super().identities(generator=True) + + g = chain(identities, _ctype_iter(self, ctype)) + if generator: + return g + + return list(g) diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index a7508765f9..fc3472b11c 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -1,4 +1,4 @@ -from copy import deepcopy +from cfdm.core.functions import deepcopy from ..functions import atol as cf_atol, rtol as cf_rtol @@ -30,8 +30,8 @@ def __new__(cls, *args, **kwargs): """Store component classes. .. note:: If a child class requires a different component - classes than the ones defined here, then they must be - redefined in the child class. + classes than the ones defined here, then they must + be redefined in the child class. 
""" instance = super().__new__(cls) @@ -64,7 +64,7 @@ def _rtol(self): # ---------------------------------------------------------------- # Private methods # ---------------------------------------------------------------- - def _matching_values(self, value0, value1, units=False): + def _matching_values(self, value0, value1, units=False, basic=False): """Whether two values match. The definition of "match" depends on the types of *value0* and @@ -102,7 +102,7 @@ def _matching_values(self, value0, value1, units=False): if units and isinstance(value0, str): return Units(value0).equals(Units(value1)) - return self._equals(value1, value0) + return self._equals(value1, value0, basic=basic) # ---------------------------------------------------------------- # Attributes @@ -112,14 +112,16 @@ def id(self): """An identity for the {{class}} object. The `id` attribute can be used to unambiguously identify - constructs. This can be useful when identification is not possible - from the existing properties, either because they are missing or - because they do not provide sufficiently unique information. + constructs. This can be useful when identification is not + possible from the existing properties, either because they are + missing or because they do not provide sufficiently unique + information. - In general it will only be defined if explicitly set by the user. + In general it will only be defined if explicitly set by the + user. - Note that `id` is not a CF property and so is not read from, nor - written to, datasets. + Note that `id` is not a CF property and so is not read from, + nor written to, datasets. .. seealso:: `identity`, `identities`, `match_by_identity` @@ -186,7 +188,7 @@ def calendar(self): @calendar.setter def calendar(self, value): - self.set_property("calendar", value) + self.set_property("calendar", value, copy=False) @calendar.deleter def calendar(self): @@ -218,7 +220,7 @@ def comment(self): @comment.setter def comment(self, value): - self.set_property("comment", value) + self.set_property("comment", value, copy=False) @comment.deleter def comment(self): @@ -250,7 +252,7 @@ def history(self): @history.setter def history(self, value): - self.set_property("history", value) + self.set_property("history", value, copy=False) @history.deleter def history(self): @@ -283,7 +285,7 @@ def leap_month(self): @leap_month.setter def leap_month(self, value): - self.set_property("leap_month", value) + self.set_property("leap_month", value, copy=False) @leap_month.deleter def leap_month(self): @@ -350,7 +352,7 @@ def long_name(self): @long_name.setter def long_name(self, value): - self.set_property("long_name", value) + self.set_property("long_name", value, copy=False) @long_name.deleter def long_name(self): @@ -387,7 +389,7 @@ def month_lengths(self): @month_lengths.setter def month_lengths(self, value): - self.set_property("month_lengths", tuple(value)) + self.set_property("month_lengths", tuple(value), copy=False) @month_lengths.deleter def month_lengths(self): @@ -421,7 +423,7 @@ def standard_name(self): @standard_name.setter def standard_name(self, value): - self.set_property("standard_name", value) + self.set_property("standard_name", value, copy=False) @standard_name.deleter def standard_name(self): @@ -455,7 +457,7 @@ def units(self): @units.setter def units(self, value): - self.set_property("units", value) + self.set_property("units", value, copy=False) @units.deleter def units(self): @@ -555,7 +557,7 @@ def valid_range(self): @valid_range.setter def valid_range(self, value): - 
self.set_property("valid_range", tuple(value)) + self.set_property("valid_range", tuple(value), copy=False) @valid_range.deleter def valid_range(self): @@ -615,7 +617,6 @@ def get_property(self, prop, default=ValueError()): return getattr(self, prop) except AttributeError as err: return self._default(default, err) - # --- End: if # Still here? Then get a non-special property return super().get_property(prop, default=default) @@ -719,7 +720,6 @@ def del_property(self, prop, default=ValueError()): else: delattr(self, prop) return out - # --- End: if # Still here? Then del a non-special attribute return super().del_property(prop, default=default) @@ -782,14 +782,12 @@ def match_by_identity(self, *identities): ok = False for value0 in identities: for value1 in self_identities: - ok = self._matching_values(value0, value1) + ok = self._matching_values(value0, value1, basic=True) if ok: break - # --- End: for if ok: break - # --- End: for return ok @@ -850,10 +848,9 @@ def match_by_ncvar(self, *ncvars): ok = False for value0 in ncvars: - ok = self._matching_values(value0, ncvar) + ok = self._matching_values(value0, ncvar, basic=True) if ok: break - # --- End: for return ok @@ -958,7 +955,6 @@ def match_by_property(self, *mode, **properties): break elif not ok: break - # --- End: for return ok @@ -1004,7 +1000,6 @@ def properties(self): out.pop(prop, None) else: out[prop] = value - # --- End: for return out diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 0aa922a0f9..4cd1ede664 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -1,4 +1,5 @@ from functools import partial as functools_partial +from itertools import chain import logging @@ -66,7 +67,7 @@ def __contains__(self, value): x.__contains__(y) <==> y in x """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: return False @@ -103,12 +104,12 @@ def __setitem__(self, indices, value): x.__setitem__(indices, value) <==> x[indices] """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise ValueError("Can't set elements when there is no data") try: - value = value.get_data() + value = value.get_data(set_fill_value=None) except AttributeError: pass @@ -569,7 +570,7 @@ def _binary_operation(self, y, method): >>> u._binary_operation(v, '__idiv__') """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise ValueError( "Can't apply {} to a {} object with no data: {!r}".format( @@ -613,13 +614,13 @@ def _binary_operation(self, y, method): new.del_property("standard_name", None) new.del_property("long_name", None) elif other_sn is not None: - new.set_property("standard_name", other_sn) + new.set_property("standard_name", other_sn, copy=False) if other_ln is None: new.del_property("long_name", None) else: - new.set_property("long_name", other_ln) + new.set_property("long_name", other_ln, copy=False) elif ln is None and other_ln is not None: - new.set_property("long_name", other_ln) + new.set_property("long_name", other_ln, copy=False) new_units = new.Units if ( @@ -740,8 +741,8 @@ def _equivalent_data(self, other, atol=None, rtol=None, verbose=None): if not self.has_data(): return True - data0 = self.get_data() - data1 = other.get_data() + data0 = self.get_data(set_fill_value=None) + data1 = other.get_data(set_fill_value=None) if data0.shape != data1.shape: logger.info( @@ -895,7 +896,7 @@ def _unary_operation(self, method): [1 2 3 4 5] """ - data = 
self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise ValueError( "Can't apply {} to a {} with no data".format( @@ -912,7 +913,7 @@ def _unary_operation(self, method): def _YMDhms(self, attr): """TODO.""" - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise ValueError( "ERROR: Can't get {}s when there is no data array".format(attr) @@ -923,7 +924,7 @@ def _YMDhms(self, attr): out.set_data(getattr(data, attr), copy=False) out.del_property("standard_name", None) - out.set_property("long_name", attr) + out.set_property("long_name", attr, copy=False) out.override_units(Units(), inplace=True) @@ -1065,7 +1066,7 @@ def binary_mask(self): """ out = type(self)() - out.set_propoerty("long_name", "binary_mask") + out.set_propoerty("long_name", "binary_mask", copy=False) out.set_data(self.data.binary_mask(), copy=False) return out @@ -1173,7 +1174,7 @@ def Units(self): """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: return data.Units @@ -1186,7 +1187,7 @@ def Units(self): @Units.setter def Units(self, value): - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: data.Units = value else: @@ -1359,7 +1360,7 @@ def mask(self): out.override_units(Units(), inplace=True) out.clear_properties() - out.set_property("long_name", "mask") + out.set_property("long_name", "mask", copy=False) out.nc_del_variable(default=None) @@ -1430,14 +1431,22 @@ def calendar(self): False """ - value = getattr(self.Units, "calendar", None) - if value is None: + try: + return self.Units.calendar + except AttributeError: raise AttributeError( - "{} doesn't have CF property 'calendar'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have CF property " + "'calendar'" ) - return value + +# value = getattr(self.Units, "calendar", None) +# if value is None: +# raise AttributeError( +# "{} doesn't have CF property 'calendar'".format( +# self.__class__.__name__ +# ) +# ) +# return value @calendar.setter def calendar(self, value): @@ -1599,15 +1608,20 @@ def units(self): True """ - value = getattr(self.Units, "units", None) - if value is None: + try: + return self.Units.units + except AttributeError: raise AttributeError( - "{} doesn't have CF property 'units'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have CF property 'units'" ) - - return value + +# value = getattr(self.Units, "units", None) +# if value is None: +# raise AttributeError( +# f"{self.__class__.__name__} doesn't have CF property 'units'" +# ) +# +# return value @units.setter def units(self, value): @@ -1693,7 +1707,7 @@ def mask_invalid(self, inplace=False, i=False): """ v = _inplace_enabled_define_and_cleanup(self) - data = v.get_data(None) + data = v.get_data(None, set_fill_value=None) if data is not None: data.mask_invalid(inplace=True) @@ -2304,7 +2318,7 @@ def dtype(self): [ 0.5 1.5 2.5] """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise AttributeError( "{} doesn't have attribute 'dtype'".format( @@ -2317,13 +2331,13 @@ def dtype(self): @dtype.setter def dtype(self, value): # DCH - allow dtype to be set before data c.f. 
Units - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: data.dtype = value @dtype.deleter def dtype(self): - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: del data.dtype @@ -2349,7 +2363,7 @@ def hardmask(self): False """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise AttributeError( "{} doesn't have attribute 'hardmask'".format( @@ -2360,8 +2374,8 @@ def hardmask(self): return data.hardmask @hardmask.setter - def hardmask(self, value): - data = self.get_data(None) + def hardmask(self, value): + data = self.get_data(None, set_fill_value=None) if data is None: raise AttributeError( "{} doesn't have attribute 'hardmask'".format( @@ -2471,7 +2485,7 @@ def isscalar(self): False """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: return False @@ -2532,7 +2546,7 @@ def chunk(self, chunksize=None): `None` """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: data.chunk(chunksize) @@ -2604,7 +2618,7 @@ def close(self): >>> f.close() """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is not None: data.close() @@ -2631,7 +2645,8 @@ def concatenate(cls, variables, axis=0, _preserve=True): out = variable0.copy() # data=False) data = Data.concatenate( - [v.get_data() for v in variables], axis=axis, _preserve=_preserve + [v.get_data(set_fill_value=None) + for v in variables], axis=axis, _preserve=_preserve ) out.set_data(data, copy=False) @@ -2767,7 +2782,7 @@ def count(self): >>> n = f.count() """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise AttributeError("Can't count when there are data") @@ -2786,7 +2801,7 @@ def count_masked(self): >>> n = f.count_masked() """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise AttributeError("Can't count masked when there are data") @@ -2821,7 +2836,7 @@ def cyclic(self, axes=None, iscyclic=True): {1} TODO """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: return set() @@ -2918,7 +2933,7 @@ def datum(self, *index): 6 """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: raise ValueError( "ERROR: Can't return an element when there is no data array" @@ -3482,7 +3497,7 @@ def match_by_naxes(self, *naxes): if not naxes: return True - data = self.get_data(None) + data = self.get_data(None, set_fill_value=None) if data is None: return False @@ -3491,7 +3506,6 @@ def match_by_naxes(self, *naxes): ok = ndim == self_ndim if ok: return True - # --- End: for return False @@ -4774,26 +4788,26 @@ def identity( n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default n = self.get_property("standard_name", None) if n is not None: - return "{0}".format(n) + return str(n) n = getattr(self, "id", None) if n is not None: - return "id%{0}".format(n) + return f"id%{n}" if relaxed: n = self.get_property("long_name", None) if n is not None: - return "long_name={0}".format(n) + return f"long_name={n}" n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default @@ -4803,22 +4817,20 @@ def identity( for prop in ("cf_role", "axis", "long_name"): n = self.get_property(prop, None) if n is 
not None: - return "{0}={1}".format(prop, n) - # --- End: for + return f"{prop}={n}" - if _ctype: - for ctype in ("X", "Y", "Z", "T"): - if getattr(self, ctype, False): - return ctype - # --- End: if +# if _ctype: +# for ctype in ("X", "Y", "Z", "T"): +# if getattr(self, ctype, False): +# return ctype n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default - def identities(self): + def identities(self, generator=False, **kwargs): """Return all possible identities. The identities comprise: @@ -4836,7 +4848,7 @@ def identities(self): .. versionadded:: 3.0.0 .. seealso:: `id`, `identity` - +TODO :Returns: `list` @@ -4858,33 +4870,19 @@ def identities(self): 'ncvar%tas'] """ - out = super().identities() - + id_identity = "" i = getattr(self, "id", None) if i is not None: - # Insert id attribute - i = "id%{0}".format(i) - if not out: - out = [i] - else: - out0 = out[0] - if ( - "=" in out0 - or "%" in out0 - or True in [a == out0 for a in "XYZT"] - ): - out.insert(0, i) - else: - out.insert(1, i) - # --- End: if + id_identity = ("id%" + i,) - for ctype in ("X", "Y", "Z", "T"): - if getattr(self, ctype, False): - out.append(ctype) - # --- End: for + identities = super().identities(generator=True) - return out + g = chain(id_identity, identities) + if generator: + return g + return list(g) + def inspect(self): """Inspect the object for debugging. @@ -4935,7 +4933,7 @@ def iscyclic(self, axis): return axis[0] in self.cyclic() - def get_data(self, default=ValueError()): + def get_data(self, default=ValueError(), set_fill_value=True): """Return the data. Note that a `Data` instance is returned. Use its `array` attribute @@ -4977,7 +4975,8 @@ def get_data(self, default=ValueError()): None """ - return super().get_data(default=default, _units=False) + return super().get_data(default=default, _units=False, + _fill_value=set_fill_value) @_inplace_enabled(default=False) @_manage_log_level_via_verbosity @@ -5150,7 +5149,7 @@ def override_calendar(self, calendar, inplace=False, i=False): """ v = _inplace_enabled_define_and_cleanup(self) - data = v.get_data(None) + data = v.get_data(None, set_fill_value=False) if data is not None: data.override_calendar(calendar, inplace=True) v._custom["Units"] = data.Units @@ -5220,7 +5219,7 @@ def override_units(self, units, inplace=False, i=False): units = Units(units) - data = v.get_data(None) + data = v.get_data(None, set_fill_value=False) if data is not None: data.override_units(units, inplace=True) else: @@ -5416,25 +5415,32 @@ def set_data(self, data, copy=True, inplace=True): None """ - data = self._Data(data, copy=False) + _Data = self._Data + if not isinstance(data, _Data): + data = _Data(data, copy=False) - if not data.Units: - units = getattr(self, "Units", None) + units = self.Units + data_units = data.Units + if not data_units: if units is not None: if copy: copy = False - data = data.override_units(units, inplace=False) + data = data.override_units(units) else: data.override_units(units, inplace=True) - # --- End: if - + elif units: + if units.equivalent(data_units): + if units != data_units: + if copy: + copy= False + data = data.copy() + + data.Units = units + else: + raise ValueError("Can't set data with incompatible units") + return super().set_data(data, copy=copy, inplace=inplace) - # if copy: - # data = data.copy() - # - # self._set_component('data', data, copy=False) - @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def where( @@ -5459,7 +5465,7 @@ def where( """ v = 
_inplace_enabled_define_and_cleanup(self) - data = v.get_data(None) + data = v.get_data(None, set_fill_value=False) if data is None: raise ValueError("ERROR: Can't set data in nonexistent data array") @@ -5478,7 +5484,7 @@ def where( condition = condition_data try: - x_data = x.get_data(None) + x_data = x.get_data(None, set_fill_value=False) except AttributeError: pass else: @@ -5492,7 +5498,7 @@ def where( x = x_data try: - y_data = y.get_data(None) + y_data = y.get_data(None, set_fill_value=False) except AttributeError: pass else: @@ -5686,9 +5692,6 @@ def select(self, *args, **kwargs): _DEPRECATION_ERROR_METHOD(self, "select") # pragma: no cover -# --- End: class - - class Subspace: """TODO.""" @@ -5716,6 +5719,3 @@ def __setitem__(self, indices, value): value = value.data self.variable[indices] = value - - -# --- End: class diff --git a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index 13f7ce9944..46f9399448 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -97,7 +97,7 @@ def __getitem__(self, indices): "{}.__getitem__: findices = {}".format(cname, findices) ) # pragma: no cover - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: new.set_data(data[findices], copy=False) @@ -109,7 +109,7 @@ def __getitem__(self, indices): # Subspace the bounds, if there are any bounds = self.get_bounds(None) if bounds is not None: - bounds_data = bounds.get_data(None) + bounds_data = bounds.get_data(None, set_fill_value=False) if bounds_data is not None: findices = list(findices) # if data.ndim <= 1 and not self.has_geometry(): @@ -126,7 +126,6 @@ def __getitem__(self, indices): # reverse its bounds (as per 7.1 of the # conventions) findices.append(slice(None, None, -1)) - # --- End: if if auxiliary_mask: findices[1] = [ @@ -140,7 +139,6 @@ def __getitem__(self, indices): ) # pragma: no cover new.bounds.set_data(bounds_data[tuple(findices)], copy=False) - # --- End: if # Remove the direction, as it may now be wrong new._custom.pop("direction", None) @@ -201,7 +199,6 @@ def __setitem__(self, indices, value): indices = parse_indices(self.shape, indices) indices.append(Ellipsis) bounds[tuple(indices)] = value_bounds - # --- End: if def __eq__(self, y): """The rich comparison operator ``==`` @@ -488,7 +485,6 @@ def _binary_operation(self, other, method, bounds=True): raise ValueError( "Can't combine operands with interior ring arrays" ) - # --- End: if has_bounds = self.has_bounds() @@ -557,7 +553,6 @@ def _binary_operation(self, other, method, bounds=True): other = other.insert_dimension(-1) except AttributeError: other = np.expand_dims(other, -1) - # --- End: if new_bounds = self.bounds._binary_operation(other, method) @@ -658,7 +653,6 @@ def _equivalent_data(self, other, rtol=None, atol=None, verbose=None): ) ) # pragma: no cover return False - # --- End: if # Still here? Then the data are equivalent. return True @@ -669,7 +663,7 @@ def _YMDhms(self, attr): out.del_bounds(None) return out - def _matching_values(self, value0, value1, units=False): + def _matching_values(self, value0, value1, units=False, basic=False): """Whether two values match. 
The definition of "match" depends on the types of *value0* and @@ -706,8 +700,7 @@ def _matching_values(self, value0, value1, units=False): try: return value0.search(value1) except (AttributeError, TypeError): - return self._equals(value1, value0) - # --- End: if + return self._equals(value1, value0, basic=basic) return False @@ -762,7 +755,6 @@ def _apply_superclass_data_oper( getattr(bounds, oper_name)( *oper_args, inplace=True, **oper_kwargs ) - # --- End: if if interior_ring: interior_ring = v.get_interior_ring(None) @@ -770,7 +762,6 @@ def _apply_superclass_data_oper( getattr(interior_ring, oper_name)( *oper_args, inplace=True, **oper_kwargs ) - # --- End: if return v @@ -803,7 +794,6 @@ def _unary_operation(self, method, bounds=True): new.set_bounds(new_bounds) else: new.del_bounds() - # --- End: if return new @@ -833,7 +823,7 @@ def cellsize(self): """ - data = self.get_bounds_data(None) + data = self.get_bounds_data(None, _fill_value=None) if data is not None: if data.shape[-1] != 2: raise ValueError( @@ -850,7 +840,6 @@ def cellsize(self): data = self.get_data(None) if data is not None: return Data.zeros(self.shape, units=self.Units) - # --- End: if raise AttributeError( "Can't get cell sizes when there are no bounds nor coordinate data" @@ -870,11 +859,11 @@ def dtype(self): >>> c.dtype = numpy.dtype('float32') """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: return data.dtype - bounds = self.get_bounds_data(None) + bounds = self.get_bounds_data(None, _fill_value=None) if bounds is not None: return bounds.dtype @@ -884,11 +873,11 @@ def dtype(self): @dtype.setter def dtype(self, value): - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: data.dtype = value - bounds = self.get_bounds_data(None) + bounds = self.get_bounds_data(None, _fill_value=None) if bounds is not None: bounds.dtype = value @@ -957,7 +946,6 @@ def lower_bounds(self): data = self.get_data(None) if data is not None: return data.copy() - # --- End: if raise AttributeError( "Can't get lower bounds when there are no bounds nor coordinate " @@ -983,7 +971,7 @@ def Units(self): """ # return super().Units - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: # Return the units of the data return data.Units @@ -995,7 +983,6 @@ def Units(self): # if data is not None: # # Return the units of the bounds data # return data.Units - # # --- End: if try: return self._custom["Units"] @@ -1009,7 +996,6 @@ def Units(self): # return bounds._custom['Units'] # except KeyError: # bounds._custom['Units'] = _units_None - # # --- End: try # return _units_None @@ -1069,7 +1055,6 @@ def upper_bounds(self): data = self.get_data(None) if data is not None: return data.copy() - # --- End: if raise AttributeError( "Can't get upper bounds when there are no bounds nor coordinate " @@ -1218,13 +1203,13 @@ def dtype(self): @dtype.setter def dtype(self, value): # DCH - allow dtype to be set before data c.f. 
Units - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: data.dtype = value @dtype.deleter def dtype(self): - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None: del data.dtype @@ -1670,7 +1655,6 @@ def equivalent(self, other, rtol=None, atol=None, traceback=False): ) ) return False - # --- End: for # ------------------------------------------------------------ # Check the data @@ -1740,7 +1724,7 @@ def contiguous(self, overlap=True): False """ - bounds = self.get_bounds_data(None) + bounds = self.get_bounds_data(None, _fill_value=None) if bounds is None: return False @@ -1755,8 +1739,8 @@ def contiguous(self, overlap=True): if ndim == 2: if nbounds != 4: raise ValueError( - "Can't tell if {}-d cells with {} vertices " - "are contiguous".format(ndim, nbounds) + f"Can't tell if {ndim}-d cells with {nbounds} vertices " + "are contiguous" ) # Check cells (j, i) and cells (j, i+1) are contiguous @@ -1793,13 +1777,13 @@ def contiguous(self, overlap=True): if ndim > 2: raise ValueError( - "Can't tell if {}-d cells " "are contiguous".format(ndim) + f"Can't tell if {ndim}-d cells are contiguous" ) if nbounds != 2: raise ValueError( - "Can't tell if {}-d cells with {} vertices " - "are contiguous".format(ndim, nbounds) + f"Can't tell if {ndim}-d cells with {nbounds} vertices " + "are contiguous" ) lower = bounds[1:, 0] @@ -1833,37 +1817,39 @@ def convert_reference_time( ): """Convert reference time data values to have new units. - Conversion is done by decoding the reference times to date-time - objects and then re-encoding them for the new units. + Conversion is done by decoding the reference times to + date-time objects and then re-encoding them for the new units. Any conversions are possible, but this method is primarily for - conversions which require a change in the date-times originally - encoded. For example, use this method to reinterpret data values - in units of "months" since a reference time to data values in - "calendar months" since a reference time. This is often necessary - when units of "calendar months" were intended but encoded as - "months", which have special definition. See the note and examples - below for more details. - - For conversions which do not require a change in the date-times - implied by the data values, this method will be considerably - slower than a simple reassignment of the units. For example, if - the original units are ``'days since 2000-12-1'`` then ``c.Units = - cf.Units('days since 1901-1-1')`` will give the same result and be - considerably faster than ``c.convert_reference_time(cf.Units('days - since 1901-1-1'))``. - - .. note:: It is recommended that the units "year" and "month" be - used with caution, as explained in the following excerpt - from the CF conventions: "The Udunits package defines a - year to be exactly 365.242198781 days (the interval - between 2 successive passages of the sun through vernal - equinox). It is not a calendar year. Udunits includes - the following definitions for years: a common_year is - 365 days, a leap_year is 366 days, a Julian_year is - 365.25 days, and a Gregorian_year is 365.2425 days. For - similar reasons the unit ``month``, which is defined to - be exactly year/12, should also be used with caution. + conversions which require a change in the date-times + originally encoded. 
For example, use this method to + reinterpret data values in units of "months" since a reference + time to data values in "calendar months" since a reference + time. This is often necessary when units of "calendar months" + were intended but encoded as "months", which have special + definition. See the note and examples below for more details. + + For conversions which do not require a change in the + date-times implied by the data values, this method will be + considerably slower than a simple reassignment of the + units. For example, if the original units are ``'days since + 2000-12-1'`` then ``c.Units = cf.Units('days since + 1901-1-1')`` will give the same result and be considerably + faster than ``c.convert_reference_time(cf.Units('days since + 1901-1-1'))``. + + .. note:: It is recommended that the units "year" and "month" + be used with caution, as explained in the following + excerpt from the CF conventions: "The Udunits + package defines a year to be exactly 365.242198781 + days (the interval between 2 successive passages of + the sun through vernal equinox). It is not a + calendar year. Udunits includes the following + definitions for years: a common_year is 365 days, a + leap_year is 366 days, a Julian_year is 365.25 days, + and a Gregorian_year is 365.2425 days. For similar + reasons the unit ``month``, which is defined to be + exactly year/12, should also be used with caution. :Parameters: @@ -1873,24 +1859,24 @@ def convert_reference_time( original calendar. *Parameter example:* - If the original units are ``'months since 2000-1-1'`` in - the Gregorian calendar then the default units to convert - to are ``'days since 2000-1-1'`` in the Gregorian - calendar. + If the original units are ``'months since + 2000-1-1'`` in the Gregorian calendar then the + default units to convert to are ``'days since + 2000-1-1'`` in the Gregorian calendar. calendar_months: `bool`, optional - If True then treat units of ``'months'`` as if they were - calendar months (in whichever calendar is originally - specified), rather than a 12th of the interval between 2 - successive passages of the sun through vernal equinox - (i.e. 365.242198781/12 days). + If True then treat units of ``'months'`` as if they + were calendar months (in whichever calendar is + originally specified), rather than a 12th of the + interval between 2 successive passages of the sun + through vernal equinox (i.e. 365.242198781/12 days). calendar_years: `bool`, optional - If True then treat units of ``'years'`` as if they were - calendar years (in whichever calendar is originally - specified), rather than the interval between 2 successive - passages of the sun through vernal equinox - (i.e. 365.242198781 days). + If True then treat units of ``'years'`` as if they + were calendar years (in whichever calendar is + originally specified), rather than the interval + between 2 successive passages of the sun through + vernal equinox (i.e. 365.242198781 days). {{inplace: `bool`, optional}} @@ -1899,7 +1885,8 @@ def convert_reference_time( :Returns: `{{class}}` or `None` - The construct with converted reference time data values. + The construct with converted reference time data + values. 
**Examples:** @@ -1990,7 +1977,6 @@ def get_property(self, prop, default=ValueError(), bounds=False): out = self.get_bounds().get_property(prop, None) if out is not None: return out - # --- End: if return super().get_property(prop, default) @@ -2003,8 +1989,8 @@ def flatten(self, axes=None, inplace=False): The shape of the data may change, but the size will not. The flattening is executed in row-major (C-style) order. For - example, the array ``[[1, 2], [3, 4]]`` would be flattened across - both dimensions to ``[1 2 3 4]``. + example, the array ``[[1, 2], [3, 4]]`` would be flattened + across both dimensions to ``[1 2 3 4]``. .. versionadded:: 3.0.2 @@ -2013,13 +1999,15 @@ def flatten(self, axes=None, inplace=False): :Parameters: axes: (sequence of) int or str, optional - Select the axes. By default all axes are flattened. The - *axes* argument may be one, or a sequence, of: + Select the axes. By default all axes are + flattened. The *axes* argument may be one, or a + sequence, of: * An internal axis identifier. Selects this axis. - * An integer. Selects the axis corresponding to the given - position in the list of axes of the data array. + * An integer. Selects the axis corresponding to the + given position in the list of axes of the data + array. No axes are flattened if *axes* is an empty sequence. @@ -2044,9 +2032,10 @@ def flatten(self, axes=None, inplace=False): (4, 2, 3) """ - # Note the 'axes' argument can change mid-method meaning it is not - # possible to consolidate this method using a call to - # _apply_superclass_data_operations, despite mostly the same logic. + # Note the 'axes' argument can change mid-method meaning it is + # not possible to consolidate this method using a call to + # _apply_superclass_data_operations, despite mostly the same + # logic. v = _inplace_enabled_define_and_cleanup(self) super(PropertiesDataBounds, v).flatten(axes, inplace=True) @@ -2155,7 +2144,6 @@ def match_by_property(self, *mode, **properties): raise ValueError( "Positional argument, if provided, must one of 'or', 'and'" ) - # --- End: if if not properties: return True @@ -2172,7 +2160,6 @@ def match_by_property(self, *mode, **properties): break elif not ok: break - # --- End: for return ok @@ -2203,7 +2190,7 @@ def match_by_identity(self, *identities): ok = False for value0 in identities: for value1 in self_identities: - ok = self._matching_values(value0, value1) + ok = self._matching_values(value0, value1, basic=True) if ok: break @@ -2331,16 +2318,15 @@ def get_filenames(self): """ out = super().get_filenames() - data = self.get_bounds_data(None) + data = self.get_bounds_data(None, _fill_value=None) if data is not None: out.update(data.get_filenames()) interior_ring = self.get_interior_ring(None) if interior_ring is not None: - data = interior_ring.get_data(None) + data = interior_ring.get_data(None, set_fill_value=False) if data is not None: out.update(interior_ring.get_filenames()) - # --- End: if return out @@ -2638,7 +2624,7 @@ def set_bounds(self, bounds, copy=True): None """ - data = self.get_data(None) + data = self.get_data(None, set_fill_value=False) if data is not None and bounds.shape[: data.ndim] != data.shape: # Check shape @@ -3459,67 +3445,67 @@ def trunc(self, bounds=True, inplace=False, i=False): i=i, ) - def identities(self): - """Return all possible identities. - - The identities comprise: - - * The "standard_name" property. - * The "id" attribute, preceded by ``'id%'``. - * The "cf_role" property, preceded by ``'cf_role='``. 
- * The "axis" property, preceded by ``'axis='``. - * The "long_name" property, preceded by ``'long_name='``. - * All other properties (including "standard_name"), preceded by - the property name and an ``'='``. - * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). - * The netCDF variable name, preceded by ``'ncvar%'``. - - The identities of the bounds, if present, are included (with the - exception of the bounds netCDF variable name). - - .. versionadded:: 3.0.0 - - .. seealso:: `id`, `identity` - - :Returns: - - `list` - The identities. - - **Examples:** - - >>> f.properties() - {'foo': 'bar', - 'long_name': 'Air Temperature', - 'standard_name': 'air_temperature'} - >>> f.nc_get_variable() - 'tas' - >>> f.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] - - >>> f.properties() - {} - >>> f.bounds.properties() - {'axis': 'Z', - 'units': 'm'} - >>> f.identities() - ['axis=Z', 'units=m', 'ncvar%z'] - - """ - identities = super().identities() - - bounds = self.get_bounds(None) - if bounds is not None: - identities.extend( - [i for i in bounds.identities() if i not in identities] - ) - # TODO ncvar AND? - - return identities +# def identities(self, generator=False): +# """Return all possible identities. +# +# The identities comprise: +# +# * The "standard_name" property. +# * The "id" attribute, preceded by ``'id%'``. +# * The "cf_role" property, preceded by ``'cf_role='``. +# * The "axis" property, preceded by ``'axis='``. +# * The "long_name" property, preceded by ``'long_name='``. +# * All other properties (including "standard_name"), preceded by +# the property name and an ``'='``. +# * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). +# * The netCDF variable name, preceded by ``'ncvar%'``. +# +# The identities of the bounds, if present, are included (with the +# exception of the bounds netCDF variable name). +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `id`, `identity` +#ODO +# :Returns: +# +# `list` +# The identities. +# +# **Examples:** +# +# >>> f.properties() +# {'foo': 'bar', +# 'long_name': 'Air Temperature', +# 'standard_name': 'air_temperature'} +# >>> f.nc_get_variable() +# 'tas' +# >>> f.identities() +# ['air_temperature', +# 'long_name=Air Temperature', +# 'foo=bar', +# 'standard_name=air_temperature', +# 'ncvar%tas'] +# +# >>> f.properties() +# {} +# >>> f.bounds.properties() +# {'axis': 'Z', +# 'units': 'm'} +# >>> f.identities() +# ['axis=Z', 'units=m', 'ncvar%z'] +# +# """ +# identities = super().identities() +# +# bounds = self.get_bounds(None) +# if bounds is not None: +# identities.extend( +# [i for i in bounds.identities() if i not in identities] +# ) +# # TODO ncvar AND? 
+# +# return identities @_deprecated_kwarg_check("relaxed_identity") def identity( @@ -3631,7 +3617,6 @@ def identity( if out is not None and not out.startswith("ncvar%"): return out - # --- End: if return default diff --git a/cf/read_write/read.py b/cf/read_write/read.py index 145de1d415..79f578adf9 100644 --- a/cf/read_write/read.py +++ b/cf/read_write/read.py @@ -736,7 +736,7 @@ def read( for f in field_list: standard_name = f._custom.get("standard_name", None) if standard_name is not None: - f.set_property("standard_name", standard_name) + f.set_property("standard_name", standard_name, copy=False) del f._custom["standard_name"] # --- End: for diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index b4589daf43..b7e2e1f9a9 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -902,7 +902,6 @@ def __init__( if LBVC in (2, 9, 65) or LBLEV in (7777, 8888): # CHECK! self.LBLEV = LBLEV c = self.model_level_number_coordinate(aux=bool(c)) - # --- End: if # -------------------------------------------------------- # Create the 'Y' dimension coordinate @@ -930,6 +929,7 @@ def __init__( # -------------------------------------------------------- axiscode = ix xc = None + xkey = None if axiscode is not None: if axiscode in (20, 23): # X axis is time since reference date @@ -1061,12 +1061,12 @@ def __init__( if down_axes: field.flip(down_axes, inplace=True) - # Force cyclic X axis for paritcular values of LBHEM - if int_hdr[lbhem] in (0, 1, 2, 4): - field.cyclic("X", period=360) + # Force cyclic X axis for particular values of LBHEM + if xkey is not None and int_hdr[lbhem] in (0, 1, 2, 4): +# field.cyclic("X", period=360) + field.cyclic(xkey, period=360) self.fields.append(field) - # --- End: for self._bool = True @@ -1177,7 +1177,7 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): ac = self.coord_data(ac, array, bounds, units=_Units["m"]) ac.id = "UM_atmosphere_hybrid_height_coordinate_a" self.implementation.set_properties( - ac, {"long_name": "height based hybrid coeffient a"} + ac, {"long_name": "height based hybrid coeffient a"}, copy=False ) key_a = self.implementation.set_domain_ancillary( field, ac, axes=[_axis["z"]], copy=False @@ -1195,7 +1195,8 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): dc = self.implementation.initialise_DimensionCoordinate() dc = self.coord_data(dc, array, bounds, units=_Units[""]) self.implementation.set_properties( - dc, {"standard_name": "atmosphere_hybrid_height_coordinate"} + dc, {"standard_name": "atmosphere_hybrid_height_coordinate"}, + copy=False ) dc = self.coord_axis(dc, axiscode) dc = self.coord_positive(dc, axiscode, _axis["z"]) @@ -1219,7 +1220,8 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): ac = self.coord_data(ac, array, bounds, units=_Units["1"]) ac.id = "UM_atmosphere_hybrid_height_coordinate_b" self.implementation.set_properties( - ac, {"long_name": "height based hybrid coeffient b"} + ac, {"long_name": "height based hybrid coeffient b"}, + copy=False ) key_b = self.implementation.set_domain_ancillary( field, ac, axes=[_axis["z"]], copy=False @@ -1561,12 +1563,12 @@ def coord_names(self, coord, axiscode): standard_name = _coord_standard_name.setdefault(axiscode, None) if standard_name is not None: - coord.set_property("standard_name", standard_name) + coord.set_property("standard_name", standard_name, copy=False) coord.ncvar = standard_name else: long_name = _coord_long_name.setdefault(axiscode, None) if long_name is not None: - coord.long_name = long_name + 
coord.set_property("long_name", long_name, copy=False) return coord @@ -2337,7 +2339,9 @@ def pseudolevel_coordinate(self, LBUSER5): dc = self.coord_data( dc, array, units=_axiscode_to_Units.setdefault(axiscode, None) ) - self.implementation.set_properties(dc, {"long_name": "pseudolevel"}) + self.implementation.set_properties( + dc, {"long_name": "pseudolevel"}, copy=False + ) dc.id = "UM_pseudolevel" da = self.implementation.initialise_DomainAxis(size=array.size) From b2ecbe046fd481d05baa3c4f91008e6e69e1c8ef Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 23 Mar 2021 20:40:18 +0000 Subject: [PATCH 02/53] dev --- cf/mixin/propertiesdata.py | 65 +++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 4cd1ede664..82b1ab7cf6 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -67,7 +67,7 @@ def __contains__(self, value): x.__contains__(y) <==> y in x """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=None) if data is None: return False @@ -104,12 +104,12 @@ def __setitem__(self, indices, value): x.__setitem__(indices, value) <==> x[indices] """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=None) if data is None: raise ValueError("Can't set elements when there is no data") try: - value = value.get_data(set_fill_value=None) + value = value.get_data(_fill_value=None) except AttributeError: pass @@ -570,7 +570,7 @@ def _binary_operation(self, y, method): >>> u._binary_operation(v, '__idiv__') """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=None) if data is None: raise ValueError( "Can't apply {} to a {} object with no data: {!r}".format( @@ -741,8 +741,8 @@ def _equivalent_data(self, other, atol=None, rtol=None, verbose=None): if not self.has_data(): return True - data0 = self.get_data(set_fill_value=None) - data1 = other.get_data(set_fill_value=None) + data0 = self.get_data(_fill_value=False) + data1 = other.get_data(_fill_value=False) if data0.shape != data1.shape: logger.info( @@ -896,7 +896,7 @@ def _unary_operation(self, method): [1 2 3 4 5] """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise ValueError( "Can't apply {} to a {} with no data".format( @@ -913,7 +913,7 @@ def _unary_operation(self, method): def _YMDhms(self, attr): """TODO.""" - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise ValueError( "ERROR: Can't get {}s when there is no data array".format(attr) @@ -1174,7 +1174,7 @@ def Units(self): """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: return data.Units @@ -1187,7 +1187,7 @@ def Units(self): @Units.setter def Units(self, value): - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: data.Units = value else: @@ -1707,7 +1707,7 @@ def mask_invalid(self, inplace=False, i=False): """ v = _inplace_enabled_define_and_cleanup(self) - data = v.get_data(None, set_fill_value=None) + data = v.get_data(None, _fill_value=False) if data is not None: data.mask_invalid(inplace=True) @@ -2318,7 +2318,7 @@ def dtype(self): [ 0.5 1.5 2.5] """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data 
is None: raise AttributeError( "{} doesn't have attribute 'dtype'".format( @@ -2331,13 +2331,13 @@ def dtype(self): @dtype.setter def dtype(self, value): # DCH - allow dtype to be set before data c.f. Units - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: data.dtype = value @dtype.deleter def dtype(self): - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: del data.dtype @@ -2363,7 +2363,7 @@ def hardmask(self): False """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( "{} doesn't have attribute 'hardmask'".format( @@ -2375,7 +2375,7 @@ def hardmask(self): @hardmask.setter def hardmask(self, value): - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( "{} doesn't have attribute 'hardmask'".format( @@ -2485,7 +2485,7 @@ def isscalar(self): False """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: return False @@ -2546,7 +2546,7 @@ def chunk(self, chunksize=None): `None` """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: data.chunk(chunksize) @@ -2618,7 +2618,7 @@ def close(self): >>> f.close() """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is not None: data.close() @@ -2645,7 +2645,7 @@ def concatenate(cls, variables, axis=0, _preserve=True): out = variable0.copy() # data=False) data = Data.concatenate( - [v.get_data(set_fill_value=None) + [v.get_data(_fill_value=False) for v in variables], axis=axis, _preserve=_preserve ) out.set_data(data, copy=False) @@ -2782,7 +2782,7 @@ def count(self): >>> n = f.count() """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError("Can't count when there are data") @@ -2801,7 +2801,7 @@ def count_masked(self): >>> n = f.count_masked() """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError("Can't count masked when there are data") @@ -2836,7 +2836,7 @@ def cyclic(self, axes=None, iscyclic=True): {1} TODO """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: return set() @@ -2933,7 +2933,7 @@ def datum(self, *index): 6 """ - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: raise ValueError( "ERROR: Can't return an element when there is no data array" @@ -3497,7 +3497,7 @@ def match_by_naxes(self, *naxes): if not naxes: return True - data = self.get_data(None, set_fill_value=None) + data = self.get_data(None, _fill_value=False) if data is None: return False @@ -4933,7 +4933,8 @@ def iscyclic(self, axis): return axis[0] in self.cyclic() - def get_data(self, default=ValueError(), set_fill_value=True): + def get_data(self, default=ValueError(), _units=None, + _fill_value=True): """Return the data. Note that a `Data` instance is returned. 
Use its `array` attribute @@ -4976,7 +4977,7 @@ def get_data(self, default=ValueError(), set_fill_value=True): """ return super().get_data(default=default, _units=False, - _fill_value=set_fill_value) + _fill_value=_fill_value) @_inplace_enabled(default=False) @_manage_log_level_via_verbosity @@ -5149,7 +5150,7 @@ def override_calendar(self, calendar, inplace=False, i=False): """ v = _inplace_enabled_define_and_cleanup(self) - data = v.get_data(None, set_fill_value=False) + data = v.get_data(None, _fill_value=False) if data is not None: data.override_calendar(calendar, inplace=True) v._custom["Units"] = data.Units @@ -5219,7 +5220,7 @@ def override_units(self, units, inplace=False, i=False): units = Units(units) - data = v.get_data(None, set_fill_value=False) + data = v.get_data(None, _fill_value=False) if data is not None: data.override_units(units, inplace=True) else: @@ -5465,7 +5466,7 @@ def where( """ v = _inplace_enabled_define_and_cleanup(self) - data = v.get_data(None, set_fill_value=False) + data = v.get_data(None, _fill_value=False) if data is None: raise ValueError("ERROR: Can't set data in nonexistent data array") @@ -5484,7 +5485,7 @@ def where( condition = condition_data try: - x_data = x.get_data(None, set_fill_value=False) + x_data = x.get_data(None, _fill_value=False) except AttributeError: pass else: @@ -5498,7 +5499,7 @@ def where( x = x_data try: - y_data = y.get_data(None, set_fill_value=False) + y_data = y.get_data(None, _fill_value=False) except AttributeError: pass else: From 249f39cbfd1959c3156707b3eb445f66769607c5 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 23 Mar 2021 23:30:19 +0000 Subject: [PATCH 03/53] dev --- cf/aggregate.py | 33 +- cf/bounds.py | 89 ++--- cf/constructs.py | 8 +- cf/coordinatereference.py | 22 +- cf/data/data.py | 2 +- cf/dimensioncoordinate.py | 4 +- cf/field.py | 45 ++- cf/mixin/coordinate.py | 82 ++-- cf/mixin/propertiesdata.py | 598 ++++++++++++---------------- cf/mixin/propertiesdatabounds.py | 150 ++++--- cf/read_write/um/umread.py | 10 +- cf/test/test_CoordinateReference.py | 13 +- 12 files changed, 481 insertions(+), 575 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 4794bc97b9..00632bdf46 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -1938,6 +1938,7 @@ def aggregate( aggregating_axes = [] axis_items = meta[0].axis.items() for axis in axes: + # TODO IMPORTANT: should this be filter_by_axis ???? 
coords = meta[0].field.coordinates.filter_by_identity( "exact", axis ) @@ -3202,8 +3203,8 @@ def _aggregate_2_fields( if construct0.has_bounds(): data = Data.concatenate( ( - construct0.bounds.get_data(), - construct1.bounds.get_data(), + construct0.bounds.get_data(_fill_value=False), + construct1.bounds.get_data(_fill_value=False), ), axis, _preserve=False, @@ -3212,7 +3213,10 @@ def _aggregate_2_fields( else: # The fields are decreasing along the aggregating axis data = Data.concatenate( - (construct1.get_data(), construct0.get_data()), + ( + construct1.get_data(_fill_value=False), + construct0.get_data(_fill_value=False), + ), axis, _preserve=False, ) @@ -3220,8 +3224,8 @@ def _aggregate_2_fields( if construct0.has_bounds(): data = Data.concatenate( ( - construct1.bounds.get_data(), - construct0.bounds.get_data(), + construct1.bounds.get_data(_fill_value=False), + construct0.bounds.get_data(_fill_value=False), ), axis, _preserve=False, @@ -3273,12 +3277,22 @@ def _aggregate_2_fields( if direction0: # The fields are increasing along the aggregating axis data = Data.concatenate( - (field0.get_data(), field1.get_data()), axis, _preserve=False + ( + field0.get_data(_fill_value=False), + field1.get_data(_fill_value=False), + ), + axis, + _preserve=False, ) else: # The fields are decreasing along the aggregating axis data = Data.concatenate( - (field1.get_data(), field0.get_data()), axis, _preserve=False + ( + field1.get_data(_fill_value=False), + field0.get_data(_fill_value=False), + ), + axis, + _preserve=False, ) # Update the size of the aggregating axis in field0 @@ -3326,8 +3340,9 @@ def _aggregate_2_fields( prop, f"{value0} :AGGREGATED: {value1}", copy=False ) else: - field0.set_property(prop, " :AGGREGATED: {value1}", - copy=False) + field0.set_property( + prop, " :AGGREGATED: " + value1, copy=False + ) else: if value0 is not None: field0.del_property(prop) diff --git a/cf/bounds.py b/cf/bounds.py index 0e4e9761e2..2ea9b2fbd1 100644 --- a/cf/bounds.py +++ b/cf/bounds.py @@ -48,65 +48,45 @@ def __repr__(self): def contiguous(self, overlap=True, direction=None, period=None, verbose=1): """Return True if the bounds are contiguous. - Bounds are contiguous if the cell boundaries match up, or overlap, - with the boundaries of adjacent cells. + Bounds are contiguous if the cell boundaries match up, or + overlap, with the boundaries of adjacent cells. - In general, it is only possible for 1 or 0 variable dimensional - variables with bounds to be contiguous, but size 1 variables with - any number of dimensions are always contiguous. + In general, it is only possible for 1 or 0 variable + dimensional variables with bounds to be contiguous, but size + 1 variables with any number of dimensions are always + contiguous. - An exception is raised if the variable is multidimensional and has - more than one element. + An exception is raised if the variable is multidimensional + and has more than one element. .. versionadded:: 2.0 :Parameters: overlap: `bool`, optional - If False then 1-d cells with two bounds vertices are not - considered contiguous if any adjacent cells overlap each - other. By default such cells are considered contiguous. + If False then 1-d cells with two bounds vertices are + not considered contiguous if any adjacent cells + overlap each other. By default such cells are + considered contiguous. direction: - Specify the direction of 1-d coordinates with two bounds - vertices. Either True for increasing coordinates, or False - for decreasing coordinates. 
By default the direction is - inferred from whether the first bound of the first cell is - less than its second bound (direction is True), or not - (direction is False). + Specify the direction of 1-d coordinates with two + bounds vertices. Either True for increasing + coordinates, or False for decreasing coordinates. By + default the direction is inferred from whether the + first bound of the first cell is less than its second + bound (direction is True), or not (direction is + False). period: optional - Define the period of cyclic values so that the test for - contiguousness can be carried out with modulo + Define the period of cyclic values so that the test + for contiguousness can be carried out with modulo arithmetic. By default the data are assumed to be - non-cyclic, unless the bounds have units of longitude (or - have units of ``'degrees'``), in which case a period of - 360 degrees is assumed. - - verbose: `int` or `str` or `None`, optional - If an integer from ``-1`` to ``3``, or an equivalent string - equal ignoring case to one of: - - * ``'DISABLE'`` (``0``) - * ``'WARNING'`` (``1``) - * ``'INFO'`` (``2``) - * ``'DETAIL'`` (``3``) - * ``'DEBUG'`` (``-1``) - - set for the duration of the method call only as the minimum - cut-off for the verboseness level of displayed output (log) - messages, regardless of the globally-configured `cf.log_level`. - Note that increasing numerical value corresponds to increasing - verbosity, with the exception of ``-1`` as a special case of - maximal and extreme verbosity. - - Otherwise, if `None` (the default value), output messages will - be shown according to the value of the `cf.log_level` setting. - - Overall, the higher a non-negative integer or equivalent string - that is set (up to a maximum of ``3``/``'DETAIL'``) for - increasing verbosity, the more description that is printed to - convey information about the test for contiguity. + non-cyclic, unless the bounds have units of longitude + (or have units of ``'degrees'``), in which case a + period of 360 degrees is assumed. 
+ + {{verbose: `int` or `str` or `None`, optional}} :Returns: @@ -137,7 +117,7 @@ def contiguous(self, overlap=True, direction=None, period=None, verbose=1): False """ - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is None: return False @@ -152,15 +132,15 @@ def contiguous(self, overlap=True, direction=None, period=None, verbose=1): period = Data(360.0, "degrees_east") elif self.Units.equals(Units("degrees")): period = Data(360.0, "degrees") - # --- End: if + if verbose >= 2: - print("Period = {!r}".format(period)) + print(f"Period = {period!r}") if ndim == 2: if nbounds != 4: raise ValueError( - "Can't tell if {}-d cells with {} vertices " - "are contiguous".format(ndim, nbounds) + f"Can't tell if {ndim}-d cells with {nbounds} vertices " + "are contiguous" ) # -------------------------------------------------------- @@ -212,19 +192,18 @@ def contiguous(self, overlap=True, direction=None, period=None, verbose=1): # if (bnd[j, i, 3] != bnd[j+1, i, 0] or # bnd[j, i, 2] != bnd[j+1, i, 1]): # return False - # # --- End: for return True if ndim > 2: raise ValueError( - "Can't tell if {}-d cells " "are contiguous".format(ndim) + f"Can't tell if {ndim}-d cells are contiguous" ) if nbounds != 2: raise ValueError( - "Can't tell if {}-d cells with {} vertices " - "are contiguous".format(ndim, nbounds) + f"Can't tell if {ndim}-d cells with {nbounds} vertices " + "are contiguous" ) if not overlap: diff --git a/cf/constructs.py b/cf/constructs.py index 858cb204a2..79706f2cc0 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -45,8 +45,7 @@ def __repr__(self): """ return super().__repr__().replace("<", " 1: @@ -860,7 +860,7 @@ def roll(self, axis, shift, inplace=False, i=False): if bounds_data is not None: b.dtype = numpy_result_type(bounds_data.dtype, period.dtype) - bounds_data = b.get_data(None, set_fill_value=None) + bounds_data = b.get_data(None, _fill_value=False) if direction: # Increasing diff --git a/cf/field.py b/cf/field.py index 766cef1097..748bc60b71 100644 --- a/cf/field.py +++ b/cf/field.py @@ -607,7 +607,7 @@ def __setitem__(self, indices, value): # self.__class__.__name__)) try: - data = value.get_data(None, set_fill_value=False) + data = value.get_data(None, _fill_value=False) except AttributeError: pass else: @@ -620,7 +620,7 @@ def __setitem__(self, indices, value): value = data - data = self.get_data(set_fill_value=False) + data = self.get_data(_fill_value=False) data[indices] = value def analyse_items(self, relaxed_identities=None): @@ -2626,7 +2626,7 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): `list` """ - data = item.get_data(None, set_fill_value=False) + data = item.get_data(None, _fill_value=False) if axes is None: # -------------------------------------------------------- @@ -4801,7 +4801,7 @@ def _weights_geometry_area( if measure and spherical and aux_Z is not None: # Multiply by radius squared, accounting for any Z # coordinates, to get the actual area - z = aux_Z.get_data(None, set_fill_value=False) + z = aux_Z.get_data(None, _fill_value=False) if z is None: r = radius else: @@ -5321,7 +5321,7 @@ def _weights_measure( ) # --- End: for - clm = clm.get_data(set_fill_value=False).copy() + clm = clm.get_data(_fill_value=False).copy() if clm_axes != clm_axes0: iaxes = [clm_axes0.index(axis) for axis in clm_axes] clm.squeeze(iaxes, inplace=True) @@ -6410,8 +6410,9 @@ def concatenate(cls, fields, axis=0, _preserve=True): return out new_data = Data.concatenate( - 
[f.get_data(set_fill_value=False) - for f in fields], axis=axis, _preserve=_preserve + [f.get_data(_fill_value=False) for f in fields], + axis=axis, + _preserve=_preserve, ) # Change the domain axis size @@ -6539,7 +6540,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): self, "cyclic", kwargs ) # pragma: no cover - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is None: return set() @@ -7678,7 +7679,7 @@ def digitize( f.set_property( "long_name", f"Bin index to which each {self.identity()!r} value belongs", - copy=False + copy=False, ) f.set_property("bin_bounds", bins.array.flatten(), copy=False) @@ -8235,7 +8236,7 @@ def bin( for f in digitized[::-1]: logger.info( - f" Digitized field input : {f!r}" # DCH + f" Digitized field input : {f!r}" # DCH ) # pragma: no cover f = self._conform_for_data_broadcasting(f) @@ -8286,8 +8287,9 @@ def bin( dim.long_name = bin_long_name if bin_interval_type is not None: - dim.set_property("bin_interval_type", - bin_interval_type, copy=False) + dim.set_property( + "bin_interval_type", bin_interval_type, copy=False + ) # Create units for the bins units = Units(bin_units, bin_calendar) @@ -8358,7 +8360,7 @@ def bin( del f del y - # DCH + # DCH logger.info(f" Weights: {weights}") # pragma: no cover logger.info( f" Number of indexed ({', '.join(names)}) bins: " @@ -8885,7 +8887,7 @@ def del_domain_axis( self.squeeze(dakey, inplace=True) for ckey, construct in self.constructs.filter_by_data().items(): - data = construct.get_data(None, set_fill_value=False) + data = construct.get_data(None, _fill_value=False) if data is None: continue @@ -12425,8 +12427,8 @@ def _group_weights(weights, iaxis, index): f"group_span={group_span!r}" ) - lb = bounds[0, 0].get_data(set_fill_value=False) - ub = bounds[-1, 1].get_data(set_fill_value=False) + lb = bounds[0, 0].get_data(_fill_value=False) + ub = bounds[-1, 1].get_data(_fill_value=False) if coord.T: lb = lb.datetime_array.item() ub = ub.datetime_array.item() @@ -15254,7 +15256,7 @@ def cumsum( bounds = coord.get_bounds() bounds[:, 0] = bounds[0, 0] - data = coord.get_data(None, set_fill_value=False) + data = coord.get_data(None, _fill_value=False) if coordinate is not None and data is not None: if coordinate == "mid_range": @@ -15518,7 +15520,7 @@ def anchor( return f # --- End: if - c = dim.get_data(set_fill_value=False) + c = dim.get_data(_fill_value=False) if dim.increasing: # Adjust value so it's in the range [c[0], c[0]+period) @@ -15714,7 +15716,7 @@ def autocyclic(self, verbose=None): logger.debug(2) # pragma: no cover return False - bounds_data = bounds.get_data(None, set_fill_value=False) + bounds_data = bounds.get_data(None, _fill_value=False) if bounds_data is None: self.cyclic(key, iscyclic=False) logger.debug(3) # pragma: no cover @@ -18384,8 +18386,7 @@ def percentile( c = c.copy() bounds = c.get_bounds_data( - c.get_data(None, set_fill_value=False), - _fill_value=False + c.get_data(None, _fill_value=False), _fill_value=False ) if bounds is not None and bounds.shape[0] > 1: bounds = Data( @@ -19331,7 +19332,7 @@ def where( data_axes = g.get_data_axes() - construct_data = construct.get_data(None, set_fill_value=False) + construct_data = construct.get_data(None, _fill_value=False) if construct_data is None: raise ValueError("{!r} has no data".format(construct)) diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index f2a220b698..6551d4172a 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -4,7 +4,7 @@ 
_inplace_enabled, _inplace_enabled_define_and_cleanup, _deprecated_kwarg_check, -) +) from ..data.data import Data @@ -489,64 +489,66 @@ def identity( """ out = super().identity(default=None) - - if out is None: - ctype = self.ctype - if ctype is not None: - return ctype + if out is not None: + return out + + ctype = self.ctype + if ctype is not None: + return ctype return default def identities(self, generator=False, ctype="XTYZ"): """Return all possible identities. - The identities comprise: - - * The "standard_name" property. - * The "id" attribute, preceded by ``'id%'``. - * The "cf_role" property, preceded by ``'cf_role='``. - * The "axis" property, preceded by ``'axis='``. - * The "long_name" property, preceded by ``'long_name='``. - * All other properties (including "standard_name"), preceded by - the property name and an ``'='``. - * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). - * The netCDF variable name, preceded by ``'ncvar%'``. - - .. versionadded:: 3.0.0 - - .. seealso:: `id`, `identity` -TODO - :Returns: + The identities comprise: - `list` - The identities. + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. + * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. + * All other properties (including "standard_name"), preceded by + the property name and an ``'='``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The netCDF variable name, preceded by ``'ncvar%'``. - **Examples:** + .. versionadded:: 3.0.0 - >>> f.properties() - {'foo': 'bar', - 'long_name': 'Air Temperature', - 'standard_name': 'air_temperature'} - >>> f.nc_get_variable() - 'tas' - >>> f.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] + .. seealso:: `id`, `identity` + TODO + :Returns: + + `list` + The identities. + + **Examples:** + + >>> f.properties() + {'foo': 'bar', + 'long_name': 'Air Temperature', + 'standard_name': 'air_temperature'} + >>> f.nc_get_variable() + 'tas' + >>> f.identities() + ['air_temperature', + 'long_name=Air Temperature', + 'foo=bar', + 'standard_name=air_temperature', + 'ncvar%tas'] """ + def _ctype_iter(self, ctype): stop = False for c in ctype: if stop: break - + if getattr(self, c): stop = True yield c - + identities = super().identities(generator=True) g = chain(identities, _ctype_iter(self, ctype)) diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 82b1ab7cf6..7c58d29715 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -59,7 +59,7 @@ def __array__(self, *dtype): if data is not None: return data.__array__(*dtype) - raise ValueError("{} has no data".format(self.__class__.__name__)) + raise ValueError(f"{self.__class__.__name__} has no data") def __contains__(self, value): """Called to implement membership test operators. 
@@ -96,7 +96,7 @@ def __data__(self): if data is not None: return data - raise ValueError("{} has no data".format(self.__class__.__name__)) + raise ValueError(f"{self.__class__.__name__} has no data") def __setitem__(self, indices, value): """Called to implement assignment to x[indices] @@ -573,9 +573,8 @@ def _binary_operation(self, y, method): data = self.get_data(None, _fill_value=None) if data is None: raise ValueError( - "Can't apply {} to a {} object with no data: {!r}".format( - method, self.__class__.__name__, self - ) + f"Can't apply {method} to a {self.__class__.__name__} " + f"object with no data: {self!r}" ) inplace = method[2] == "i" @@ -732,9 +731,8 @@ def _equivalent_data(self, other, atol=None, rtol=None, verbose=None): """ if self.has_data() != other.has_data(): logger.info( - "{}: Only one construct has data: {!r}, {!r}".format( - self.__class__.__name__, self, other - ) + f"{self.__class__.__name__}: Only one construct " + f"has data: {self!r}, {other!r}" ) return False @@ -826,7 +824,6 @@ def _equivalent_data(self, other, atol=None, rtol=None, verbose=None): # else: # # Identity (not string-valued, e.g. cf.Query). # matches.append({None: m}) - # # --- End: for # # return matches @@ -899,9 +896,8 @@ def _unary_operation(self, method): data = self.get_data(None, _fill_value=False) if data is None: raise ValueError( - "Can't apply {} to a {} with no data".format( - method, self.__class__.__name__ - ) + f"Can't apply {method} to a {self.__class__.__name__} " + "with no data" ) new = self.copy(data=False) @@ -916,7 +912,7 @@ def _YMDhms(self, attr): data = self.get_data(None, _fill_value=False) if data is None: raise ValueError( - "ERROR: Can't get {}s when there is no data array".format(attr) + f"ERROR: Can't get {attr}s when there is no data array" ) out = self.copy() # data=False) @@ -938,7 +934,6 @@ def _YMDhms(self, attr): # out.del_property('standard_name', None) # out.set_property('long_name', method) # return out - # # --- End: if # # raise ValueError( # "ERROR: Can't get {0} when there is no data array".format(method)) @@ -1132,9 +1127,8 @@ def reference_datetime(self): units = self.Units if not units.isreftime: raise AttributeError( - "{0} doesn't have attribute 'reference_datetime'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have attribute " + "'reference_datetime'" ) return dt(units.reftime, calendar=units._calendar) @@ -1144,17 +1138,16 @@ def reference_datetime(self, value): if not units.isreftime: raise AttributeError( "Can't set 'reference_datetime' for non reference date-time " - "units {}".format(self.__class__.__name__) + f"units {self.__class__.__name__}" ) units = units.units.split(" since ") try: - self.units = "{0} since {1}".format(units[0], value) + self.units = f"{units[0]} since {value}" except (ValueError, TypeError): raise ValueError( - "Can't override reference date-time {0!r} with {1!r}".format( - units[1], value - ) + "Can't override reference date-time " + f"{units[1]!r} with {value!r}" ) @property @@ -1213,8 +1206,8 @@ def Units(self, value): @Units.deleter def Units(self): raise AttributeError( - "Can't delete {} attribute 'Units'. Use the override_units " - "method.".format(self.__class__.__name__) + f"Can't delete {self.__class__.__name__} attribute " + "'Units'. Use the override_units method." 
) @property @@ -1439,14 +1432,14 @@ def calendar(self): "'calendar'" ) -# value = getattr(self.Units, "calendar", None) -# if value is None: -# raise AttributeError( -# "{} doesn't have CF property 'calendar'".format( -# self.__class__.__name__ -# ) -# ) -# return value + # value = getattr(self.Units, "calendar", None) + # if value is None: + # raise AttributeError( + # "{} doesn't have CF property 'calendar'".format( + # self.__class__.__name__ + # ) + # ) + # return value @calendar.setter def calendar(self, value): @@ -1456,9 +1449,8 @@ def calendar(self, value): def calendar(self): if getattr(self, "calendar", None) is None: raise AttributeError( - "Can't delete non-existent {} CF property 'calendar'".format( - self.__class__.__name__ - ) + f"Can't delete non-existent {self.__class__.__name__} " + "CF property 'calendar'" ) self.Units = Units(getattr(self, "units", None)) @@ -1614,14 +1606,14 @@ def units(self): raise AttributeError( f"{self.__class__.__name__} doesn't have CF property 'units'" ) - -# value = getattr(self.Units, "units", None) -# if value is None: -# raise AttributeError( -# f"{self.__class__.__name__} doesn't have CF property 'units'" -# ) -# -# return value + + # value = getattr(self.Units, "units", None) + # if value is None: + # raise AttributeError( + # f"{self.__class__.__name__} doesn't have CF property 'units'" + # ) + # + # return value @units.setter def units(self, value): @@ -1631,9 +1623,8 @@ def units(self, value): def units(self): if getattr(self, "units", None) is None: raise AttributeError( - "Can't delete non-existent {} CF property 'units'".format( - self.__class__.__name__ - ) + f"Can't delete non-existent {self.__class__.__name__} " + "CF property 'units'" ) self.Units = Units(None, getattr(self, "calendar", None)) @@ -1895,10 +1886,9 @@ def period(self, *value): value.Units = self.Units else: raise ValueError( - "Period units {!r} are not equivalent to data " - "units {!r}".format(units, self.Units) + f"Period units {units!r} are not equivalent to data " + f"units {self.Units!r}" ) - # --- End: if value = abs(value) value.dtype = float @@ -1911,7 +1901,6 @@ def period(self, *value): # "The data range of {!r} is not less than the " # "period of {!r}".format(r, value) # ) - # --- End: if self._custom["period"] = value @@ -2258,7 +2247,7 @@ def datetime_array(self): data = self.get_data(None) if data is None: raise AttributeError( - "{} has no data array".format(self.__class__.__name__) + f"{self.__class__.__name__} has no data array" ) return data.datetime_array @@ -2321,9 +2310,7 @@ def dtype(self): data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( - "{} doesn't have attribute 'dtype'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have attribute 'dtype'" ) return data.dtype @@ -2366,21 +2353,17 @@ def hardmask(self): data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( - "{} doesn't have attribute 'hardmask'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have attribute 'hardmask'" ) return data.hardmask @hardmask.setter - def hardmask(self, value): + def hardmask(self, value): data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( - "{} doesn't have attribute 'hardmask'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__} doesn't have any data" ) data.hardmask = value @@ -2388,9 +2371,7 @@ def hardmask(self, value): @hardmask.deleter def hardmask(self): raise AttributeError( - 
"Won't delete {} attribute 'hardmask'".format( - self.__class__.__name__ - ) + f"Won't delete {self.__class__.__name__} attribute 'hardmask'" ) @property @@ -2422,7 +2403,7 @@ def array(self): data = self.get_data(None) if data is None: raise AttributeError( - "{} has no data array".format(self.__class__.__name__) + f"{self.__class__.__name__} has no data array" ) return data.array @@ -2456,7 +2437,7 @@ def varray(self): data = self.get_data(None) if data is None: raise AttributeError( - "{} has no data array".format(self.__class__.__name__) + f"{self.__class__.__name__} has no data array" ) return data.varray @@ -2512,8 +2493,8 @@ def ceil(self, inplace=False, i=False): :Returns: `{{class}}` or `None` - The construct with the ceiling of the data. If the operation was - in-place then `None` is returned. + The construct with the ceiling of the data. If the + operation was in-place then `None` is returned. **Examples:** @@ -2555,10 +2536,10 @@ def chunk(self, chunksize=None): def clip(self, a_min, a_max, units=None, inplace=False, i=False): """Limit the values in the data. - Given an interval, values outside the interval are clipped to the - interval edges. For example, if an interval of ``[0, 1]`` is - specified, values smaller than 0 become 0, and values larger than - 1 become 1. + Given an interval, values outside the interval are clipped to + the interval edges. For example, if an interval of ``[0, 1]`` + is specified, values smaller than 0 become 0, and values + larger than 1 become 1. :Parameters: @@ -2573,8 +2554,8 @@ def clip(self, a_min, a_max, units=None, inplace=False, i=False): `a_max` may be `None`. units: `str` or `Units` - Specify the units of *a_min* and *a_max*. By default the - same units as the data are assumed. + Specify the units of *a_min* and *a_max*. By default + the same units as the data are assumed. 
{{inplace: `bool`, optional}} @@ -2645,8 +2626,9 @@ def concatenate(cls, variables, axis=0, _preserve=True): out = variable0.copy() # data=False) data = Data.concatenate( - [v.get_data(_fill_value=False) - for v in variables], axis=axis, _preserve=_preserve + [v.get_data(_fill_value=False) for v in variables], + axis=axis, + _preserve=_preserve, ) out.set_data(data, copy=False) @@ -3054,9 +3036,8 @@ def equals( try: if not self.Units.equals(other.Units): logger.info( - "{0}: Different Units: {1!r} != {2!r}".format( - self.__class__.__name__, self.Units, other.Units - ) + f"{self.__class__.__name__}: Different Units: " + f"{self.Units!r} != {other.Units!r}" ) return False except AttributeError: @@ -3104,11 +3085,8 @@ def equivalent(self, other, rtol=None, atol=None, traceback=False): # Check that each instance is the same type if type(self) != type(other): print( - "{}: Different types: {}, {}".format( - self.__class__.__name__, - self.__class__.__name__, - other.__class__.__name__, - ) + f"{self.__class__.__name__}: Different types: " + f"{self.__class__.__name__}, {other.__class__.__name__}" ) return False @@ -3145,12 +3123,10 @@ def equivalent(self, other, rtol=None, atol=None, traceback=False): if not result: if traceback: print( - "{}: Different {} attributes: {!r}, {!r}".format( - self.__class__.__name__, attr, x, y - ) + f"{self.__class__.__name__}: Different {attr} " + f"attributes: {x!r}, {x!r}" ) return False - # --- End: for # ------------------------------------------------------------ # Check the data @@ -3287,13 +3263,10 @@ def _convert_reftime_units(value, units, reftime): # , calendar): else: return t.interval(reftime, end=True)[0] - # --- End: def - if not self.Units.isreftime: raise ValueError( - "{} must have reference time units, not {!r}".format( - self.__class__.__name__, self.Units - ) + f"{self.__class__.__name__} must have reference time units, " + f"not {self.Units!r}" ) v = _inplace_enabled_define_and_cleanup(self) @@ -3310,9 +3283,7 @@ def _convert_reftime_units(value, units, reftime): # , calendar): ) elif not getattr(units, "isreftime", False): raise ValueError( - "New units must be reference time units, not {0!r}".format( - units - ) + f"New units must be reference time units, not {units!r}" ) if units0._units_since_reftime in _month_units: @@ -3581,11 +3552,9 @@ def match_by_units(self, *units, exact=True): ok = Units(value).equals(self_units) else: ok = Units(value).equivalent(self_units) - # --- End: try if ok: break - # --- End: for return ok @@ -3629,29 +3598,31 @@ def allclose(self, y, atol=None, rtol=None): broadcastable data. Two real numbers ``x`` and ``y`` are considered equal if - ``|x-y|<=atol+rtol|y|``, where ``atol`` (the tolerance on absolute - differences) and ``rtol`` (the tolerance on relative differences) - are positive, typically very small numbers. See the *atol* and - *rtol* parameters. + ``|x-y|<=atol+rtol|y|``, where ``atol`` (the tolerance on + absolute differences) and ``rtol`` (the tolerance on relative + differences) are positive, typically very small numbers. See + the *atol* and *rtol* parameters. .. seealso:: `all`, `any`, `isclose` :Parameters: y: - The object to be compared with the data array. *y* must be - broadcastable to the data array and if *y* has units then - they must be compatible. May be any object that can be - converted to a `Data` object (which includes numpy array - and `Data` objects). + The object to be compared with the data array. 
*y* + must be broadcastable to the data array and if *y* has + units then they must be compatible. May be any object + that can be converted to a `Data` object (which + includes numpy array and `Data` objects). atol: `float`, optional The tolerance on absolute differences between real - numbers. The default value is set by the `atol` function. + numbers. The default value is set by the `atol` + function. rtol: `float`, optional The tolerance on relative differences between real - numbers. The default value is set by the `rtol` function. + numbers. The default value is set by the `rtol` + function. :Returns: @@ -3683,7 +3654,6 @@ def allclose(self, y, atol=None, rtol=None): else: if y_data is None: y_data = y - # --- End: if return data.allclose(y_data, rtol=rtol, atol=atol) @@ -3691,7 +3661,8 @@ def any(self): """Test whether any data elements evaluate to True. Performs a logical "or" over the data array and returns the - result. Masked values are considered as False during computation. + result. Masked values are considered as False during + computation. .. seealso:: `all`, `allclose` @@ -3724,44 +3695,14 @@ def any(self): return False - # def files(self): - # '''Return the names of any files containing parts of the data array. - # - # .. seealso:: `close` - # - # :Returns: - # - # `!set` - # The file names in normalized, absolute form. - # - # **Examples:** - # - # >>> f = cf.read_field('../file[123].nc') - # >>> f.files() - # {'/data/user/file1.nc', - # '/data/user/file2.nc', - # '/data/user/file3.nc'} - # >>> a = f.array - # >>> f.files() - # set() - # - # ''' - # data = self.get_data(None) - # if data is None: - # out = set() - # else: - # out = data.files() - # - # return out - def fill_value(self, default=None): """Return the data array missing data value. - This is the value of the `missing_value` CF property, or if that - is not set, the value of the `_FillValue` CF property, else if - that is not set, ``None``. In the last case the default `numpy` - missing data value for the array's data type is assumed if a - missing data value is required. + This is the value of the `missing_value` CF property, or if + that is not set, the value of the `_FillValue` CF property, + else if that is not set, ``None``. In the last case the + default `numpy` missing data value for the array's data type + is assumed if a missing data value is required. .. seealso:: `cf.default_netCDF_fillvals`, `_FillValue`, `missing_value` @@ -3769,12 +3710,12 @@ def fill_value(self, default=None): :Parameters: default: optional - If the missing value is unset then return this value. By - default, *default* is `None`. If *default* is the special - value ``'netCDF'`` then return the netCDF default value - appropriate to the data array's data type is used. These - may be found with the `cf.default_netCDF_fillvals` - function. For example: + If the missing value is unset then return this + value. By default, *default* is `None`. If *default* + is the special value ``'netCDF'`` then return the + netCDF default value appropriate to the data array's + data type is used. These may be found with the + `cf.default_netCDF_fillvals` function. 
For example: >>> cf.default_netCDF_fillvals() {'S1': '\x00', @@ -3829,7 +3770,6 @@ def fill_value(self, default=None): fillval = default_netCDF_fillvals()[d.kind + str(d.itemsize)] else: fillval = default - # --- End: if return fillval @@ -3884,8 +3824,8 @@ def flip(self, axes=None, inplace=False, i=False): def exp(self, inplace=False, i=False): """The exponential of the data, element-wise. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. seealso:: `log` @@ -3898,8 +3838,8 @@ def exp(self, inplace=False, i=False): :Returns: `{{class}}` or `None` - The construct with the exponential of data values. If the - operation was in-place then `None` is returned. + The construct with the exponential of data values. If + the operation was in-place then `None` is returned. **Examples:** @@ -3939,8 +3879,8 @@ def sin(self, inplace=False, i=False): The Units are changed to '1' (nondimensional). - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. seealso:: `cos`, `tan` @@ -3991,14 +3931,13 @@ def sin(self, inplace=False, i=False): @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def arctan(self, inplace=False): - """Take the trigonometric inverse tangent of the data element- - wise. + """Take the trigonometric inverse tangent of the data element-wise. Units are ignored in the calculation. The result has units of radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.0.7 @@ -4011,9 +3950,9 @@ def arctan(self, inplace=False): :Returns: `{{class}}` or `None` - The construct with the trigonometric inverse tangent of - data values. If the operation was in-place then `None` is - returned. + The construct with the trigonometric inverse tangent + of data values. If the operation was in-place then + `None` is returned. **Examples:** @@ -4047,10 +3986,11 @@ def arctan(self, inplace=False): def arctanh(self, inplace=False): """Take the inverse hyperbolic tangent of the data element-wise. - Units are ignored in the calculation. The result has units of radians. + Units are ignored in the calculation. The result has units of + radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.2.0 @@ -4063,9 +4003,9 @@ def arctanh(self, inplace=False): :Returns: `{{class}}` or `None` - The construct with the inverse hyperbolic tangent of data - values. If the operation was in-place then `None` is - returned. + The construct with the inverse hyperbolic tangent of + data values. If the operation was in-place then `None` + is returned. **Examples:** @@ -4104,8 +4044,8 @@ def arcsin(self, inplace=False): Units are ignored in the calculation. The result has units of radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.2.0 @@ -4118,9 +4058,9 @@ def arcsin(self, inplace=False): :Returns: `{{class}}` or `None` - The construct with the trigonometric inverse sine of data - values. If the operation was in-place then `None` is - returned. + The construct with the trigonometric inverse sine of + data values. 
If the operation was in-place then `None` + is returned. **Examples:** @@ -4156,10 +4096,11 @@ def arcsin(self, inplace=False): def arcsinh(self, inplace=False): """Take the inverse hyperbolic sine of the data element-wise. - Units are ignored in the calculation. The result has units of radians. + Units are ignored in the calculation. The result has units of + radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.1.0 @@ -4206,14 +4147,13 @@ def arcsinh(self, inplace=False): @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def arccos(self, inplace=False): - """Take the trigonometric inverse cosine of the data element- - wise. + """Take the trigonometric inverse cosine of the data element- wise. Units are ignored in the calculation. The result has units of radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.2.0 @@ -4227,8 +4167,8 @@ def arccos(self, inplace=False): `{{class}}` or `None` The construct with the trigonometric inverse cosine of - data values. If the operation was in-place then `None` is - returned. + data values. If the operation was in-place then `None` + is returned. **Examples:** @@ -4264,10 +4204,11 @@ def arccos(self, inplace=False): def arccosh(self, inplace=False): """Take the inverse hyperbolic cosine of the data element-wise. - Units are ignored in the calculation. The result has units of radians. + Units are ignored in the calculation. The result has units of + radians. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.2.0 @@ -4280,9 +4221,9 @@ def arccosh(self, inplace=False): :Returns: `{{class}}` or `None` - The construct with the inverse hyperbolic cosine of data - values. If the operation was in-place then `None` is - returned. + The construct with the inverse hyperbolic cosine of + data values. If the operation was in-place then `None` + is returned. **Examples:** @@ -4327,8 +4268,8 @@ def tan(self, inplace=False, i=False): The Units are changed to '1' (nondimensional). - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. seealso:: `arctan`, `cos`, `sin`, `tanh` @@ -4380,16 +4321,16 @@ def tan(self, inplace=False, i=False): def tanh(self, inplace=False): """Take the hyperbolic tangent of the data array. - Units are accounted for in the calculation. If the units are not - equivalent to radians (such as Kelvin) then they are treated as if - they were radians. For example, the the hyperbolic tangent of 90 - degrees_east is 0.91715234, as is the hyperbolic tangent of - 1.57079632 radians. + Units are accounted for in the calculation. If the units are + not equivalent to radians (such as Kelvin) then they are + treated as if they were radians. For example, the the + hyperbolic tangent of 90 degrees_east is 0.91715234, as is the + hyperbolic tangent of 1.57079632 radians. The output units are changed to '1' (nondimensional). - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. 
versionadded:: 3.1.0 @@ -4442,16 +4383,16 @@ def tanh(self, inplace=False): def sinh(self, inplace=False): """Take the hyperbolic sine of the data element-wise. - Units are accounted for in the calculation. If the units are not - equivalent to radians (such as Kelvin) then they are treated as if - they were radians. For example, the the hyperbolic sine of 90 - degrees_north is 2.30129890, as is the hyperbolic sine of - 1.57079632 radians. + Units are accounted for in the calculation. If the units are + not equivalent to radians (such as Kelvin) then they are + treated as if they were radians. For example, the the + hyperbolic sine of 90 degrees_north is 2.30129890, as is the + hyperbolic sine of 1.57079632 radians. The output units are changed to '1' (nondimensional). - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.1.0 @@ -4464,8 +4405,9 @@ def sinh(self, inplace=False): :Returns: `{{class}}` or `None` - The construct with the hyperbolic sine of data values. If - the operation was in-place then `None` is returned. + The construct with the hyperbolic sine of data + values. If the operation was in-place then `None` is + returned. **Examples:** @@ -4502,15 +4444,16 @@ def sinh(self, inplace=False): def cosh(self, inplace=False): """Take the hyperbolic cosine of the data element-wise. - Units are accounted for in the calculation. If the units are not - equivalent to radians (such as Kelvin) then they are treated as if - they were radians. For example, the the hyperbolic cosine of 0 - degrees_east is 1.0, as is the hyperbolic cosine of 1.57079632 radians. + Units are accounted for in the calculation. If the units are + not equivalent to radians (such as Kelvin) then they are + treated as if they were radians. For example, the the + hyperbolic cosine of 0 degrees_east is 1.0, as is the + hyperbolic cosine of 1.57079632 radians. The output units are changed to '1' (nondimensional). - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. versionadded:: 3.1.0 @@ -4565,8 +4508,8 @@ def log(self, base=None, inplace=False, i=False): By default the natural logarithm is taken, but any base may be specified. - The "standard_name" and "long_name" properties are removed from - the result. + The "standard_name" and "long_name" properties are removed + from the result. .. seealso:: `exp` @@ -4583,8 +4526,8 @@ def log(self, base=None, inplace=False, i=False): :Returns: `{{class}}` or `None` - The construct with the logarithm of data values, or `None` - if the operation was in-place. + The construct with the logarithm of data values, or + `None` if the operation was in-place. **Examples:** @@ -4624,9 +4567,9 @@ def log(self, base=None, inplace=False, i=False): def trunc(self, inplace=False, i=False): """Truncate the data, element-wise. - The truncated value of the scalar ``x``, is the nearest integer - ``i`` which is closer to zero than ``x`` is. I.e. the fractional - part of the signed number ``x`` is discarded. + The truncated value of the scalar ``x``, is the nearest + integer ``i`` which is closer to zero than ``x`` is. I.e. the + fractional part of the signed number ``x`` is discarded. .. versionadded:: 1.0 @@ -4641,8 +4584,8 @@ def trunc(self, inplace=False, i=False): :Returns: `{{class}}` or `None` - The construct with truncated data. 
If the operation was - in-place then `None` is returned. + The construct with truncated data. If the operation + was in-place then `None` is returned. **Examples:** @@ -4724,16 +4667,16 @@ def identity( :Parameters: default: optional - If no identity can be found then return the value of the - default parameter. + If no identity can be found then return the value of + the default parameter. strict: `bool`, optional - If True then the identity is the first found of only the - "standard_name" property or the "id" attribute. + If True then the identity is the first found of only + the "standard_name" property or the "id" attribute. relaxed: `bool`, optional - If True then the identity is the first found of only the - "standard_name" property, the "id" attribute, the + If True then the identity is the first found of only + the "standard_name" property, the "id" attribute, the "long_name" property or the netCDF variable name. nc_only: `bool`, optional @@ -4819,11 +4762,6 @@ def identity( if n is not None: return f"{prop}={n}" -# if _ctype: -# for ctype in ("X", "Y", "Z", "T"): -# if getattr(self, ctype, False): -# return ctype - n = self.nc_get_variable(None) if n is not None: return f"ncvar%{n}" @@ -4848,7 +4786,7 @@ def identities(self, generator=False, **kwargs): .. versionadded:: 3.0.0 .. seealso:: `id`, `identity` -TODO + TODO :Returns: `list` @@ -4873,7 +4811,7 @@ def identities(self, generator=False, **kwargs): id_identity = "" i = getattr(self, "id", None) if i is not None: - id_identity = ("id%" + i,) + id_identity = (f"id%{i}",) identities = super().identities(generator=True) @@ -4882,7 +4820,7 @@ def identities(self, generator=False, **kwargs): return g return list(g) - + def inspect(self): """Inspect the object for debugging. @@ -4933,8 +4871,7 @@ def iscyclic(self, axis): return axis[0] in self.cyclic() - def get_data(self, default=ValueError(), _units=None, - _fill_value=True): + def get_data(self, default=ValueError(), _units=None, _fill_value=True): """Return the data. Note that a `Data` instance is returned. Use its `array` attribute @@ -4976,8 +4913,9 @@ def get_data(self, default=ValueError(), _units=None, None """ - return super().get_data(default=default, _units=False, - _fill_value=_fill_value) + return super().get_data( + default=default, _units=False, _fill_value=_fill_value + ) @_inplace_enabled(default=False) @_manage_log_level_via_verbosity @@ -4992,22 +4930,22 @@ def halo( ): """Expand the data by adding a halo. - The halo may be applied over a subset of the data dimensions and - each dimension may have a different halo size (including - zero). The halo region is populated with a copy of the proximate - values from the original data. + The halo may be applied over a subset of the data dimensions + and each dimension may have a different halo size (including + zero). The halo region is populated with a copy of the + proximate values from the original data. **Cyclic axes** - A cyclic axis that is expanded with a halo of at least size 1 is - no longer considered to be cyclic. + A cyclic axis that is expanded with a halo of at least size 1 + is no longer considered to be cyclic. **Tripolar domains** - Data for global tripolar domains are a special case in that a halo - added to the northern end of the "Y" axis must be filled with - values that are flipped in "X" direction. Such domains need to be - explicitly indicated with the *tripolar* parameter. 
+ Data for global tripolar domains are a special case in that a + halo added to the northern end of the "Y" axis must be filled + with values that are flipped in "X" direction. Such domains + need to be explicitly indicated with the *tripolar* parameter. .. versionadded:: 3.5.0 @@ -5016,30 +4954,31 @@ def halo( size: `int` or `dict` Specify the size of the halo for each axis. - If *size* is a non-negative `int` then this is the halo - size that is applied to all of the axes defined by the - *axes* parameter. + If *size* is a non-negative `int` then this is the + halo size that is applied to all of the axes defined + by the *axes* parameter. Alternatively, halo sizes may be assigned to axes individually by providing a `dict` for which a key - specifies an axis (defined by its integer position in the - data) with a corresponding value of the halo size for that - axis. Axes not specified by the dictionary are not - expanded, and the *axes* parameter must not also be set. + specifies an axis (defined by its integer position in + the data) with a corresponding value of the halo size + for that axis. Axes not specified by the dictionary + are not expanded, and the *axes* parameter must not + also be set. *Parameter example:* Specify a halo size of 1 for all otherwise selected axes: ``size=1`` *Parameter example:* - Specify a halo size of zero ``size=0``. This results in - no change to the data shape. + Specify a halo size of zero ``size=0``. This results + in no change to the data shape. *Parameter example:* - For data with three dimensions, specify a halo size of 3 - for the first dimension and 1 for the second dimension: - ``size={0: 3, 1: 1}``. This is equivalent to ``size={0: - 3, 1: 1, 2: 0}`` + For data with three dimensions, specify a halo size + of 3 for the first dimension and 1 for the second + dimension: ``size={0: 3, 1: 1}``. This is equivalent + to ``size={0: 3, 1: 1, 2: 0}`` *Parameter example:* Specify a halo size of 2 for the first and last @@ -5047,34 +4986,36 @@ def halo( ``size={0: 2, -1: 2}``. axes: (sequence of) `int` - Select the domain axes to be expanded, defined by their - integer positions in the data. By default, or if *axes* is - `None`, all axes are selected. No axes are expanded if - *axes* is an empty sequence. + Select the domain axes to be expanded, defined by + their integer positions in the data. By default, or if + *axes* is `None`, all axes are selected. No axes are + expanded if *axes* is an empty sequence. tripolar: `dict`, optional A dictionary defining the "X" and "Y" axes of a global - tripolar domain. This is necessary because in the global - tripolar case the "X" and "Y" axes need special treatment, - as described above. It must have keys ``'X'`` and ``'Y'``, - whose values identify the corresponding domain axis - construct by their integer positions in the data. + tripolar domain. This is necessary because in the + global tripolar case the "X" and "Y" axes need special + treatment, as described above. It must have keys + ``'X'`` and ``'Y'``, whose values identify the + corresponding domain axis construct by their integer + positions in the data. - The "X" and "Y" axes must be a subset of those identified - by the *size* or *axes* parameter. + The "X" and "Y" axes must be a subset of those + identified by the *size* or *axes* parameter. See the *fold_index* parameter. 
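# A minimal numpy sketch of the halo concept described above: the new
# cells are copies of the proximate original values, much like "edge"
# padding.  This is an illustration only, not the cf implementation;
# the sizes mirror the documented example ``size={0: 3, 1: 1}``.
import numpy as np

a = np.arange(12).reshape(3, 4)

# Halo of size 3 on the first axis and 1 on the second axis.
print(np.pad(a, pad_width=((3, 3), (1, 1)), mode="edge"))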
*Parameter example:* Define the "X" and Y" axes by positions 2 and 1 - respectively of the data: ``tripolar={'X': 2, 'Y': 1}`` + respectively of the data: ``tripolar={'X': 2, 'Y': + 1}`` fold_index: `int`, optional - Identify which index of the "Y" axis corresponds to the - fold in "X" axis of a tripolar grid. The only valid values - are ``-1`` for the last index, and ``0`` for the first - index. By default it is assumed to be the last - index. Ignored if *tripolar* is `None`. + Identify which index of the "Y" axis corresponds to + the fold in "X" axis of a tripolar grid. The only + valid values are ``-1`` for the last index, and ``0`` + for the first index. By default it is assumed to be + the last index. Ignored if *tripolar* is `None`. {{inplace: `bool`, optional}} @@ -5115,11 +5056,11 @@ def halo( def override_calendar(self, calendar, inplace=False, i=False): """Override the calendar of date-time units. - The new calendar need not be equivalent to the original one, and - the data array elements will not be changed to reflect the new - units. Therefore, this method should only be used when it is known - that the data array values are correct but the calendar has been - incorrectly encoded. + The new calendar need not be equivalent to the original one, + and the data array elements will not be changed to reflect the + new units. Therefore, this method should only be used when it + is known that the data array values are correct but the + calendar has been incorrectly encoded. Not to be confused with setting the `calendar` or `Units` attributes to a calendar which is equivalent to the original @@ -5158,7 +5099,7 @@ def override_calendar(self, calendar, inplace=False, i=False): if not v.Units.isreftime: raise ValueError( "Can't override the calendar of non-reference-time " - "units: {0!r}".format(self.Units) + f"units: {self.Units!r}" ) PropertiesData.Units.fset( @@ -5172,14 +5113,15 @@ def override_calendar(self, calendar, inplace=False, i=False): def override_units(self, units, inplace=False, i=False): """Override the units. - The new units need not be equivalent to the original ones, and the - data array elements will not be changed to reflect the new - units. Therefore, this method should only be used when it is known - that the data array values are correct but the units have - incorrectly encoded. + The new units need not be equivalent to the original ones, and + the data array elements will not be changed to reflect the new + units. Therefore, this method should only be used when it is + known that the data array values are correct but the units + have incorrectly encoded. - Not to be confused with setting the `units` or `Units` attributes - to units which are equivalent to the original units. + Not to be confused with setting the `units` or `Units` + attributes to units which are equivalent to the original + units. .. seealso:: `calendar`, `override_calendar`, `units`, `Units` @@ -5278,12 +5220,12 @@ def rint(self, inplace=False, i=False): def round(self, decimals=0, inplace=False, i=False): """Round the data to the given number of decimals. - Values exactly halfway between rounded decimal values are rounded - to the nearest even value. Thus 1.5 and 2.5 round to 2.0, -0.5 and - 0.5 round to 0.0, etc. Results may also be surprising due to the - inexact representation of decimal fractions in the IEEE floating - point standard and errors introduced when scaling by powers of - ten. + Values exactly halfway between rounded decimal values are + rounded to the nearest even value. 
Thus 1.5 and 2.5 round to + 2.0, -0.5 and 0.5 round to 0.0, etc. Results may also be + surprising due to the inexact representation of decimal + fractions in the IEEE floating point standard and errors + introduced when scaling by powers of ten. .. versionadded:: 1.1.4 @@ -5292,9 +5234,9 @@ def round(self, decimals=0, inplace=False, i=False): :Parameters: decimals: `int`, optional - Number of decimal places to round to (0 by default). If - decimals is negative, it specifies the number of positions - to the left of the decimal point. + Number of decimal places to round to (0 by + default). If decimals is negative, it specifies the + number of positions to the left of the decimal point. {{inplace: `bool`, optional}} @@ -5364,8 +5306,8 @@ def roll(self, iaxis, shift, inplace=False, i=False): def set_data(self, data, copy=True, inplace=True): """Set the data. - The units, calendar and fill value of the incoming `Data` instance - are removed prior to insertion. + The units, calendar and fill value of the incoming `Data` + instance are removed prior to insertion. .. versionadded:: 3.0.0 @@ -5379,8 +5321,8 @@ def set_data(self, data, copy=True, inplace=True): {{data_like}} copy: `bool`, optional - If False then do not copy the data prior to insertion. By - default the data are copied. + If False then do not copy the data prior to + insertion. By default the data are copied. {{inplace: `bool`, optional (default True)}} @@ -5390,8 +5332,8 @@ def set_data(self, data, copy=True, inplace=True): `None` or `{{class}}` If the operation was in-place then `None` is returned, - otherwise return a new `{{class}}` instance containing the - new data. + otherwise return a new `{{class}}` instance containing + the new data. **Examples:** @@ -5420,26 +5362,15 @@ def set_data(self, data, copy=True, inplace=True): if not isinstance(data, _Data): data = _Data(data, copy=False) - units = self.Units - data_units = data.Units - if not data_units: + if not data.Units: + units = self.Units if units is not None: if copy: copy = False - data = data.override_units(units) + data = data.override_units(units, inplace=False) else: data.override_units(units, inplace=True) - elif units: - if units.equivalent(data_units): - if units != data_units: - if copy: - copy= False - data = data.copy() - - data.Units = units - else: - raise ValueError("Can't set data with incompatible units") - + return super().set_data(data, copy=copy, inplace=inplace) @_deprecated_kwarg_check("i") @@ -5477,9 +5408,8 @@ def where( else: if condition_data is None: raise ValueError( - "ERROR: Can't set data from {} with no data array".format( - condition.__class__.__name__ - ) + "ERROR: Can't set data from " + f"{condition.__class__.__name__} with no data array" ) condition = condition_data @@ -5491,9 +5421,8 @@ def where( else: if x_data is None: raise ValueError( - "ERROR: Can't set data from {} with no data array".format( - x.__class__.__name__ - ) + f"ERROR: Can't set data from {x.__class__.__name__} " + "with no data array" ) x = x_data @@ -5505,9 +5434,8 @@ def where( else: if y_data is None: raise ValueError( - "ERROR: Can't set data from {} with no data array".format( - y.__class__.__name__ - ) + f"ERROR: Can't set data from {y.__class__.__name__} " + "with no data array" ) y = y_data diff --git a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index 46f9399448..97080416b6 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -97,7 +97,7 @@ def __getitem__(self, indices): "{}.__getitem__: findices 
= {}".format(cname, findices) ) # pragma: no cover - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: new.set_data(data[findices], copy=False) @@ -109,7 +109,7 @@ def __getitem__(self, indices): # Subspace the bounds, if there are any bounds = self.get_bounds(None) if bounds is not None: - bounds_data = bounds.get_data(None, set_fill_value=False) + bounds_data = bounds.get_data(None, _fill_value=False) if bounds_data is not None: findices = list(findices) # if data.ndim <= 1 and not self.has_geometry(): @@ -711,7 +711,7 @@ def _apply_superclass_data_oper( oper_args=(), bounds=True, interior_ring=False, - **oper_kwargs + **oper_kwargs, ): """Define an operation that can be applied to the data array. @@ -859,7 +859,7 @@ def dtype(self): >>> c.dtype = numpy.dtype('float32') """ - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: return data.dtype @@ -873,7 +873,7 @@ def dtype(self): @dtype.setter def dtype(self, value): - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: data.dtype = value @@ -971,7 +971,7 @@ def Units(self): """ # return super().Units - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: # Return the units of the data return data.Units @@ -1203,13 +1203,13 @@ def dtype(self): @dtype.setter def dtype(self, value): # DCH - allow dtype to be set before data c.f. Units - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: data.dtype = value @dtype.deleter def dtype(self): - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None: del data.dtype @@ -1776,9 +1776,7 @@ def contiguous(self, overlap=True): return True if ndim > 2: - raise ValueError( - f"Can't tell if {ndim}-d cells are contiguous" - ) + raise ValueError(f"Can't tell if {ndim}-d cells are contiguous") if nbounds != 2: raise ValueError( @@ -2324,7 +2322,7 @@ def get_filenames(self): interior_ring = self.get_interior_ring(None) if interior_ring is not None: - data = interior_ring.get_data(None, set_fill_value=False) + data = interior_ring.get_data(None, _fill_value=False) if data is not None: out.update(interior_ring.get_filenames()) @@ -2624,13 +2622,13 @@ def set_bounds(self, bounds, copy=True): None """ - data = self.get_data(None, set_fill_value=False) + data = self.get_data(None, _fill_value=False) if data is not None and bounds.shape[: data.ndim] != data.shape: # Check shape raise ValueError( - "Can't set bounds: Incorrect bounds shape {} " - "for data shape {}".format(bounds.shape, data.shape) + f"Can't set bounds: Incorrect bounds shape {bounds.shape} " + f"for data shape {data.shape}" ) if copy: @@ -3445,67 +3443,67 @@ def trunc(self, bounds=True, inplace=False, i=False): i=i, ) -# def identities(self, generator=False): -# """Return all possible identities. -# -# The identities comprise: -# -# * The "standard_name" property. -# * The "id" attribute, preceded by ``'id%'``. -# * The "cf_role" property, preceded by ``'cf_role='``. -# * The "axis" property, preceded by ``'axis='``. -# * The "long_name" property, preceded by ``'long_name='``. -# * All other properties (including "standard_name"), preceded by -# the property name and an ``'='``. -# * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). 
-# * The netCDF variable name, preceded by ``'ncvar%'``. -# -# The identities of the bounds, if present, are included (with the -# exception of the bounds netCDF variable name). -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `id`, `identity` -#ODO -# :Returns: -# -# `list` -# The identities. -# -# **Examples:** -# -# >>> f.properties() -# {'foo': 'bar', -# 'long_name': 'Air Temperature', -# 'standard_name': 'air_temperature'} -# >>> f.nc_get_variable() -# 'tas' -# >>> f.identities() -# ['air_temperature', -# 'long_name=Air Temperature', -# 'foo=bar', -# 'standard_name=air_temperature', -# 'ncvar%tas'] -# -# >>> f.properties() -# {} -# >>> f.bounds.properties() -# {'axis': 'Z', -# 'units': 'm'} -# >>> f.identities() -# ['axis=Z', 'units=m', 'ncvar%z'] -# -# """ -# identities = super().identities() -# -# bounds = self.get_bounds(None) -# if bounds is not None: -# identities.extend( -# [i for i in bounds.identities() if i not in identities] -# ) -# # TODO ncvar AND? -# -# return identities + # def identities(self, generator=False): + # """Return all possible identities. + # + # The identities comprise: + # + # * The "standard_name" property. + # * The "id" attribute, preceded by ``'id%'``. + # * The "cf_role" property, preceded by ``'cf_role='``. + # * The "axis" property, preceded by ``'axis='``. + # * The "long_name" property, preceded by ``'long_name='``. + # * All other properties (including "standard_name"), preceded by + # the property name and an ``'='``. + # * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + # * The netCDF variable name, preceded by ``'ncvar%'``. + # + # The identities of the bounds, if present, are included (with the + # exception of the bounds netCDF variable name). + # + # .. versionadded:: 3.0.0 + # + # .. seealso:: `id`, `identity` + # ODO + # :Returns: + # + # `list` + # The identities. + # + # **Examples:** + # + # >>> f.properties() + # {'foo': 'bar', + # 'long_name': 'Air Temperature', + # 'standard_name': 'air_temperature'} + # >>> f.nc_get_variable() + # 'tas' + # >>> f.identities() + # ['air_temperature', + # 'long_name=Air Temperature', + # 'foo=bar', + # 'standard_name=air_temperature', + # 'ncvar%tas'] + # + # >>> f.properties() + # {} + # >>> f.bounds.properties() + # {'axis': 'Z', + # 'units': 'm'} + # >>> f.identities() + # ['axis=Z', 'units=m', 'ncvar%z'] + # + # """ + # identities = super().identities() + # + # bounds = self.get_bounds(None) + # if bounds is not None: + # identities.extend( + # [i for i in bounds.identities() if i not in identities] + # ) + # # TODO ncvar AND? 
+ # + # return identities @_deprecated_kwarg_check("relaxed_identity") def identity( diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index b7e2e1f9a9..f48f961739 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -1063,7 +1063,7 @@ def __init__( # Force cyclic X axis for particular values of LBHEM if xkey is not None and int_hdr[lbhem] in (0, 1, 2, 4): -# field.cyclic("X", period=360) + # field.cyclic("X", period=360) field.cyclic(xkey, period=360) self.fields.append(field) @@ -1195,8 +1195,9 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): dc = self.implementation.initialise_DimensionCoordinate() dc = self.coord_data(dc, array, bounds, units=_Units[""]) self.implementation.set_properties( - dc, {"standard_name": "atmosphere_hybrid_height_coordinate"}, - copy=False + dc, + {"standard_name": "atmosphere_hybrid_height_coordinate"}, + copy=False, ) dc = self.coord_axis(dc, axiscode) dc = self.coord_positive(dc, axiscode, _axis["z"]) @@ -1220,8 +1221,7 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): ac = self.coord_data(ac, array, bounds, units=_Units["1"]) ac.id = "UM_atmosphere_hybrid_height_coordinate_b" self.implementation.set_properties( - ac, {"long_name": "height based hybrid coeffient b"}, - copy=False + ac, {"long_name": "height based hybrid coeffient b"}, copy=False ) key_b = self.implementation.set_domain_ancillary( field, ac, axes=[_axis["z"]], copy=False diff --git a/cf/test/test_CoordinateReference.py b/cf/test/test_CoordinateReference.py index e534b16d72..463f9535f3 100644 --- a/cf/test/test_CoordinateReference.py +++ b/cf/test/test_CoordinateReference.py @@ -230,9 +230,9 @@ def test_CoordinateReference_get__getitem__(self): self.vcr["standard_name"], self.vconversion.get_parameter("standard_name"), ) - self.assertTrue( - self.vcr.get("earth_radius") - is self.datum.get_parameter("earth_radius") + self.assertEqual( + self.vcr.get("earth_radius"), + self.datum.get_parameter("earth_radius") ) self.assertIsNone(self.vcr.get("orog")) self.assertEqual(self.vcr.get("orog", "qwerty"), "qwerty") @@ -255,11 +255,11 @@ def test_CoordinateReference_get__getitem__(self): self.hcr["grid_mapping_name"], self.hconversion.get_parameter("grid_mapping_name"), ) - self.assertIs( + self.assertEqual( self.hcr.get("earth_radius"), self.datum.get_parameter("earth_radius"), ) - self.assertIs( + self.assertEqual( self.hcr.get("grid_north_pole_latitude", "qwerty"), self.hconversion.get_parameter("grid_north_pole_latitude"), ) @@ -269,9 +269,6 @@ def test_CoordinateReference_get__getitem__(self): _ = self.hcr["qwerty"] -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() From fc84a18aae0f932834440575a8957bf33dc76130 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 24 Mar 2021 00:34:36 +0000 Subject: [PATCH 04/53] tests pass --- cf/aggregate.py | 1 + cf/bounds.py | 4 +- cf/field.py | 117 +++++++--------------------- cf/mixin/propertiesdata.py | 66 ++++++++-------- cf/test/test_CoordinateReference.py | 2 +- cf/test/test_aggregate.py | 2 +- 6 files changed, 66 insertions(+), 126 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 00632bdf46..19030955df 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -239,6 +239,7 @@ def __init__( strict=strict_identities, relaxed=relaxed_identities, nc_only=ncvar_identities, + default=None ) if field_identity: diff --git a/cf/bounds.py b/cf/bounds.py index 2ea9b2fbd1..b870f5bc2a 100644 --- a/cf/bounds.py +++ b/cf/bounds.py 
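# The test_CoordinateReference.py changes above swap assertIs for
# assertEqual.  A small standalone sketch of the distinction being
# relied on (the value below is made up for illustration): "is"
# compares object identity, "==" compares values, and an equal copy
# of a parameter value need not be the same object.
radius = {"earth_radius": 6371007.0}
copied = dict(radius)        # an equal but distinct object
assert radius == copied      # the comparison assertEqual makes
assert radius is not copied  # why an identity check can fail for copies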
@@ -196,9 +196,7 @@ def contiguous(self, overlap=True, direction=None, period=None, verbose=1): return True if ndim > 2: - raise ValueError( - f"Can't tell if {ndim}-d cells are contiguous" - ) + raise ValueError(f"Can't tell if {ndim}-d cells are contiguous") if nbounds != 2: raise ValueError( diff --git a/cf/field.py b/cf/field.py index 748bc60b71..1fd7b36df9 100644 --- a/cf/field.py +++ b/cf/field.py @@ -10756,7 +10756,6 @@ def collapse( ).value(None) if dc is not None and not dc.has_bounds(): dc.set_bounds(dc.create_bounds(cellsize=0)) - # --- End: if continue @@ -10849,7 +10848,6 @@ def collapse( if not g_weights: g_weights = None - # --- End: if axis = collapse_axes.key() @@ -11018,7 +11016,7 @@ def collapse( ) for key, value in c.filter_by_axis("or", axis).items(): logger.info( - " Removing {!r}".format(value) + f" Removing {value.construct_type}" ) # pragma: no cover f.del_construct(key) @@ -11028,7 +11026,7 @@ def collapse( c = f.auxiliary_coordinates.filter_by_naxes(gt(1)) for key, value in c.filter_by_axis("or", axis).items(): logger.info( - " Removing {!r}".format(value) + f" Removing {value.construct_type} {key!r}" ) # pragma: no cover f.del_construct(key) @@ -11044,15 +11042,18 @@ def collapse( for key, aux in f.auxiliary_coordinates.filter_by_axis( "exact", axis ).items(): - logger.info( - "key, aux = {} {!r}".format(key, repr(aux)) - ) # pragma: no cover + logger.info(f"key = {key}") # pragma: no cover d = aux[0] + # TODODASK: remove once dask. For some reason, + # without this we now get LAMA related failures in + # Partition.nbytes ... + _ = aux.dtype + if aux.has_bounds() or (aux[:-1] != aux[1:]).any(): logger.info( - " Removing {!r}".format(aux) + f" Removing {aux.construct_type} {key!r}" ) # pragma: no cover f.del_construct(key) @@ -11062,12 +11063,11 @@ def collapse( aux.set_data(d.data, copy=False) if d.has_bounds(): aux.bounds.set_data(d.bounds.data, copy=False) - # --- End: for # Reset the axis size f.domain_axes[axis].set_size(1) logger.info( - "Changing axis size to 1: {}".format(axis) + f"Changing axis size to 1: {axis}" ) # pragma: no cover dim = f.dimension_coordinates.filter_by_axis( @@ -11108,14 +11108,13 @@ def collapse( else: raise ValueError( "Can't collapse: Bad parameter value: " - "coordinate={!r}".format(coordinate) + f"coordinate={coordinate!r}" ) bounds = self._Bounds(data=Data([bounds_data], units=units)) dim.set_data(data, copy=False) dim.set_bounds(bounds, copy=False) - # --- End: for # -------------------------------------------------------- # Update the cell methods @@ -11129,7 +11128,6 @@ def collapse( over=over, verbose=verbose, ) - # --- End: for # ------------------------------------------------------------ # Return the collapsed field (or the classification array) @@ -11225,8 +11223,6 @@ def _ddddd( return classification, n, lower, upper - # --- End: def - def _time_interval( classification, n, @@ -11301,7 +11297,6 @@ def _time_interval( group_by_coords, extra_condition, ) - # --- End: if return classification, n @@ -11380,7 +11375,6 @@ def _time_interval_over( group_by_coords, extra_condition, ) - # --- End: if return classification, n @@ -11436,7 +11430,6 @@ def _data_interval( group_by_coords, extra_condition, ) - # --- End: if return classification, n @@ -11509,10 +11502,7 @@ def _selection( # for i in range(1, max(1, int(float(len(x))/group_span))): # n += 1 # classification[x[i*group_span:(i + 1)*group_span]] = n - # # --- End: if - # n += 1 - # --- End: for return classification, n @@ -11543,7 +11533,6 @@ def 
_discern_runs(classification, within=False): if classification[i] >= 0: classification[i:j] = n n += 1 - # --- End: for if classification[x[-1]] >= 0: classification[x[-1] :] = n @@ -11569,7 +11558,6 @@ def _discern_runs_within(classification, coord): classification[start : i + 1] = n start = i + 1 n += 1 - # --- End: for return classification @@ -11635,7 +11623,6 @@ def _tyu(coord, group_by, time_interval): TimeDuration.__class__.__name__, coord.Units ) ) - # --- End: if return (lower, upper, lower_limit, upper_limit) @@ -11680,7 +11667,6 @@ def _group_weights(weights, iaxis, index): indices[iaxes.index(iaxis)] = index weights[iaxes] = value[tuple(indices)] break - # --- End: for return weights @@ -11765,7 +11751,6 @@ def _group_weights(weights, iaxis, index): # Set group to None group = None - # --- End: if if group is not None: if isinstance(group, Query): @@ -11786,7 +11771,6 @@ def _group_weights(weights, iaxis, index): start = end end += group n += 1 - # --- End: while if group_span is True or group_span is None: # Use the group definition as the group span @@ -11891,7 +11875,6 @@ def _group_weights(weights, iaxis, index): ).value(None) if coord is None: raise ValueError("asdad8777787 TODO") - # --- End: if classification = numpy_empty((axis_size,), int) classification.fill(-1) @@ -11911,9 +11894,8 @@ def _group_weights(weights, iaxis, index): elif group_span is True: raise ValueError( "Can't collapse: Can't set group_span=True when " - "group={!r}".format(group) + f"group={group!r}" ) - # --- End: if if classification is None: if over == "days": @@ -11952,11 +11934,8 @@ def _group_weights(weights, iaxis, index): elif isinstance(over_days, TimeDuration): if over_days.Units.istime and over_days < Data(1, "day"): raise ValueError( - "Bad parameter value: over_days={!r}".format( - over_days - ) + f"Bad parameter value: over_days={over_days!r}" ) - # --- End: if coordinate = "minimum" @@ -12086,9 +12065,8 @@ def _group_weights(weights, iaxis, index): else: raise ValueError( "over_years is not a whole number of calendar " - "years: {!r}".format(over_years) + f"years: {over_years!r}" ) - # --- End: if coordinate = "minimum" @@ -12126,7 +12104,7 @@ def _group_weights(weights, iaxis, index): # Keep a record of the first cell mdHMS0 = mdHMS logger.info( - " mdHMS0 = {!r}".format(mdHMS0) + f" mdHMS0 = {mdHMS0!r}" ) # pragma: no cover elif mdHMS.equals(mdHMS0): # We've got repeat of the first cell, which @@ -12135,7 +12113,7 @@ def _group_weights(weights, iaxis, index): break logger.info( - " mdHMS = {!r}".format(mdHMS) + f" mdHMS = {mdHMS!r}" ) # pragma: no cover if over_years is None: @@ -12174,7 +12152,6 @@ def _group_weights(weights, iaxis, index): parameter="over_years", extra_condition=mdHMS, ) - # --- End: for elif within == "days": # ---------------------------------------------------- @@ -12209,10 +12186,9 @@ def _group_weights(weights, iaxis, index): ): # % Data(1, 'day'): # % within_days: raise ValueError( - "Can't collapse: within_days={!r} is not an " - "exact factor of 1 day".format(within_days) + f"Can't collapse: within_days={within_days!r} " + "is not an exact factor of 1 day" ) - # --- End: if if isinstance(within_days, TimeDuration): # ------------------------------------------------ @@ -12262,7 +12238,7 @@ def _group_weights(weights, iaxis, index): elif group_span is True: raise ValueError( "Can't collapse: Can't set group_span=True when " - "within_days={!r}".format(within_days) + f"within_days={within_days!r}" ) elif within == "years": @@ -12351,22 +12327,20 @@ def 
_group_weights(weights, iaxis, index): elif over is not None: raise ValueError( - "Can't collapse: Bad 'over' syntax: {!r}".format(over) + f"Can't collapse: Bad 'over' syntax: {over!r}" ) elif within is not None: raise ValueError( - "Can't collapse: Bad 'within' syntax: " - "{!r}".format(within) + f"Can't collapse: Bad 'within' syntax: {within!r}" ) - # --- End: if if classification is not None: # --------------------------------------------------------- # Collapse each group # --------------------------------------------------------- logger.info( - " classification = {}".format(classification) + f" classification = {classification}" ) # pragma: no cover unique = numpy_unique(classification) @@ -12379,22 +12353,6 @@ def _group_weights(weights, iaxis, index): pc = self.subspace(**{axis: index}) - # if group_span is not None: - # if over == 'days': - # t = pc.dimension_coordinate('T').copy() - # t.units = 'days since ' + str(t.reference_datetime) - # logger.info('{} {} {} {}'.format( - # t.bounds.Units, u, len(index), - # int(t.bounds.range().ceil() - # ) - # ) - # if over_days != int(t.bounds.range().ceil()): - # classification[index] = ignore_n - # ignore_n -= 1 - # continue - # - # # --- End: if - # ---------------------------------------------------- # Ignore groups that don't meet the specified criteria # ---------------------------------------------------- @@ -12410,8 +12368,6 @@ def _group_weights(weights, iaxis, index): ignore_n -= 1 continue else: - # coord = pc.coordinates.filter_by_axis( - # 'exact', axis).value(None) if coord is None: raise ValueError( "Can't collapse: Need an unambiguous 1-d " @@ -12443,7 +12399,6 @@ def _group_weights(weights, iaxis, index): classification[index] = ignore_n ignore_n -= 1 continue - # --- End: if if ( group_contiguous @@ -12458,7 +12413,6 @@ def _group_weights(weights, iaxis, index): classification[index] = ignore_n ignore_n -= 1 continue - # --- End: if if regroup: continue @@ -12468,7 +12422,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- w = _group_weights(weights, iaxis, index) logger.info( - " Collapsing group {}: {!r}".format(u, pc) + f" Collapsing group {u}:" ) # pragma: no cover fl.append( @@ -12486,7 +12440,6 @@ def _group_weights(weights, iaxis, index): _update_cell_methods=False, ) ) - # --- End: for if regroup: # return the numpy array @@ -12498,16 +12451,12 @@ def _group_weights(weights, iaxis, index): # Still here? 
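# The hunks above replace str.format calls with f-strings.  A tiny
# self-contained check that both spellings of one of the messages are
# equivalent (the value of over_days is made up for illustration):
over_days = 30
old = "Bad parameter value: over_days={!r}".format(over_days)
new = f"Bad parameter value: over_days={over_days!r}"
assert old == new == "Bad parameter value: over_days=30"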
if not fl: c = "contiguous " if group_contiguous else "" - s = ( - " spanning {}".format(group_span) - if group_span is not False - else "" - ) + s = f" spanning {group_span}" if group_span is not False else "" if within is not None: - s = " within {}{}".format(within, s) + s = f" within {within}{s}" raise ValueError( - "Can't collapse: No {}groups{} were identified".format(c, s) + f"Can't collapse: No {c}groups{s} were identified" ) if len(fl) == 1: @@ -12523,7 +12472,6 @@ def _group_weights(weights, iaxis, index): c.set_bounds(c.create_bounds()) except Exception: pass - # --- End: for # -------------------------------------------------------- # Sort the list of collapsed fields @@ -12547,8 +12495,7 @@ def _group_weights(weights, iaxis, index): try: f = self.concatenate(fl, axis=iaxis, _preserve=False) except ValueError as error: - raise ValueError("Can't collapse: {0}".format(error)) - # --- End: if + raise ValueError(f"Can't collapse: {error}") if squeeze and f.domain_axes[axis].get_size() == 1: # Remove a totally collapsed axis from the field's @@ -12662,15 +12609,12 @@ def _update_cell_methods( lastcm.over = over cell_method = None - # --- End: if if cell_method is not None: self.set_construct(cell_method) logger.info( - " Modified cell methods = {}".format( - self.cell_methods.ordered() - ) + f" Modified cell methods = {self.cell_methods.ordered()}" ) # pragma: no cover @_deprecated_kwarg_check("axes") @@ -12735,7 +12679,6 @@ def direction(self, identity, axes=None, **kwargs): for key, coord in self.dimension_coordinates.items(): if axis == self.get_data_axes(key)[0]: return coord.direction() - # --- End: for return True @@ -12764,7 +12707,6 @@ def directions(self): if not direction: axis = self.get_data_axes(key)[0] out[axis] = dc.direction() - # --- End: for return out @@ -13075,7 +13017,7 @@ def indices(self, *mode, **kwargs): if len(c) != 1: raise ValueError( "Can't find indices: Ambiguous axis or axes: " - "{!r}".format(identity) + f"{identity!r}" ) key, construct = dict(c).popitem() @@ -13091,7 +13033,6 @@ def indices(self, *mode, **kwargs): ) unique_axes.update(sorted_axes) - # --- End: for if len(unique_axes) < n_axes: raise ValueError( diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 7c58d29715..049f3cd901 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -2353,7 +2353,7 @@ def hardmask(self): data = self.get_data(None, _fill_value=False) if data is None: raise AttributeError( - f"{self.__class__.__name__} doesn't have attribute 'hardmask'" + f"{self.__class__.__name__} doesn't have attribute 'hardmask'" ) return data.hardmask @@ -4771,41 +4771,41 @@ def identity( def identities(self, generator=False, **kwargs): """Return all possible identities. - The identities comprise: + The identities comprise: - * The "standard_name" property. - * The "id" attribute, preceded by ``'id%'``. - * The "cf_role" property, preceded by ``'cf_role='``. - * The "axis" property, preceded by ``'axis='``. - * The "long_name" property, preceded by ``'long_name='``. - * All other properties (including "standard_name"), preceded by - the property name and an ``'='``. - * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). - * The netCDF variable name, preceded by ``'ncvar%'``. - - .. versionadded:: 3.0.0 - - .. seealso:: `id`, `identity` - TODO - :Returns: - - `list` - The identities. + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. 
+ * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. + * All other properties (including "standard_name"), preceded by + the property name and an ``'='``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The netCDF variable name, preceded by ``'ncvar%'``. - **Examples:** + .. versionadded:: 3.0.0 - >>> f.properties() - {'foo': 'bar', - 'long_name': 'Air Temperature', - 'standard_name': 'air_temperature'} - >>> f.nc_get_variable() - 'tas' - >>> f.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] + .. seealso:: `id`, `identity` + TODO + :Returns: + + `list` + The identities. + + **Examples:** + + >>> f.properties() + {'foo': 'bar', + 'long_name': 'Air Temperature', + 'standard_name': 'air_temperature'} + >>> f.nc_get_variable() + 'tas' + >>> f.identities() + ['air_temperature', + 'long_name=Air Temperature', + 'foo=bar', + 'standard_name=air_temperature', + 'ncvar%tas'] """ id_identity = "" diff --git a/cf/test/test_CoordinateReference.py b/cf/test/test_CoordinateReference.py index 463f9535f3..e56745da84 100644 --- a/cf/test/test_CoordinateReference.py +++ b/cf/test/test_CoordinateReference.py @@ -232,7 +232,7 @@ def test_CoordinateReference_get__getitem__(self): ) self.assertEqual( self.vcr.get("earth_radius"), - self.datum.get_parameter("earth_radius") + self.datum.get_parameter("earth_radius"), ) self.assertIsNone(self.vcr.get("orog")) self.assertEqual(self.vcr.get("orog", "qwerty"), "qwerty") diff --git a/cf/test/test_aggregate.py b/cf/test/test_aggregate.py index c47b1c3357..247e5808c4 100644 --- a/cf/test/test_aggregate.py +++ b/cf/test/test_aggregate.py @@ -144,7 +144,7 @@ def test_basic_aggregate(self): del t.standard_name del c.standard_name - x = cf.aggregate([c, t]) + x = cf.aggregate([c, t], verbose=1) self.assertEqual(len(x), 2) t.long_name = "qwerty" From 87ed4b74e8ea5a03e31e81dae9808a33ddf68cf2 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 25 Mar 2021 00:24:41 +0000 Subject: [PATCH 05/53] tests pass --- cf/aggregate.py | 42 +- cf/constructs.py | 11 +- cf/field.py | 1393 ++++++++++----------------- cf/read_write/netcdf/netcdfwrite.py | 3 - cf/read_write/um/umread.py | 1 - cf/test/create_test_files.py | 4 - cf/test/test_AuxiliaryCoordinate.py | 17 +- cf/test/test_CellMeasure.py | 9 +- cf/test/test_CellMethod.py | 5 - cf/test/test_Count.py | 7 +- cf/test/test_DimensionCoordinate.py | 15 +- cf/test/test_DomainAncillary.py | 9 +- cf/test/test_DomainAxis.py | 9 +- cf/test/test_Field.py | 265 ++--- cf/test/test_FieldAncillary.py | 13 +- cf/test/test_FieldList.py | 11 +- cf/test/test_Index.py | 7 +- cf/test/test_List.py | 7 +- cf/test/test_Partition.py | 3 - cf/test/test_Query.py | 4 - cf/test/test_Regrid.py | 4 - cf/test/test_TimeDuration.py | 2 - cf/test/test_aggregate.py | 3 - cf/test/test_cfa.py | 2 - cf/test/test_collapse.py | 6 +- cf/test/test_decorators.py | 6 - cf/test/test_dsg.py | 4 - cf/test/test_external.py | 3 - cf/test/test_formula_terms.py | 3 - cf/test/test_functions.py | 7 - cf/test/test_gathering.py | 4 - cf/test/test_general.py | 2 - cf/test/test_geometry.py | 13 +- cf/test/test_groups.py | 6 +- cf/test/test_pp.py | 3 - cf/test/test_read_write.py | 27 +- cf/test/test_style.py | 3 - 37 files changed, 696 insertions(+), 1237 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 19030955df..99509ddd78 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -239,7 +239,7 @@ def __init__( 
strict=strict_identities, relaxed=relaxed_identities, nc_only=ncvar_identities, - default=None + default=None, ) if field_identity: @@ -301,7 +301,7 @@ def __init__( # Dictionaries mapping auxiliary coordinate identifiers # to their auxiliary coordinate objects - aux_1d = dict(f.auxiliary_coordinates.filter_by_naxes(1)) + aux_1d = dict(f.auxiliary_coordinates(view=True).filter_by_naxes(1)) # A set containing the identity of each coordinate # @@ -314,13 +314,13 @@ def __init__( # ------------------------------------------------------------ # Coordinate references (formula_terms and grid mappings) # ------------------------------------------------------------ - refs = f.coordinate_references + refs = f.coordinate_references(view=True) if not refs: self.coordrefs = () else: self.coordrefs = list(refs.values()) - for axis in f.domain_axes: + for axis in f.domain_axes(view=True): # List some information about each 1-d coordinate which # spans this axis. The order of elements is arbitrary, as @@ -333,7 +333,9 @@ def __init__( info_dim = [] # dim_coord = item(axis) - dim_coords = f.dimension_coordinates.filter_by_axis("and", axis) + dim_coords = f.dimension_coordinates(view=True).filter_by_axis( + "and", axis + ) dim_coord = dim_coords.value(None) dim_coord_key = dim_coords.key(None) dim_identity = None @@ -485,9 +487,9 @@ def __init__( # N-d auxiliary coordinates # ------------------------------------------------------------ self.nd_aux = {} - for key, nd_aux_coord in f.auxiliary_coordinates.filter_by_naxes( - gt(1) - ).items(): + for key, nd_aux_coord in ( + f.auxiliary_coordinates(view=True).filter_by_naxes(gt(1)).items() + ): # Find axes' canonical identities axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] @@ -524,7 +526,7 @@ def __init__( # Field ancillaries # ------------------------------------------------------------ self.field_anc = {} - for key, field_anc in f.field_ancillaries.items(): + for key, field_anc in f.field_ancillaries(view=True).items(): # Find this field ancillary's identity identity = self.field_ancillary_has_identity_and_data(field_anc) @@ -566,12 +568,12 @@ def __init__( # Firstly process domain ancillaries which are used in # coordinate references - for ref in f.coordinate_references.values(): + for ref in f.coordinate_references(view=True).values(): for ( term, identifier, ) in ref.coordinate_conversion.domain_ancillaries().items(): - key = f.domain_ancillaries(identifier).key(None) + key = f.domain_ancillaries(view=True)(identifier).key(None) if key is None: continue @@ -605,7 +607,7 @@ def __init__( # Secondly process domain ancillaries which are not being used # in coordinate references - for key, anc in f.domain_ancillaries.items(): + for key, anc in f.domain_ancillaries(view=True).items(): if key in ancs_in_refs: continue @@ -637,7 +639,7 @@ def __init__( self.msr = {} info_msr = {} copied_field = False - for key, msr in f.cell_measures.items(): + for key, msr in f.cell_measures(view=True).items(): # If the measure is an external variable, remove it because # the dimensions are not known so there is no way to tell if the # aggregation should have changed it. 
(This is sufficiently @@ -872,7 +874,7 @@ def canonical_cell_methods(self, rtol=None, atol=None): """ _canonical_cell_methods = self._canonical_cell_methods - cell_methods = self.field.cell_methods.ordered() + cell_methods = self.field.cell_methods(view=True).ordered() # cms = getattr(self.field, 'CellMethods', None) # TODO if not cell_methods: return None @@ -1940,8 +1942,10 @@ def aggregate( axis_items = meta[0].axis.items() for axis in axes: # TODO IMPORTANT: should this be filter_by_axis ???? - coords = meta[0].field.coordinates.filter_by_identity( - "exact", axis + coords = ( + meta[0] + .field.coordinates(view=True) + .filter_by_identity("exact", axis) ) coord = coords.value(default=None) if coord is None: @@ -2196,9 +2200,9 @@ def _create_hash_and_first_values( continue # Still here? - dim_coord = m.field.dimension_coordinates.filter_by_axis( - "and", axis - ) + dim_coord = m.field.dimension_coordinates( + view=True + ).filter_by_axis("and", axis) # Find the sort indices for this axis ... if dim_coord is not None: diff --git a/cf/constructs.py b/cf/constructs.py index 79706f2cc0..c640c49fa3 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -154,7 +154,7 @@ def _matching_values(self, value0, construct, value1, basic=False): # # return self.constructs[da_key] - def filter_by_identity(self, *identities): + def filter_by_identity(self, *identities, view=False, **kwargs): """Select metadata constructs by identity. .. versionadded:: 3.0.0 @@ -205,10 +205,13 @@ def filter_by_identity(self, *identities): .. note:: This is an extension to the functionality of `cfdm.Constucts.filter_by_identity`. + TODO + kwargs: optional Additional parameters for configuring each construct's `identities` method. ``generator=True`` is passed by - default. + default, and ``ctype`` is inferred from the + *identities* parmaeter. .. versionadded:: 3.9.0 @@ -246,4 +249,6 @@ def filter_by_identity(self, *identities): ctype = [i for i in "XYZT" if i in identities] - return super().filter_by_identity(*identities, ctype=ctype) + return super().filter_by_identity( + *identities, view=view, ctype=ctype, **kwargs + ) diff --git a/cf/field.py b/cf/field.py index 1fd7b36df9..cab3242987 100644 --- a/cf/field.py +++ b/cf/field.py @@ -375,7 +375,6 @@ def __init__( flags = getattr(source, "Flags", None) if flags is not None: self.Flags = flags.copy() - # --- End: if def __getitem__(self, indices): """Return a subspace of the field construct defined by indices. @@ -497,7 +496,7 @@ def __getitem__(self, indices): # Set sizes of domain axes data_axes = new.get_data_axes() - domain_axes = new.domain_axes + domain_axes = new.domain_axes(view=True) for axis, size in zip(data_axes, new_data.shape): domain_axes[axis].set_size(size) @@ -524,7 +523,6 @@ def __getitem__(self, indices): dice.append(indices[data_axes.index(axis)]) else: dice.append(slice(None)) - # --- End: for # Generally we do not apply an auxiliary mask to the # metadata items, but for DSGs we do. 
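# A hedged sketch of the identity pre-processing added to
# filter_by_identity in the cf/constructs.py hunk above: bare
# construct keys are promoted to "key%..." identities and any of the
# single letters X, Y, Z and T are collected as coordinate types.
# The helper below only mirrors that pre-processing for illustration;
# it is not the cf.Constructs API itself.
def _preprocess_identities(identities, existing_keys):
    identities = list(identities)
    for n, identity in enumerate(identities):
        if identity in existing_keys:
            identities[n] = "key%" + identity

    ctype = [i for i in "XYZT" if i in identities]
    return identities, ctype

print(_preprocess_identities(["dimensioncoordinate0", "T"],
                             {"dimensioncoordinate0"}))
# -> (['key%dimensioncoordinate0', 'T'], ['T'])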
@@ -545,7 +543,6 @@ def __getitem__(self, indices): break mask = mask.squeeze(i) - # --- End: for if iaxes is None: item_mask = None @@ -555,15 +552,12 @@ def __getitem__(self, indices): for i, axis in enumerate(construct_axes): if axis not in data_axes: mask1.inset_dimension(i) - # --- End: for item_mask.append(mask1) - # --- End: for if item_mask: needs_slicing = True dice = [auxiliary_mask[0], item_mask] + dice - # --- End: if logger.debug(" dice = {}".format(dice)) # pragma: no cover @@ -575,8 +569,6 @@ def __getitem__(self, indices): axes=construct_axes, copy=False, ) - # --- End: for - # --- End: if new.set_data(new_data, axes=data_axes, copy=False) @@ -758,9 +750,10 @@ def analyse_items(self, relaxed_identities=None): if relaxed_identities is None: relaxed_identities = cf_relaxed_identities() - dimension_coordinates = self.dimension_coordinates + dimension_coordinates = self.dimension_coordinates(view=True) + auxiliary_coordinates = self.auxiliary_coordinates(view=True) - for axis in self.domain_axes: + for axis in self.domain_axes(view=True): dims = dimension_coordinates.filter_by_axis("and", axis) if len(dims) == 1: @@ -776,7 +769,6 @@ def analyse_items(self, relaxed_identities=None): if getattr(dim, ctype, False): identity = ctype break - # --- End: if if identity is None and relaxed_identities: identity = dim.identity(relaxed=True, default=None) @@ -796,7 +788,7 @@ def analyse_items(self, relaxed_identities=None): continue else: - auxs = self.auxiliary_coordinates.filter_by_axis("exact", axis) + auxs = auxiliary_coordinates.filter_by_axis("exact", axis) if len(auxs) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly @@ -829,11 +821,9 @@ def analyse_items(self, relaxed_identities=None): axis_to_aux[axis] = key id_to_aux[identity] = key continue - # --- End: if # Still here? 
Then this axis is undefined undefined_axes.append(axis) - # --- End: for return { "axis_to_id": axis_to_id, @@ -886,7 +876,6 @@ def _is_broadcastable(self, shape): for n, m in zip(shape1[::-1], shape0[::-1]): if n != m and n != 1: return False - # --- End: for return True @@ -1004,7 +993,6 @@ def _binary_operation_old(self, other, method): ), ) ) - # --- End: if # Find the axis names which are present in both fields matching_ids = set(s["id_to_axis"]).intersection(v["id_to_axis"]) @@ -1028,7 +1016,6 @@ def _binary_operation_old(self, other, method): "Can't combine fields: {!r} axis defined by auxiliary " "in only 1 field".format(identity) ) # TODO ~WRONG - # --- End: for # ------------------------------------------------------------ # For matching dimension coordinates check that they have @@ -1220,7 +1207,7 @@ def _binary_operation_old(self, other, method): # data arrays and are both size 1 => this axis to # be omitted from the result field remove_size1_axes0.append(axis0) - # --- End: for + logger.debug( "1: s['size1_broadcast_axes'] = {}".format( s["size1_broadcast_axes"] @@ -1289,7 +1276,7 @@ def _binary_operation_old(self, other, method): else: # Defined but unmatched axis axes_unM.append(axis0) - # --- End: for + logger.debug( "2: axes_unD, axes_unM, axes0_M = {} {} {}".format( axes_unD, axes_unM, axes0_M @@ -1338,7 +1325,7 @@ def _binary_operation_old(self, other, method): else: # Defined but unmatched axis axes_unM.append(axis1) - # --- End: for + logger.debug( "2: axes_unD , axes_unM , axes0_M = {} {} {}".format( axes_unD, axes_unM, axes0_M @@ -1378,7 +1365,6 @@ def _binary_operation_old(self, other, method): for axis0, axis1 in axis0_to_axis1.items(): if field1.direction(axis1) != field0.direction(axis0): field1.flip(axis1, inplace=True) - # --- End: for # ------------------------------------------------------------ # 2f. 
Insert size 1 axes into the data array of field0 to @@ -1417,7 +1403,6 @@ def _binary_operation_old(self, other, method): start_of_matched0 += 1 data_axes0 = field0.get_data_axes() - # --- End: if # ------------------------------------------------------------ # Insert size 1 axes into the data array of field1 to @@ -1454,7 +1439,6 @@ def _binary_operation_old(self, other, method): start_of_unmatched1 += 1 data_axes1 = field1.get_data_axes() - # --- End: if # ------------------------------------------------------------ # Insert size 1 axes into the data array of field0 to @@ -1481,7 +1465,7 @@ def _binary_operation_old(self, other, method): s["new_size1_axes"].append(axis0) data_axes0 = field0.get_data_axes() - # --- End: if + logger.debug( "2: axis0_to_axis1 = {}".format(axis0_to_axis1) ) # pragma: no cover @@ -1556,24 +1540,29 @@ def _binary_operation_old(self, other, method): refs0 = dict(field0.coordinate_references) refs1 = dict(field1.coordinate_references) + field1_dimension_coordinates = field1.dimension_coordinates(view=True) + field1_auxiliary_coordinates = field1.auxiliary_coordinates(view=True) + field1_coordinate_references = field1.coordinate_references(view=True) + field1_domain_ancillaries = field1_domain_ancillaries(view=True) + field1_domain_axes = field1.domain_axes(view=True) + + # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) + field0_domain_ancillaries = field0_domain_ancillaries(view=True) + c = field0.constructs.filter_by_type( + "auxiliary_coordinate", "domain_ancillary", view=True + ) + for axis0 in s["size1_broadcast_axes"] + s["new_size1_axes"]: axis1 = axis0_to_axis1[axis0] - # field0._Axes[axis0] = field1._Axes[axis1] - field0.set_construct(field1.domain_axes[axis1], key=axis0) - logger.debug( - "4: field0 domain axes = {}".format(field0.domain_axes) - ) # pragma: no cover - logger.debug( - "4: field1 domain axes = {}".format(field1.domain_axes) - ) # pragma: no cover + + field0.set_construct(field1_domain_axes[axis1], key=axis0) # Copy field1 1-d coordinates for this axis to field0 # if axis1 in field1.Items.d: - if axis1 in field1.dimension_coordinates: + if axis1 in field1_dimension_coordinates: insert_dim[axis1] = [axis0] - # for key1 in field1.Items(role='a', axes_all=set((axis1,))): - for key1 in field1.auxiliary_coordinates.filter_by_axis( + for key1 in field1_auxiliary_coordinates.filter_by_axis( "exact", axis1 ): insert_aux[key1] = [axis0] @@ -1585,23 +1574,9 @@ def _binary_operation_old(self, other, method): for key1, ref1 in refs1.items(): if axis1 not in field1.coordinate_reference_domain_axes(key1): continue - # insert_ref.add(key1) - # for identifier1 in ref1.ancillaries.values(): - # key1 = field1.key(identifier1, exact=True, role='c') - # if key1 is not None: - # axes0 = [axis1_to_axis0[axis]ct2', 'dim1', 'dim2', - # 'fav0', 'fav1', 'fav2', 'fav3', 'msr0', - # 'ref1'] - # 5: field1.Items().keys() = ['aux0', 'aux1', 'aux2', 'c - # for axis in field1.Items.axes(key1)] - # insert_domain_anc[key1] = axes0 - # --- End: for # Remove all field0 auxiliary coordinates and domain # ancillaries which span this axis - c = field0.constructs.filter_by_type( - "auxiliary_coordinate", "domain_ancillary" - ) remove_items.update(c.filter_by_axis("and", axis0)) # Remove all field0 coordinate references which span this @@ -1612,14 +1587,13 @@ def _binary_operation_old(self, other, method): ref0 = refs0.pop(key0) remove_items.add(key0) remove_items.update( - field0.domain_ancillaries( + field0_domain_ancillaries( *tuple( 
ref0.coordinate_conversion.domain_ancillaries().values() - ) + ), + view=True, ) ) - # --- End: for - # --- End: for # ------------------------------------------------------------ # Consolidate auxiliary coordinates for matching axes @@ -1636,16 +1610,13 @@ def _binary_operation_old(self, other, method): # spanning the same axes which has the same identity and a # size-1 data array. # ------------------------------------------------------------- - auxs1 = dict(field1.auxiliary_coordinates) - logger.debug( - "5: field0.auxs() = {}".format(field0.auxiliary_coordinates) - ) # pragma: no cover + auxs1 = dict(field1_auxiliary_coordinates) logger.debug("5: field1.auxs() = {}".format(auxs1)) # pragma: no cover logger.debug( "5: remove_items = {}".format(remove_items) ) # pragma: no cover - for key0, aux0 in field0.auxiliary_coordinates.items(): + for key0, aux0 in field0_auxiliary_coordinates.items(): if key0 in remove_items: # Field0 auxiliary coordinate has already marked for # removal @@ -1700,24 +1671,21 @@ def _binary_operation_old(self, other, method): found_equivalent_auxiliary_coordinates = True del auxs1[key1] break - # --- End: for if not found_equivalent_auxiliary_coordinates: remove_items.add(key0) - # --- End: for # ------------------------------------------------------------ # Copy field1 auxiliary coordinates which do not span any # matching axes to field0 # ------------------------------------------------------------ - for key1 in field1.auxiliary_coordinates: + for key1 in field1_auxiliary_coordinates: if key1 in insert_aux: continue axes1 = field1.constructs.data_axes()[key1] if set(axes1).isdisjoint(matching_axis1_to_axis0): insert_aux[key1] = [axis1_to_axis0[axis1] for axis1 in axes1] - # --- End: for # ------------------------------------------------------------ # Insert field1 items into field0 @@ -1752,7 +1720,7 @@ def _binary_operation_old(self, other, method): for key1, axes0 in insert_dim.items(): try: key0 = field0.set_construct( - field1.dimension_coordinates[key1], axes=axes0 + field1_dimension_coordinates[key1], axes=axes0 ) except ValueError: # There was some sort of problem with the insertion, so @@ -1769,7 +1737,7 @@ def _binary_operation_old(self, other, method): for key1, axes0 in insert_aux.items(): try: key0 = field0.set_construct( - field1.auxiliary_coordinates[key1], axes=axes0 + field1_auxiliary_coordinates[key1], axes=axes0 ) except ValueError: # There was some sort of problem with the insertion, so @@ -1786,7 +1754,7 @@ def _binary_operation_old(self, other, method): for key1, axes0 in insert_domain_anc.items(): try: key0 = field0.set_construct( - field1.domain_ancillaries[key1], axes=axes0 + field1_domain_ancillaries[key1], axes=axes0 ) except ValueError as error: # There was some sort of problem with the insertion, so @@ -1819,7 +1787,7 @@ def _binary_operation_old(self, other, method): # after removing any coordinates and domain ancillaries) # ------------------------------------------------------------ for key1 in insert_ref: - ref1 = field1.coordinate_references[key1] + ref1 = field1_coordinate_references[key1] logger.debug( "Copying {!r} from field1 to field0".format(ref1) ) # pragma: no cover @@ -1829,6 +1797,7 @@ def _binary_operation_old(self, other, method): "dimension_coordinate", "axuiliary_coordinate", "domain_ancillary", + view=True, ) ) for key1, item1 in identity_map.copy().items(): @@ -1965,13 +1934,15 @@ def _binary_operation(self, other, method): for i, (f, out) in enumerate(zip((field0, field1), (out0, out1))): data_axes = 
f.get_data_axes() - for axis in f.domain_axes: + f_dimension_coordinates = f.dimension_coordinates(view=True) + f_auxiliary_coordinates = f.auxiliary_coordinates(view=True) + for axis in f.domain_axes(view=True): identity = None key = None coord = None coord_type = None - coords = f.dimension_coordinates.filter_by_axis("exact", axis) + coords = f_dimension_coordinates.filter_by_axis("exact", axis) if len(coords) == 1: # This axis of the domain has a dimension coordinate key = coords.key() @@ -1985,12 +1956,11 @@ def _binary_operation(self, other, method): if getattr(coord, ctype, False): identity = ctype break - # --- End: if if identity is None and relaxed_identities: identity = coord.identity(relaxed=True, default=None) else: - coords = f.auxiliary_coordinates.filter_by_axis( + coords = f_auxiliary_coordinates.filter_by_axis( "exact", axis ) if len(coords) == 1: @@ -2006,7 +1976,6 @@ def _binary_operation(self, other, method): identity = coord.identity( relaxed=True, default=None ) - # --- End: if if identity is None: identity = i @@ -2021,7 +1990,6 @@ def _binary_operation(self, other, method): coord_type=coord_type, scalar=(axis not in data_axes), ) - # --- End: for for identity, y in tuple(out1.items()): asdas = True @@ -2033,7 +2001,6 @@ def _binary_operation(self, other, method): if y.scalar and asdas: del out1[identity] - # --- End: for for identity, a in tuple(out0.items()): asdas = True @@ -2045,10 +2012,6 @@ def _binary_operation(self, other, method): if a.scalar and asdas: del out0[identity] - # --- End: for - - logger.info("out0\n {}".format(out0)) - logger.info("out1\n {}".format(out1)) squeeze1 = [] insert0 = [] @@ -2086,41 +2049,6 @@ def _binary_operation(self, other, method): insert0.append(y.axis) elif identity not in out0: insert0.append(y.axis) - # else: - # a = out0[identity] - # - # if y.size == 1: - # pass - # elif y.size > 1 and a.size == 1: - # axes_to_replace_from_field1[y.axis] = y - # else: - # pass - # - # if y.size != a.size: - # raise ValueError( - # "Can't broadcast size {} {!r} axis to size {}" - # "{!r} axis".format( - # y.size, identity, a.size, identity) - # ) - # - # # Ensure matching axis directions - # if y.coord.direction() != a.coord.direction(): - # other.flip(y.axis, inplace=True) - # - # # Check for matching coordinate values - # if not y.coord._equivalent_data(a.coord): - # raise ValueError( - # "Can't combine {!r} axes with different " - # "coordinate values".format(identity) - # ) - # - # # Check coord refs - # refs0 = field0.get_coordinate_reference(construct=a.key) - # refs1 = field1.get_coordinate_reference(construct=y.key) - # print(y.coord) - # print(refs0,refs1) - # pass - # --- End: for # Make sure that both data arrays have the same number of # dimensions @@ -2171,7 +2099,6 @@ def _binary_operation(self, other, method): field0.get_data_axes().index(a.axis), inplace=True, ) - # --- End: for axis_map = { axis1: axis0 @@ -2180,9 +2107,6 @@ def _binary_operation(self, other, method): ) } - # axis_map_0_to_1 = {axis0: axis1 for axis1, axis0 in zip( - # field1.get_data_axes(), field0.get_data_axes())} - logger.info("\naxis_map= {}\n".format(axis_map)) logger.info("{!r}".format(field0)) logger.info("{!r}".format(field1)) @@ -2238,11 +2162,9 @@ def _binary_operation(self, other, method): n_equivalent_refs += 1 refs0.remove(ref0) break - # --- End: for if n_equivalent_refs != n_refs: raise ValueError("TODO") - # --- End: for # Change the domain axis sizes in field0 so that they match # the broadcasted result data @@ -2270,7 +2192,7 @@ def 
_binary_operation(self, other, method): elif y.size > 1: axis0 = axis_map[y.axis] field0.domain_axis(axis0).set_size(y.size) - # --- End: for + logger.info("\n{!r}".format(field0)) logger.info("{!r}".format(field1)) logger.info("{!r}".format(field0.data)) @@ -2307,7 +2229,6 @@ def _binary_operation(self, other, method): # field1.get_data_axes(key1)] # key0 = field0.set_construct(c, axes=axes, copy=False) # already_copied[key1] = key0 - # # --- End: if # for axis1, y in axes_to_replace_from_field1.items(): # axis0 = axis_map[axis1] @@ -2318,7 +2239,10 @@ def _binary_operation(self, other, method): if new_axes: constructs = field1.constructs.filter_by_type( - "dimension_coordinate", "auxiliary_coordinate", "cell_measure" + "dimension_coordinate", + "auxiliary_coordinate", + "cell_measure", + view=True, ) constructs = constructs.filter_by_axis("subset", *new_axes) for key, c in constructs.items(): @@ -2326,26 +2250,17 @@ def _binary_operation(self, other, method): axes = [axis_map[axis1] for axis1 in c_axes] key0 = field0.set_construct(c, axes=axes, copy=False) already_copied[key] = key0 - # --- End: if - - # for axis1, y in axes_to_replace_from_field1.items(): - # axis0 = axis_map[axis1] - # for c in field1.coordinates.filter_by_axis('exact', axis1).values(): - # key0 = field0.set_construct(c, axes=axis0, copy=False) - # already_copied[y.key] = key0 - # # --- End: for # ------------------------------------------------------------ # Copy over coordinate reference constructs from field1, # including their domain ancillary constructs. # ------------------------------------------------------------ - for key, ref in field1.coordinate_references.items(): + for key, ref in field1.coordinate_references(view=True).items(): axes = field1._coordinate_reference_axes(key) if axes.issubset(new_axes): refs_to_add_from_field1.append(ref) elif axes.intersection(axes_to_replace_from_field1): refs_to_add_from_field1.append(ref) - # --- End: for logger.info("\nrefs_to_add_from_field1=", refs_to_add_from_field1) @@ -2364,12 +2279,10 @@ def _binary_operation(self, other, method): ] key0 = field0.set_construct(c, axes=axes, copy=False) already_copied[key1] = key0 - # --- End: if key0 = already_copied[key1] if key0 is not None: coords.append(key0) - # --- End: for ref.clear_coordinates() ref.set_coordinates(coords) @@ -2390,14 +2303,12 @@ def _binary_operation(self, other, method): ] key0 = field0.set_construct(c, axes=axes, copy=False) already_copied[key1] = key0 - # --- End: if key0 = already_copied[key1] ref.coordinate_conversion.set_domain_ancillary(term, key0) # Copy coordinate reference to field0 field0.set_construct(ref, copy=False) - # --- End: for # ------------------------------------------------------------ # Remove misleading identities @@ -2463,7 +2374,7 @@ def _conform_coordinate_references(self, key, coordref=None): identity = self.constructs[key].identity(strict=True) if coordref is None: - refs = self.coordinate_references.values() + refs = self.coordinate_references(view=True).values() else: refs = [coordref] @@ -2472,7 +2383,6 @@ def _conform_coordinate_references(self, key, coordref=None): if identity in coordinates: ref.del_coordinate(identity, None) ref.set_coordinate(key) - # --- End: for def _coordinate_reference_axes(self, key): """Returns the field's set of coordinate reference axes for a @@ -2507,20 +2417,14 @@ def _coordinate_reference_axes(self, key): def _conform_cell_methods(self): """Changes the axes of the field's cell methods so they conform. 
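# A short sketch (not part of the patch) of the construct-access pattern this
# changeset applies throughout, e.g. in ``_conform_cell_methods`` just below:
# methods such as ``domain_axes``, ``dimension_coordinates`` and
# ``coordinate_references`` are called with the ``view=True`` (and sometimes
# ``cache=``) keywords introduced here, and the result is bound to a local
# name so that loops reuse one filtered view instead of rebuilding it on every
# attribute access. This assumes ``cf.example_field`` is available and that
# the patch is applied; it will not run against a release that predates these
# keywords.
import cf

f = cf.example_field(0)

domain_axes = f.domain_axes(view=True)  # build the filtered view once
for key, axis in domain_axes.items():   # ...and reuse it inside the loop
    print(key, axis.get_size())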
- :Parameters: - :Returns: `None` - **Examples:** - - >>> f._conform_cell_methods() - """ axis_map = {} - for cm in self.cell_methods.values(): + for cm in self.cell_methods(view=True).values(): for axis in cm.get_axes(()): if axis in axis_map: continue @@ -2530,10 +2434,8 @@ def _conform_cell_methods(self): continue axis_map[axis] = self.domain_axis(axis, key=True, default=axis) - # --- End: for cm.change_axes(axis_map, inplace=True) - # --- End: for @_manage_log_level_via_verbosity def _equivalent_coordinate_references( @@ -2570,17 +2472,20 @@ def _equivalent_coordinate_references( `bool` """ - ref0 = self.coordinate_references[key0] - ref1 = field1.coordinate_references[key1] + ref0 = self.coordinate_references(view=True)[key0] + ref1 = field1.coordinate_references(view=True)[key1] if not ref0.equivalent(ref1, rtol=rtol, atol=atol, verbose=verbose): logger.info( - "{}: Non-equivalent coordinate references " - "({!r}, {!r})".format(self.__class__.__name__, ref0, ref1) + f"{self.__class__.__name__}: Non-equivalent coordinate " + f"references ({ref0!r}, {ref1!r})" ) # pragma: no cover return False # Compare the domain ancillaries + domain_ancillaries = self.domain_ancillaries(view=True) + field1_domain_ancillaries = field1.domain_ancillaries(view=True) + for ( term, identifier0, @@ -2590,8 +2495,8 @@ def _equivalent_coordinate_references( identifier1 = ref1.coordinate_conversion.domain_ancillaries()[term] - key0 = self.domain_ancillaries.filter_by_key(identifier0).key() - key1 = field1.domain_ancillaries.filter_by_key(identifier1).key() + key0 = domain_ancillaries.filter_by_key(identifier0).key() + key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() if not self._equivalent_construct_data( field1, @@ -2606,7 +2511,6 @@ def _equivalent_coordinate_references( ): # add traceback TODO return False - # --- End: for return True @@ -2642,35 +2546,30 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): if not shape or len(shape) != len(set(shape)): raise ValueError( - "Can't insert {!r}: Ambiguous shape: {}. " - "Consider setting the 'axes' parameter.".format( - item, shape - ) + f"Can't insert {item!r}: Ambiguous shape: " + f"{shape}. Consider setting the 'axes' parameter." ) + domain_axes = self.domain_axes(view=True) axes = [] axes_sizes = [ domain_axis.get_size(None) - for domain_axis in self.domain_axes.values() + for domain_axis in domain_axes.values() ] for n in shape: if not axes_sizes.count(n): raise ValueError( - "Can't insert {!r}: There is no " - "domain axis construct with size {}.".format( - item, n - ) + f"Can't insert {item!r}: There is no " + f"domain axis construct with size {n}." ) if axes_sizes.count(n) == 1: - axes.append( - self.domain_axes.filter_by_size(n).key() - ) + axes.append(domain_axes.filter_by_size(n).key()) else: raise ValueError( - "Can't insert {!r}: Ambiguous shape: {}. " - "Consider setting the 'axes' " - "parameter.".format(item, shape) + f"Can't insert {item!r}: Ambiguous shape: " + "f{shape}. Consider setting the 'axes' " + "parameter." 
) else: # -------------------------------------------------------- @@ -2689,41 +2588,35 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): if len(axes) != ndim or len(set(axes)) != ndim: raise ValueError( - "Can't insert {!r}: Incorrect number of given axes " - "(got {}, expected {})".format( - item, len(set(axes)), ndim - ) + f"Can't insert {item!r}: Incorrect number of given " + f"axes (got {len(set(axes))}, expected {ndim})" ) + domain_axes = self.domain_axes(view=True) axes2 = [] for axis, size in zip(axes, item.data.shape): dakey = self.domain_axis( axis, key=True, - default=ValueError("Unknown axis: {!r}".format(axis)), + default=ValueError(f"Unknown axis: {axis!r}"), ) - # dakey = self.domain_axis(axis, key=True, default=None) - # if axis is None: - # raise ValueError("Unknown axis: {!r}".format(axis)) - axis_size = self.domain_axes[dakey].get_size(None) + axis_size = domain_axes[dakey].get_size(None) if size != axis_size: raise ValueError( - "Can't insert {!r}: Mismatched axis size " - "({} != {})".format(item, size, axis_size) + f"Can't insert {item!r}: Mismatched axis size " + f"({size} != {axis_size})" ) axes2.append(dakey) - # --- End: for axes = axes2 if ndim != len(set(axes)): raise ValueError( - "Can't insert {!r}: Mismatched number of axes " - "({} != {})".format(item, len(set(axes)), ndim) + f"Can't insert {item!r}: Mismatched number of axes " + f"({len(set(axes))} != {ndim})" ) - # --- End: if return axes @@ -2774,7 +2667,6 @@ def _conform_for_assignment(self, other, check_coordinates=False): "Can't assign: {0!r} axis defined by auxiliary in only " "1 field".format(identity) ) - # --- End: for copied = False @@ -2788,23 +2680,25 @@ def _conform_for_assignment(self, other, check_coordinates=False): # then other.data becomes Y X T # ------------------------------------------------------------ squeeze_axes1 = [] + other_domain_axes = other.domain_axes(view=True) + for axis1 in v["undefined_axes"]: - axis_size = other.domain_axes[axis1].get_size() + axis_size = other_domain_axes[axis1].get_size() if axis_size != 1: raise ValueError( "Can't assign: Can't broadcast undefined axis with " - "size {}".format(axis_size) + f"size {axis_size}" ) squeeze_axes1.append(axis1) for identity in set(v["id_to_axis"]).difference(matching_ids): axis1 = v["id_to_axis"][identity] - axis_size = other.domain_axes[axis1].get_size() + axis_size = other_domain_axes[axis1].get_size() if axis_size != 1: raise ValueError( - "Can't assign: Can't broadcast size {0} {1!r} " - "axis".format(axis_size, identity) + "Can't assign: Can't broadcast size " + f"{axis_size} {identity!r} axis" ) squeeze_axes1.append(axis1) @@ -2834,7 +2728,6 @@ def _conform_for_assignment(self, other, check_coordinates=False): axis1 = v["id_to_axis"][identity] if axis1 in data_axes1: transpose_axes1.append(axis1) - # --- End: for if transpose_axes1 != data_axes1: if not copied: @@ -2860,7 +2753,6 @@ def _conform_for_assignment(self, other, check_coordinates=False): expand_positions1.append(i) else: expand_positions1.append(i) - # --- End: for if expand_positions1: if not copied: @@ -2870,7 +2762,6 @@ def _conform_for_assignment(self, other, check_coordinates=False): for i in expand_positions1: new_axis = other.set_construct(other._DomainAxis(1)) other.insert_dimension(new_axis, position=i, inplace=True) - # --- End: if # ---------------------------------------------------------------- # Make sure that each pair of matching axes has the same @@ -2882,7 +2773,6 @@ def _conform_for_assignment(self, other, 
check_coordinates=False): axis0 = s["id_to_axis"][identity] if other.direction(axis1) != self.direction(axis0): flip_axes1.append(axis1) - # --- End: for if flip_axes1: if not copied: @@ -2922,8 +2812,8 @@ def _conform_for_assignment(self, other, check_coordinates=False): # are compatible if not coord0._equivalent_data(coord1): raise ValueError( - "Matching {!r} coordinate constructs have different " - "data".format(identity) + f"Matching {identity!r} coordinate constructs have " + "different data" ) # If the defining coordinates are attached to @@ -2934,12 +2824,12 @@ def _conform_for_assignment(self, other, check_coordinates=False): # contain the defining coordinate. refs0 = [ key - for key, ref in self.coordinate_references.items() + for key, ref in self.coordinate_references(view=True).items() if key0 in ref.coordinates() ] refs1 = [ key - for key, ref in other.coordinate_references.items() + for key, ref in other.coordinate_references(view=True).items() if key1 in ref.coordinates() ] @@ -2951,7 +2841,6 @@ def _conform_for_assignment(self, other, check_coordinates=False): other, key0=refs0[0], key1=refs1[0], s=s, t=v ): raise ValueError("TODO") - # --- End: for return other @@ -2983,7 +2872,6 @@ def _conform_for_data_broadcasting(self, other): if ndiff > 0 and set(other.shape[:ndiff]) == set((1,)): for i in range(ndiff): other = other.squeeze(0) - # --- End: if return other @@ -3097,7 +2985,6 @@ def _equivalent_construct_data( return False transpose_axes.append(axes1.index(axis1)) - # --- End: if copy1 = True @@ -3179,45 +3066,44 @@ def _regrid_get_latlong(self, name, axes=None): and Y coordinates are returned, which are not long/lat. """ + auxiliary_coordinates = None + if axes is None: # Retrieve the field construct's X and Y dimension coordinates - xdims = self.dimension_coordinates("X") + xdims = self.dimension_coordinates(view=True)("X") len_x = len(xdims) if not len_x: raise ValueError( - "No X dimension coordinate found for the {} " + f"No X dimension coordinate found for the {name} " "field. If none is present you " "may need to specify the axes keyword, " "otherwise you may need to set the X " "attribute of the X dimension coordinate " - "to True.".format(name) + "to True." ) if len_x > 1: raise ValueError( - "{} field has multiple 'X' dimension coordinates".format( - name.captalize() - ) + f"{name.captalize()} field has multiple 'X' dimension " + "coordinates" ) - ydims = self.dimension_coordinates("Y") + ydims = self.dimension_coordinates(view=True)("Y") len_y = len(ydims) if not len_y: raise ValueError( - "No Y dimension coordinate found for the {} " + f"No Y dimension coordinate found for the {name} " "field. If none is present you " "may need to specify the axes keyword, " "otherwise you may need to set the Y " "attribute of the Y dimension coordinate " - "to True.".format(name) + "to True." 
) if len_y > 1: raise ValueError( - "{} field has multiple 'Y' dimension coordinates".format( - name - ) + f"{name} field has multiple 'Y' dimension coordinates" ) x = xdims.value() @@ -3227,10 +3113,6 @@ def _regrid_get_latlong(self, name, axes=None): x_axis = self.domain_axis(x_key, key=True) y_axis = self.domain_axis(y_key, key=True) - # x_axis, x = dict(x).popitem() - # y_axis, y = dict(y).popitem() - # x_key = x_axis - # y_key = y_axis x_size = x.size y_size = y.size else: @@ -3240,10 +3122,9 @@ def _regrid_get_latlong(self, name, axes=None): for key in ("X", "Y"): if key not in axes: raise ValueError( - "Key {!r} must be specified for axes of {} " - "field.".format(key, name) + f"Key {key!r} must be specified for axes of {name} " + "field." ) - # --- End: for if axes["X"] in (1, 0) and axes["Y"] in (0, 1): # Axes specified by integer position in dimensions of @@ -3251,8 +3132,11 @@ def _regrid_get_latlong(self, name, axes=None): if axes["X"] == axes["Y"]: raise ValueError("TODO") - x = self.auxiliary_coordinates("X").filter_by_naxes(2) - y = self.auxiliary_coordinates("Y").filter_by_naxes(2) + auxiliary_coordinates = self.auxiliary_coordinates( + view=True, cache=auxiliary_coordinates + ) + x = auxiliary_coordinates("X", view=True).filter_by_naxes(2) + y = auxiliary_coordinates("Y", view=True).filter_by_naxes(2) if len(x) != 1: raise ValueError("TODO") if len(y) != 1: @@ -3273,18 +3157,14 @@ def _regrid_get_latlong(self, name, axes=None): lon_axes[axes["X"]], key=True, default=ValueError( - "'X' axis specified for {} field not found.".format( - name - ) + f"'X' axis specified for {name} field not found." ), ) y_axis = self.domain_axis( lat_axes[axes["Y"]], key=True, default=ValueError( - "'Y' axis specified for {} field not found.".format( - name - ) + f"'Y' axis specified for {name} field not found." ), ) else: @@ -3292,9 +3172,7 @@ def _regrid_get_latlong(self, name, axes=None): axes["X"], key=True, default=ValueError( - "'X' axis specified for {} field not found.".format( - name - ) + f"'X' axis specified for {name} field not found." ), ) @@ -3302,14 +3180,13 @@ def _regrid_get_latlong(self, name, axes=None): axes["Y"], key=True, default=ValueError( - "'Y' axis specified for {} field not found.".format( - name - ) + f"'Y' axis specified for {name} field not found." ), ) - x_size = self.domain_axes[x_axis].get_size() - y_size = self.domain_axes[y_axis].get_size() + domain_axes = self.domain_axes(view=True) + x_size = domain_axes[x_axis].get_size() + y_size = domain_axes[y_axis].get_size() axis_keys = [x_axis, y_axis] axis_sizes = [x_size, y_size] @@ -3324,52 +3201,51 @@ def _regrid_get_latlong(self, name, axes=None): lon_found = False lat_found = False - for key, aux in self.auxiliary_coordinates.filter_by_naxes( - 2 - ).items(): + auxiliary_coordinates = self.auxiliary_coordinates( + view=True, cache=auxiliary_coordinates + ) + + for key, aux in auxiliary_coordinates.filter_by_naxes(2).items(): if aux.Units.islongitude: if lon_found: raise ValueError( "The 2-d auxiliary longitude coordinate " - "of the {} field is not unique.".format(name) + f"of the {name} field is not unique." ) else: lon_found = True x = aux x_key = key - # --- End: if if aux.Units.islatitude: if lat_found: raise ValueError( "The 2-d auxiliary latitude coordinate " - "of the {} field is not unique.".format(name) + f"of the {name} field is not unique." 
) else: lat_found = True y = aux y_key = key - # --- End: for if not lon_found or not lat_found: raise ValueError( "Both longitude and latitude coordinates " - "were not found for the {} field.".format(name) + f"were not found for the {name} field." ) if axes is not None: if set(axis_keys) != set(self.get_data_axes(x_key)): raise ValueError( "Axes of longitude do not match " - "those specified for {} field.".format(name) + f"those specified for {name} field." ) if set(axis_keys) != set(self.get_data_axes(y_key)): raise ValueError( "Axes of latitude do not match " - "those specified for {} field.".format(name) + f"those specified for {name} field." ) - # --- End: if coords_2D = True else: @@ -3378,9 +3254,8 @@ def _regrid_get_latlong(self, name, axes=None): if x_size == 1 or y_size == 1: raise ValueError( "Neither the longitude nor latitude dimension coordinates " - "of the {} field can be of size 1.".format(name) + f"of the {name} field can be of size 1." ) - # --- End: if coord_keys = [x_key, y_key] coords = [x, y] @@ -3413,24 +3288,14 @@ def _regrid_get_cartesian_coords(self, name, axes): for axis in axes: key = self.domain_axis(axis, key=True) axis_keys.append(key) - # tmp = self.axes(axis).keys() - # len_tmp = len(tmp) - # if not len_tmp: - # raise ValueError('No ' + name + ' axis found: ' + str(axis)) - # elif len(tmp) != 1: - # raise ValueError('Axis of ' + name + ' must be unique: ' + - # str(axis)) - # - # axis_keys.append(tmp.pop()) coords = [] for key in axis_keys: - # d = self.dim(key) d = self.dimension_coordinate(key, default=None) if d is None: raise ValueError( - "No unique " + name + " dimension coordinate " - "matches key " + key + "." + f"No unique {name} dimension coordinate " + f"matches key {key}." ) coords.append(d.copy()) @@ -3570,32 +3435,30 @@ def _regrid_check_bounds( for coord in coords: if not coord.has_bounds(): raise ValueError( - "{} {!r} coordinates must have bounds " - "for conservative regridding.".format(x, coord) + f"{x} {coord!r} coordinates must have bounds " + "for conservative regridding." ) if not coord.contiguous(overlap=False): raise ValueError( - "{} {!r} coordinates must have " + f"{x} {coord!r} coordinates must have " "contiguous, non-overlapping bounds " - "for conservative regridding.".format(x, coord) + "for conservative regridding." ) - # --- End: for if ext_coords is not None: for coord in ext_coords: if not coord.has_bounds(): raise ValueError( - "{!r} dimension coordinates must have bounds " - "for conservative regridding.".format(coord) + f"{coord!r} dimension coordinates must have " + "bounds for conservative regridding." ) if not coord.contiguous(overlap=False): raise ValueError( - "{!r} dimension coordinates must have " + f"{coord!r} dimension coordinates must have " "contiguous, non-overlapping bounds " - "for conservative regridding.".format(coord) + "for conservative regridding." ) - # --- End: if @classmethod def _regrid_check_method(cls, method): @@ -3611,10 +3474,8 @@ def _regrid_check_method(cls, method): raise ValueError("Can't regrid: Must select a regridding method") elif method not in regridding_methods: - raise ValueError( - "Can't regrid: Invalid method: {!r}".format(method) - ) - elif method == "bilinear": # use logging.info() once have logging + raise ValueError(f"Can't regrid: Invalid method: {method!r}") + elif method == "bilinear": # TODO use logging.info() once have logging print( "Note the 'bilinear' method argument has been renamed to " "'linear' at version 3.2.0. 
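# A standalone numpy sketch (not part of the patch) of the property that
# ``_regrid_check_bounds`` enforces above for conservative regridding: each
# 1-d coordinate must have bounds, and the upper bound of every cell must
# coincide with the lower bound of the next cell (contiguous, non-overlapping
# cells). The function name and example bounds are illustrative only.
import numpy as np

def bounds_are_contiguous(bounds):
    """True if (N, 2) cell bounds are contiguous and non-overlapping."""
    bounds = np.asarray(bounds, dtype=float)
    return bool(np.allclose(bounds[:-1, 1], bounds[1:, 0]))

# bounds_are_contiguous([[0, 1], [1, 2], [2, 3]])  -> True
# bounds_are_contiguous([[0, 1.5], [1, 2]])        -> False  (overlapping)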
It is still supported for now " @@ -3677,11 +3538,13 @@ def _regrid_get_reordered_sections( # possibibly reduce the number of trasnistions between different masks # - each change is slow. + dimensions_coordinates = self.dimension_coordinates(view=True) + axis_indices = [] if axis_order is not None: for axis in axis_order: # axis_key = self.dim(axis, key=True) - axis_key = self.dimension_coordinates.filter_by_axis( + axis_key = dimension_coordinates.filter_by_axis( "exact", axis ).key(None) if axis_key is not None: @@ -3698,7 +3561,6 @@ def _regrid_get_reordered_sections( else: raise ValueError("Axis not found: " + str(axis)) - # --- End: if # Section the data sections = self.data.section(regrid_axis_indices) @@ -3739,10 +3601,6 @@ def _regrid_get_destination_mask( A numpy array with the mask. """ - # dst_mask = self.section(axes, stop=1, - # ndim=1)[0].squeeze().array.mask - # dst_mask = dst_mask.transpose(dst_order) - indices = { axis: [0] for axis in self.get_data_axes() if axis not in axes } @@ -3757,7 +3615,6 @@ def _regrid_get_destination_mask( for coord in coords_ext: tmp.append(coord.size) dst_mask = numpy_tile(dst_mask, tmp + [1] * dst_mask.ndim) - # --- End: if return dst_mask @@ -3941,8 +3798,10 @@ def _regrid_update_coordinate_references( regridding. """ - for key, ref in self.coordinate_references.items(): - # ref_axes = self.axes(ref.coordinates, exact=True) # v2 + domain_axes = None + domain_ancillaries = None + + for key, ref in self.coordinate_references(view=True).items(): ref_axes = [] for k in ref.coordinates(): ref_axes.extend(self.get_data_axes(k)) @@ -3955,8 +3814,11 @@ def _regrid_update_coordinate_references( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): - # key = self.domain_anc(value, key=True) # v2 - key = self.domain_ancillaries(value).key(default=None) + domain_ancillaries = self.domain_ancillaries( + view=True, cache=domain_ancillaries + ) + + key = domain_ancillaries(value, view=True).key(default=None) if key is None: continue @@ -3965,7 +3827,7 @@ def _regrid_update_coordinate_references( # if f.domain_anc(key, axes_all=('X', 'Y')):# v2 x = self.domain_axis("X", key=True) y = self.domain_axis("Y", key=True) - if self.domain_ancillaries.filter_by_key(key).filter_by_axis( + if domain_ancillaries.filter_by_key(key).filter_by_axis( "exact", x, y ): # Convert the domain ancillary into an independent @@ -4000,10 +3862,14 @@ def _regrid_update_coordinate_references( ) d_axes = self.get_data_axes(key) + domain_axes = self.domain_axes( + view=True, cache=domain_axes + ) + for k_s, new_size in zip( src_axis_keys, dst_axis_sizes ): - self.domain_axes[k_s].set_size(new_size) + domain_axes[k_s].set_size(new_size) self.set_construct( self._DomainAncillary(source=value), @@ -4011,9 +3877,6 @@ def _regrid_update_coordinate_references( axes=d_axes, copy=False, ) - # --- End: if - # --- End: for - # --- End: for def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): """Copy coordinate references from the destination field to the @@ -4032,7 +3895,7 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): `None` """ - for ref in dst.coordinate_references.values(): + for ref in dst.coordinate_references(view=True).values(): axes = set() for key in ref.coordinates(): axes.update(dst.get_data_axes(key)) @@ -4112,30 +3975,38 @@ def _regrid_update_coordinates( # Remove the source coordinates of new field # self.remove_items(axes=src_axis_keys) # for key in self.constructs.filter_by_axis('or', *src_axis_keys): - for key in 
self.coordinates.filter_by_axis("or", *src_axis_keys): + for key in self.coordinates(view=True).filter_by_axis( + "or", *src_axis_keys + ): self.del_construct(key) + domain_axes = self.domain_axes(view=True) + dst_auxiliary_coordinates = None + if cartesian: # Make axes map if not dst_dict: axis_map = {} for k_s, k_d in zip(src_axis_keys, dst_axis_keys): axis_map[k_d] = k_s - # --- End: if # Insert coordinates from dst into new field if dst_dict: for k_s, d in zip(src_axis_keys, dst_coords): - self.domain_axes[k_s].set_size(d.size) + domain_axes[k_s].set_size(d.size) self.set_construct(d, axes=[k_s]) else: for k_d in dst_axis_keys: d = dst.dimension_coordinate(k_d) k_s = axis_map[k_d] - self.domain_axes[k_s].set_size(d.size) + domain_axes[k_s].set_size(d.size) self.set_construct(d, axes=[k_s]) - for aux_key, aux in dst.auxiliary_coordinates.filter_by_axis( + dst_auxiliary_coordinates = dst.auxiliary_coordinates( + view=True, cache=dst_auxiliary_coordinates + ) + + for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( "subset", *dst_axis_keys ).items(): aux_axes = [ @@ -4149,7 +4020,7 @@ def _regrid_update_coordinates( # Insert 'X' and 'Y' coordinates from dst into new field for axis_key, axis_size in zip(src_axis_keys, dst_axis_sizes): - self.domain_axes[axis_key].set_size(axis_size) + domain_axes[axis_key].set_size(axis_size) if dst_dict: if dst_coords_2D: @@ -4162,6 +4033,10 @@ def _regrid_update_coordinates( for coord, axis_key in zip(dst_coords, src_axis_keys): self.set_construct(coord, axes=[axis_key]) else: + dst_auxiliary_coordinates = dst.auxiliary_coordinates( + view=True, cache=dst_auxiliary_coordinates + ) + for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys ): @@ -4178,13 +4053,12 @@ def _regrid_update_coordinates( if dim_coord is not None: self.set_construct(dim_coord, axes=[src_axis_key]) - for aux in dst.auxiliary_coordinates.filter_by_axis( + for aux in dst_auxiliary_coordinates.filter_by_axis( "exact", dst_axis_key ).values(): self.set_construct(aux, axes=[src_axis_key]) - # --- End: for - for aux_key, aux in dst.auxiliary_coordinates.filter_by_axis( + for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( "exact", *dst_axis_keys ).items(): aux_axes = dst.get_data_axes(aux_key) @@ -4192,20 +4066,21 @@ def _regrid_update_coordinates( self.set_construct(aux, axes=src_axis_keys) else: self.set_construct(aux, axes=src_axis_keys[::-1]) - # --- End: if # Copy names of dimensions from destination to source field if not dst_dict: + dst_domain_axes = dst.domain_axes(view=True) for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys ): - ncdim = dst.domain_axes[dst_axis_key].nc_get_dimension(None) + ncdim = dst_domain_axes[dst_axis_key].nc_get_dimension(None) if ncdim is not None: - self.domain_axes[src_axis_key].nc_set_dimension(ncdim) - # --- End: if + domain_axes[src_axis_key].nc_set_dimension(ncdim) # ---------------------------------------------------------------- # End of worker functions for regridding + # + # TODO move to another file # ---------------------------------------------------------------- # ---------------------------------------------------------------- @@ -4239,8 +4114,10 @@ def _weights_area_XY( `bool` or `None` """ - xdims = dict(self.dimension_coordinates("X")) - ydims = dict(self.dimension_coordinates("Y")) + dimension_coordinates = self.dimension_coordinates(view=True) + + xdims = dict(dimension_coordinates("X", view=True)) + ydims = dict(dimension_coordinates("Y", view=True)) if not (xdims and ydims): if auto: 
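# A standalone numpy sketch (not part of the patch) of what the lat-lon area
# weights assembled by ``_weights_area_XY`` represent: on a sphere the area of
# a rectangular latitude-longitude cell is
# r**2 * (lon_upper - lon_lower) * (sin(lat_upper) - sin(lat_lower)),
# so the X contribution is the longitude cell size and the Y contribution is
# the difference of the sines of the latitude bounds. The function, the
# example bounds and the radius value are illustrative only.
import numpy as np

def lat_lon_cell_areas(lat_bounds_deg, lon_bounds_deg, radius=6371229.0):
    """Cell areas (m2) for a rectangular lat-lon grid from (N, 2) bounds."""
    lat = np.radians(np.asarray(lat_bounds_deg, dtype=float))
    lon = np.radians(np.asarray(lon_bounds_deg, dtype=float))
    dlon = np.abs(lon[:, 1] - lon[:, 0])                     # X cell sizes
    dsinlat = np.abs(np.sin(lat[:, 1]) - np.sin(lat[:, 0]))  # Y contribution
    # The outer product gives the (lat, lon) grid of cell areas
    return radius ** 2 * np.outer(dsinlat, dlon)

# areas = lat_lon_cell_areas(
#     [[-90, 0], [0, 90]], [[0, 120], [120, 240], [240, 360]]
# )
# areas.sum() is then close to 4 * pi * 6371229.0 ** 2, the area of the sphere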
@@ -4289,10 +4166,9 @@ def _weights_area_XY( return raise ValueError( - "Multiple weights specifications for {!r} " - "axis".format(self.constructs.domain_axis_identity(axis)) + "Multiple weights specifications for " + f"{self.constructs.domain_axis_identity(axis)!r} axis" ) - # --- End: if if measure and radius is not None: radius = self.radius(default=radius) @@ -4303,8 +4179,8 @@ def _weights_area_XY( return raise ValueError( - "Can't create area weights: No bounds for {!r} " - "axis".format(xcoord.identity()) + "Can't create area weights: No bounds for " + f"{xcoord.identity()!r} axis" ) if methods: @@ -4322,7 +4198,6 @@ def _weights_area_XY( comp[(xaxis,)] = cells weights_axes.add(xaxis) - # --- End: if if measure or ycoord.size > 1: if not ycoord.has_bounds(): @@ -4331,7 +4206,7 @@ def _weights_area_XY( raise ValueError( "Can't create area weights: No bounds for " - "{!r} axis".format(ycoord.identity()) + f"{ycoord.identity()!r} axis" ) if ycoord.Units.equivalent(Units("radians")): @@ -4352,10 +4227,8 @@ def _weights_area_XY( else: cells = ycoord.cellsize comp[(yaxis,)] = cells - # --- End: if weights_axes.add(yaxis) - # --- End: if return True @@ -4391,8 +4264,8 @@ def _weights_data( if len(axes) != w.ndim: raise ValueError( "'axes' parameter must provide an axis identifier " - "for each weights data dimension. Got {!r} for {} " - "dimension(s).".format(axes, w.ndim) + "for each weights data dimension. Got {axes!r} for " + f"{w.ndim} dimension(s)." ) iaxes = [ @@ -4405,7 +4278,6 @@ def _weights_data( if i not in iaxes: w = w.insert_dimension(position=i) iaxes.insert(i, i) - # --- End: for w = w.transpose(iaxes) @@ -4436,7 +4308,6 @@ def _weights_data( self.constructs.domain_axis_identity(axis0) ) ) - # --- End: for if methods: comp[tuple(axes0)] = "custom data" @@ -4451,6 +4322,8 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): """Creates a weights field.""" s = self.analyse_items() + domain_axes = self.domain_axes(view=True) + for w in fields: t = w.analyse_items() @@ -4459,36 +4332,39 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): t["undefined_axes"] ): raise ValueError("345jn456jn TODO") - # --- End: if + # TODO BUG: "t.domain_axes" w = w.squeeze() + w_domain_axes = w.domain_axes(view=True) + axis1_to_axis0 = {} + coordinate_references = self.coordinate_references(view=True) + w_coordinate_references = w.coordinate_references(view=True) + for axis1 in w.get_data_axes(): identity = t["axis_to_id"].get(axis1, None) if identity is None: raise ValueError( - "Weights field has unmatched, size > 1 {!r} " - "axis".format(w.constructs.domain_axis_identity(axis1)) + "Weights field has unmatched, size > 1 " + f"{w.constructs.domain_axis_identity(axis1)!r} axis" ) axis0 = s["id_to_axis"].get(identity, None) if axis0 is None: raise ValueError( - "Weights field has unmatched, size > 1 {!r} " - "axis".format(identity) + f"Weights field has unmatched, size > 1 {identity!r} " + "axis" ) - w_axis_size = w.domain_axes[axis1].get_size() - self_axis_size = self.domain_axes[axis0].get_size() + w_axis_size = w_domain_axes[axis1].get_size() + self_axis_size = domain_axes[axis0].get_size() if w_axis_size != self_axis_size: raise ValueError( - "Weights field has incorrectly sized {!r} " - "axis ({} != {})".format( - identity, w_axis_size, self_axis_size - ) + f"Weights field has incorrectly sized {identity!r} " + f"axis ({w_axis_size} != {self_axis_size})" ) axis1_to_axis0[axis1] = axis0 @@ -4502,8 +4378,8 @@ def _weights_field(self, fields, comp, 
weights_axes, methods=False): w, key0=key0, key1=key1, s=s, t=t ): raise ValueError( - "Weights field has incompatible {!r} " - "coordinates".format(identity) + f"Weights field has incompatible {identity!r} " + "coordinates" ) # Still here? Then the defining coordinates have @@ -4514,12 +4390,12 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): # coordinate references are equivalent refs0 = [ key - for key, ref in self.coordinate_references.items() + for key, ref in coordinate_references.items() if key0 in ref.coordinates() ] refs1 = [ key - for key, ref in w.coordinate_references.items() + for key, ref in w_coordinate_references.items() if key1 in ref.coordinates() ] @@ -4544,7 +4420,6 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): "Input weights field has an incompatible " "coordinate reference" ) - # --- End: for axes0 = tuple( [axis1_to_axis0[axis1] for axis1 in w.get_data_axes()] @@ -4553,16 +4428,14 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): for axis0 in axes0: if axis0 in weights_axes: raise ValueError( - "Multiple weights specified for {!r} axis".format( - self.constructs.domain_axis_identity(axis0) - ) + "Multiple weights specified for " + f"{self.constructs.domain_axis_identity(axis0)!r} " + "axis" ) - # --- End: for comp[tuple(axes0)] = w.data weights_axes.update(axes0) - # --- End: for return True @@ -4673,7 +4546,6 @@ def _weights_geometry_area( interior_ring.shape, aux_X.bounds.shape[:-1] ) ) - # --- End: if x = aux_X.bounds.data y = aux_Y.bounds.data @@ -4715,7 +4587,6 @@ def _weights_geometry_area( # for the "last" edge of the polygon that # joins the first and last points. all_areas[i, j] += x[-1] * y[0] - x[0] * y[-1] - # --- End: for all_areas = all_areas.abs() * 0.5 @@ -4779,7 +4650,6 @@ def _weights_geometry_area( ) all_areas[i, j] += interior_angle + numpy_pi - # --- End: for area_min = all_areas.min() if area_min < 0: @@ -4808,9 +4678,7 @@ def _weights_geometry_area( if not z.Units.equivalent(_units_metres): raise ValueError( "Z coordinates must have units equivalent to " - "metres for area calculations. Got {!r}".format( - z.Units - ) + f"metres for area calculations. Got {z.Units!r}" ) positive = aux_Z.get_property("positive", None) @@ -4824,9 +4692,8 @@ def _weights_geometry_area( else: raise ValueError( "Bad value of Z coordinate 'positive' " - "property: {!r}.".format(positive) + f"property: {positive!r}." ) - # --- End: if areas *= r ** 2 @@ -4959,7 +4826,6 @@ def _weights_geometry_line( if measure: all_lengths *= radius else: - # return False # Sum the lengths of each part to get the total length of @@ -5041,7 +4907,6 @@ def _weights_geometry_volume( if measure: delta_z = abs(z[..., 1] - z[..., 0]) delta_z.squeeze(axis=-1, inplace=True) - # --- End: if if x.Units.equivalent(_units_metres) and y.Units.equivalent( _units_metres @@ -5214,7 +5079,7 @@ def _weights_linear( raise ValueError( "Can't create weights: Can't find domain axis " - "matching {!r}".format(axis) + f"matching {axis!r}" ) dim = self.dimension_coordinate(da_key, default=None) @@ -5223,8 +5088,8 @@ def _weights_linear( return False raise ValueError( - "Can't create linear weights for {!r} axis: Can't find " - "dimension coordinate construct.".format(axis) + f"Can't create linear weights for {axis!r} axis: Can't find " + "dimension coordinate construct." 
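# A standalone numpy sketch (not part of the patch) of the quantity used by
# the linear weights branch nearby: the cell size of a dimension coordinate,
# i.e. the absolute difference between its upper and lower bounds, which is
# what the ``cellsize`` attribute used in this code provides. The function
# name and example bounds are illustrative only.
import numpy as np

def linear_cell_weights(bounds):
    """1-d linear weights from (N, 2) coordinate cell bounds."""
    bounds = np.asarray(bounds, dtype=float)
    return np.abs(bounds[:, 1] - bounds[:, 0])

# Uneven pressure-level cells:
# linear_cell_weights([[1000, 900], [900, 700], [700, 300]])
# -> array([100., 200., 400.])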
) if not measure and dim.size == 1: @@ -5235,8 +5100,8 @@ def _weights_linear( return False raise ValueError( - "Can't create linear weights for {!r} axis: Multiple " - "axis specifications".format(axis) + f"Can't create linear weights for {axis!r} axis: Multiple " + "axis specifications" ) if not dim.has_bounds(): @@ -5245,8 +5110,7 @@ def _weights_linear( return False raise ValueError( - "Can't create linear weights for {!r} axis: No " - "bounds".format(axis) + f"Can't create linear weights for {axis!r} axis: No " "bounds" ) else: # Bounds exist @@ -5256,7 +5120,6 @@ def _weights_linear( ] = "linear " + self.constructs.domain_axis_identity(da_key) else: comp[(da_key,)] = dim.cellsize - # --- End: if weights_axes.add(da_key) @@ -5279,9 +5142,10 @@ def _weights_measure( `bool` """ - m = self.cell_measures.filter_by_measure(measure) + m = self.cell_measures(view=True).filter_by_measure(measure) + len_m = len(m) - if not m: + if not len_m: if measure == "area": return False @@ -5289,17 +5153,15 @@ def _weights_measure( return raise ValueError( - "Can't find weights: No {!r} cell measure".format(measure) + f"Can't find weights: No {measure!r} cell measure" ) - elif len(m) > 1: + elif len_m > 1: if auto: return False raise ValueError( - "Can't find weights: Multiple {!r} cell measures".format( - measure - ) + f"Can't find weights: Multiple {measure!r} cell measures" ) key, clm = dict(m).popitem() @@ -5319,7 +5181,6 @@ def _weights_measure( "Multiple weights specifications for {!r} " "axis".format(self.constructs.domain_axis_identity(axis)) ) - # --- End: for clm = clm.get_data(_fill_value=False).copy() if clm_axes != clm_axes0: @@ -5352,8 +5213,7 @@ def _weights_scale(self, w, scale): scale = Data.asdata(scale).datum() if scale <= 0: raise ValueError( - "'scale' parameter must be a positive number. " - "Got {}".format(scale) + "'scale' parameter must be a positive number. " f"Got {scale}" ) wmax = w.maximum() @@ -5394,7 +5254,10 @@ def _weights_yyy( y_axis = None z_axis = None - for key, aux in self.auxiliary_coordinates.filter_by_naxes(1).items(): + auxiliary_coordinates = self.auxiliary_coordinates(view=True) + auxiliary_coordinates = auxiliary_coordinates.filter_by_naxes(1) + + for key, aux in auxiliary_coordinates.items(): if aux.get_geometry(None) != geometry_type: continue @@ -5416,7 +5279,6 @@ def _weights_yyy( if domain_axis is not None and z_axis != domain_axis: aux_Z = None continue - # --- End: for if aux_X is None or aux_Y is None: if auto: @@ -5424,7 +5286,7 @@ def _weights_yyy( raise ValueError( "Can't create weights: Need both X and Y nodes to " - "calculate {} geometry weights".format(geometry_type) + f"calculate {geometry_type} geometry weights" ) if x_axis != y_axis: @@ -5451,9 +5313,8 @@ def _weights_yyy( raise ValueError( "Can't find weights: X and Y geometry coordinate bounds " - "must have the same shape. Got {} and {}".format( - aux_X.bounds.shape, aux_Y.bounds.shape - ) + "must have the same shape. 
" + f"Got {aux_X.bounds.shape} and {aux_Y.bounds.shape}" ) if not methods: @@ -5465,16 +5326,12 @@ def _weights_yyy( aux_Y.bounds.dtype.itemsize ): aux_X.bounds.varray - # --- End: if if aux_Z is None: - for key, aux in self.auxiliary_coordinates.filter_by_naxes( - 1 - ).items(): + for key, aux in auxiliary_coordinates.items(): if aux.Z: aux_Z = aux.copy() z_axis = self.get_data_axes(key)[0] - # --- End: if # Check Z coordinates if aux_Z is not None: @@ -5486,7 +5343,6 @@ def _weights_yyy( "Z coordinates span different domain axis to X and Y " "geometry coordinates" ) - # --- End_if return axis, aux_X, aux_Y, aux_Z @@ -5562,11 +5418,10 @@ def Flags(self): def ncdimensions(self): """""" out = {} - for dim, domain_axis in self.domain_axes.items(): + for dim, domain_axis in self.domain_axes(view=True).items(): ncdim = domain_axis.nc_get_dimension(None) if ncdim is not None: out[dim] = ncdim - # --- End: for return out @@ -5603,7 +5458,7 @@ def rank(self): 4 """ - return len(self.domain_axes) + return len(self.domain_axes(view=True)) @property def varray(self): @@ -5668,9 +5523,8 @@ def flag_values(self): return self.Flags.flag_values except AttributeError: raise AttributeError( - "{!r} doesn't have CF property 'flag_values'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_values'" ) @flag_values.setter @@ -5688,8 +5542,8 @@ def flag_values(self): del self.Flags.flag_values except AttributeError: raise AttributeError( - "Can't delete non-existent %s CF property 'flag_values'" - % self.__class__.__name__ + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_values'" ) else: if not self.Flags: @@ -5724,9 +5578,8 @@ def flag_masks(self): return self.Flags.flag_masks except AttributeError: raise AttributeError( - "{!r} doesn't have CF property 'flag_masks'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_masks'" ) @flag_masks.setter @@ -5744,8 +5597,8 @@ def flag_masks(self): del self.Flags.flag_masks except AttributeError: raise AttributeError( - "Can't delete non-existent {!r} CF property " - "'flag_masks'".format(self.__class__.__name__) + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_masks'" ) else: if not self.Flags: @@ -5793,9 +5646,8 @@ def flag_meanings(self): return " ".join(self.Flags.flag_meanings) except AttributeError: raise AttributeError( - "{!r} doesn't have CF property 'flag_meanings'".format( - self.__class__.__name__ - ) + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_meanings'" ) @flag_meanings.setter @@ -5813,8 +5665,8 @@ def flag_meanings(self): del self.Flags.flag_meanings except AttributeError: raise AttributeError( - "Can't delete non-existent {!r} CF property " - "'flag_meanings'".format(self.__class__.__name__) + f"{self.__class__.__name__!r} doesn't have CF property " + "'flag_meanings'" ) else: if not self.Flags: @@ -6109,11 +5961,6 @@ def cell_area( self, "cell_area", {"force": force}, version="3.0.0" ) # pragma: no cover - # x_axis = self.domain_axis('X', key=True, default=None) - # y_axis = self.domain_axis('Y', key=True, default=None) - # area_clm = self.cell_measures.filter_by_measure('area').filter_by_axis( - # 'exact', x_axis, y_axis) - w = self.weights( "area", radius=radius, @@ -6122,31 +5969,6 @@ def cell_area( great_circle=great_circle, ) - # if not force and area_clm: - # w = self.weights('area') - # else: - # x = self.dimension_coordinate('X', default=None) - # y = 
self.dimension_coordinate('Y', default=None) - # if (x is None or y is None or - # not x.Units.equivalent(_units_radians) or - # not y.Units.equivalent(_units_radians)): - # raise ValueError( - # "X or Y coordinates have incompatible units " - # "({!r}, {!r}). Expected units equivalent to {!r}".format( - # x.Units, y.Units, _units_radians) - # ) - # - # # Got x and y coordinates in radians, so we can calculate. - # - # # Parse the radius of the sphere - # radius = self.radius(default=radius) - # - # w = self.weights('area') - # radius **= 2 - # w *= radius - # w.override_units(radius.Units, inplace=True) - # # --- End: if - w.set_property("standard_name", "cell_area", copy=False) return w @@ -6203,7 +6025,7 @@ def radius(self, default=None): """ radii = [] - for cr in self.coordinate_references.values(): + for cr in self.coordinate_references(view=True).values(): r = cr.datum.get_parameter("earth_radius", None) if r is not None: r = Data.asdata(r) @@ -6219,16 +6041,14 @@ def radius(self, default=None): if r == _: got = True break - # --- End: for if not got: radii.append(r) - # --- End: for if len(radii) > 1: raise ValueError( "Multiple radii found in coordinate reference " - "constructs: {!r}".format(radii) + f"constructs: {radii!r}" ) if not radii: @@ -6252,7 +6072,7 @@ def radius(self, default=None): r = Data.asdata(radii[0]).squeeze() if r.size != 1: - raise ValueError("Multiple radii: {!r}".format(r)) + raise ValueError(f"Multiple radii: {r!r}") r.Units = Units("m") r.dtype = float @@ -6309,7 +6129,7 @@ def close(self): """ super().close() - for construct in self.constructs.filter_by_data().values(): + for construct in self.constructs.filter_by_data(view=True).values(): construct.close() def iscyclic(self, identity, **kwargs): @@ -6366,8 +6186,7 @@ def iscyclic(self, identity, **kwargs): axis = self.domain_axis(identity, key=True, default=None) if axis is None: raise ValueError( - "Can't identify unique axis from identity " - "{!r}".format(identity) + "Can't identify unique axis from identity " f"{identity!r}" ) return axis in self.cyclic() @@ -6425,7 +6244,9 @@ def concatenate(cls, fields, axis=0, _preserve=True): # ------------------------------------------------------------ # Concatenate constructs with data # ------------------------------------------------------------ - for key, construct in field0.constructs.filter_by_data().items(): + for key, construct in field0.constructs.filter_by_data( + view=True + ).items(): construct_axes = field0.get_data_axes(key) if dim not in construct_axes: @@ -6467,7 +6288,6 @@ def concatenate(cls, fields, axis=0, _preserve=True): out.set_construct( construct, key=key, axes=construct_axes, copy=False ) - # --- End: for return out @@ -6566,7 +6386,6 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): raise ValueError( "A cyclic dimension coordinate must have a period" ) - # --- End: if return old @@ -6924,13 +6743,7 @@ def weights( ): # Found volume weights from cell measures pass - # elif self._weights_geometry_volume(comp, weights_axes, - # measure=measure, - # radius=radius, - # great_circle=great_circle, - # , methods=methods, auto=True): - # # Found volume weights from polygon geometries - # pass + elif self._weights_measure( "area", comp, weights_axes, methods=methods, auto=True ): @@ -6947,23 +6760,10 @@ def weights( # Found area weights from X and Y dimension # coordinates pass - # elif self._weights_geometry_area(None, comp, weights_axes, - # measure=measure, - # radius=radius, - # great_circle=great_circle, - # 
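# A standalone sketch (not part of the patch) of the consistency rule that the
# ``radius`` method applies above: every ``earth_radius`` datum parameter found
# on the coordinate reference constructs must describe the same scalar value,
# otherwise the radius of the sphere is ambiguous. The helper below is
# illustrative only and works on plain numbers rather than ``Data`` objects.
def unique_radius(radii):
    """Return the single radius from an iterable, or raise if ambiguous."""
    distinct = sorted(set(float(r) for r in radii))
    if len(distinct) > 1:
        raise ValueError(f"Multiple radii found: {distinct!r}")

    if not distinct:
        raise ValueError("No radius found and no default given")

    return distinct[0]

# unique_radius([6371229.0, 6371229.0]) -> 6371229.0
# unique_radius([6371229.0, 6371000.0]) -> ValueError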
methods=methods, - # auto=True): - # # Found area weights from polygon geometries - # pass - - # # 1-d linear weights from dimension coordinates - # for dc_key in self.dimension_coordinates: - # axis = self.get_data_axes(dc_key)[0] - # self._weights_linear(axis, comp, weights_axes, - # auto=True, measure=measure, - # methods=methods) - - for da_key in self.domain_axes: + + domain_axes = self.domain_axes(view=True) + + for da_key in domain_axes: if self._weights_geometry_area( da_key, comp, @@ -6998,17 +6798,15 @@ def weights( ): # Found linear weights from dimension coordinates pass - # --- End: for weights_axes = [] for key in comp: weights_axes.extend(key) size_N_axes = [] - for key, domain_axis in self.domain_axes.items(): + for key, domain_axis in domain_axes.items(): if domain_axis.get_size(0) > 1: size_N_axes.append(key) - # --- End: for missing_axes = set(size_N_axes).difference(weights_axes) if missing_axes: @@ -7029,7 +6827,6 @@ def weights( for k in key: if k not in field_data_axes: raise ValueError("TODO {!r} domain axis".format(k)) - # --- End: for multiple_weights = weights_axes.intersection(key) if multiple_weights: @@ -7104,32 +6901,6 @@ def weights( cell_measures.append(w) else: axes.append(w) - # --- End: if - - # da_key_x = None - # da_key_y = None - # xaxis = self.domain_axis('X', key=True, default=None) - # yaxis = self.domain_axis('Y', key=True, default=None) - # for axis in axes: - # da_key = self.domain_axis(axis, key=True, default=None) - # da_key = self.domain_axis(axis, key=True, default=None) - # if da_key == xaxis: - # da_key_x = da_key - # elif da_key == yaxis: - # da_key_y = da_key - # # --- End: if - # - # if da_key_x and da_key_y: - # xdim = self.dimension_coordinate(xaxis, default=None) - # ydim = self.dimension_coordinate(yaxis, default=None) - # if (xdim is not None and ydim is not None and - # xdim.has_bounds() and ydim.has_bounds() and - # xdim.Units.equivalent(Units('radians')) and - # ydim.Units.equivalent(Units('radians'))): - # ydim = ydim.clip(-90, 90, units=Units('degrees')) - # ydim.sin(inplace=True) - # comp[(yaxis,)] = ydim.cellsize - # # --- End: if # Field weights self._weights_field(fields, comp, weights_axes) @@ -7139,15 +6910,6 @@ def weights( self._weights_measure( "volume", comp, weights_axes, methods=methods, auto=False ) - # pass - # else: - # # Area weights from polygon geometries - # self._weights_geometry_volume(comp, weights_axes, - # measure=measure, - # radius=radius, - # great_circle=great_circle, - # auto=False, methods=methods) - # --- End: if # Area weights if "area" in cell_measures: @@ -7179,7 +6941,6 @@ def weights( methods=methods, auto=False, ) - # --- End: if for axis in axes: da_key = self.domain_axis(axis, key=True, default=None) @@ -7222,7 +6983,6 @@ def weights( methods=methods, auto=False, ) - # --- End: for # Check for area weights specified by X and Y axes # separately and replace them with area weights @@ -7243,7 +7003,6 @@ def weights( radius=radius, methods=methods, ) - # --- End: if if not methods: if scale is not None: @@ -7252,16 +7011,14 @@ def weights( # -------------------------------------------------------- for key, w in comp.items(): comp[key] = self._weights_scale(w, scale) - # --- End: if for w in comp.values(): mn = w.minimum() if mn <= 0: raise ValueError( "All weights must be positive. 
" - "Got a weight of {}".format(mn) + f"Got a weight of {mn}" ) - # --- End: if if components or methods: # -------------------------------------------------------- @@ -7321,7 +7078,6 @@ def weights( for axis in self.get_data_axes(): if axis in waxes and self.iscyclic(axis): wdata.cyclic(waxes.index(axis), iscyclic=True) - # --- End: for if data: # Insert missing size one dimensions for broadcasting @@ -7329,7 +7085,6 @@ def weights( if axis not in waxes: waxes.insert(i, axis) wdata.insert_dimension(i, inplace=True) - # --- End: for return wdata @@ -7338,20 +7093,21 @@ def weights( field.del_data() field.del_data_axes() - not_needed_axes = set(field.domain_axes).difference(weights_axes) + not_needed_axes = set(field.domain_axes(view=True)).difference( + weights_axes + ) - for key in self.cell_methods: + for key in self.cell_methods(view=True): field.del_construct(key) - for key in self.field_ancillaries: + for key in self.field_ancillaries(view=True): field.del_construct(key) - for key in field.coordinate_references: + for key in field.coordinate_references(view=True): if field.coordinate_reference_domain_axes(key).intersection( not_needed_axes ): field.del_coordinate_reference(key) - # --- End: for for key in field.constructs.filter_by_axis("or", *not_needed_axes): field.del_construct(key) @@ -7704,7 +7460,6 @@ def digitize( long_name = f.del_property("long_name", None) if long_name is not None: f.set_property("bin_long_name", long_name, copy=False) - # --- End: if bin_units = bins.Units units = getattr(bin_units, "units", None) @@ -8197,7 +7952,6 @@ def bin( raise ValueError( "Can't set scale for 'integral' calculations." ) - # --- End: if axes = [] bin_indices = [] @@ -8379,8 +8133,6 @@ def bin( b &= a == n b.filled(False, inplace=True) - # b.hardmask = False - # b.where(b.mask, False, inplace=True) c.set_data( self.data.where(b, None, cf_masked), set_axes=False, copy=False @@ -8400,7 +8152,7 @@ def bin( # Create a cell method (if possible) # ------------------------------------------------------------ standard_names = [] - domain_axes = self.domain_axes.filter_by_size(ge(2)) + domain_axes = self.domain_axes(view=True).filter_by_size(ge(2)) for da_key in domain_axes: dim = self.dimension_coordinate(da_key, default=None) @@ -8420,10 +8172,6 @@ def bin( ) out.set_construct(cell_method, copy=False) - # Return - # if return_indices: - # d.hardmask = True - # return out, d return out def has_construct(self, identity=None): @@ -8609,7 +8357,7 @@ def del_construct(self, identity, default=ValueError()): return self._default( default, "Can't identify construct to delete from identity " - "{!r}".format(identity), + f"{identity!r}", ) return super().del_construct(key, default=default) @@ -8732,7 +8480,7 @@ def del_coordinate_reference( default, f"Can't identify construct from {construct!r}" ) - for key, ref in tuple(self.coordinate_references.items()): + for key, ref in tuple(self.coordinate_references(view=True).items()): if c_key in ref.coordinates(): self.del_coordinate_reference( key, construct=None, default=default @@ -8749,7 +8497,6 @@ def del_coordinate_reference( ) out.append(ref) continue - # --- End: for return out @@ -8886,7 +8633,9 @@ def del_domain_axis( if dakey in self.get_data_axes(default=()): self.squeeze(dakey, inplace=True) - for ckey, construct in self.constructs.filter_by_data().items(): + for ckey, construct in self.constructs.filter_by_data( + view=True + ).items(): data = construct.get_data(None, _fill_value=False) if data is None: continue @@ -9006,7 +8755,9 @@ def 
get_coordinate_reference( default, f"Can't identify construct from {construct!r}" ) - for cr_key, ref in tuple(self.coordinate_references.items()): + for cr_key, ref in tuple( + self.coordinate_references(view=True).items() + ): if c_key in [ ref.coordinates(), ref.coordinate_conversion.domain_ancillaries().values(), @@ -9018,7 +8769,6 @@ def get_coordinate_reference( out.append(ref) continue - # --- End: for return out @@ -9075,12 +8825,14 @@ def set_coordinate_reference( # Still here? ref = coordinate_reference.copy() + coordinates = field.coordinates(view=True) + domain_ancillaries = field.domain_ancillaries(view=True) + ckeys = [] for value in coordinate_reference.coordinates(): - if value in field.coordinates: - identity = field.coordinates[value].identity(strict=strict) + if value in coordinates: + identity = coordinates[value].identity(strict=strict) ckeys.append(self.coordinate(identity, key=True, default=None)) - # --- End: for ref.clear_coordinates() ref.set_coordinates(ckeys) @@ -9089,16 +8841,13 @@ def set_coordinate_reference( dakeys = {} for term, value in coordinate_conversion.domain_ancillaries().items(): - if value in field.domain_ancillaries: - identity = field.domain_ancillaries[value].identity( - strict=strict - ) + if value in domain_ancillaries: + identity = domain_ancillaries[value].identity(strict=strict) dakeys[term] = self.domain_ancillary( identity, key=True, default=None ) else: dakeys[term] = None - # --- End: for ref.coordinate_conversion.clear_domain_ancillaries() ref.coordinate_conversion.set_domain_ancillaries(dakeys) @@ -10649,11 +10398,14 @@ def collapse( # ------------------------------------------------------------ # Convert axes into domain axis construct keys # ------------------------------------------------------------ + domain_axes = None + input_axes = all_axes all_axes = [] for axes in input_axes: if axes is None: - all_axes.append(list(self.domain_axes.keys())) + domain_axes = self.domain_axes(view=True, cache=domain_axes) + all_axes.append(list(domain_axes.keys())) continue axes2 = [] @@ -10677,10 +10429,8 @@ def collapse( if a is None: raise ValueError(msg.format(x)) axes2.append(a) - # --- End: for all_axes.append(axes2) - # --- End: for logger.info( " all_methods, all_axes, all_within, all_over = " @@ -10695,6 +10445,10 @@ def collapse( # ------------------------------------------------------------ # # ------------------------------------------------------------ + domain_axes = f.domain_axes(view=True, cache=domain_axes) + auxiliary_coordinates = f.auxiliary_coordinates(view=True) + dimension_coordinates = f.dimension_coordinates(view=True) + for method, axes, within, over, axes_in in zip( all_methods, all_axes, all_within, all_over, input_axes ): @@ -10707,7 +10461,7 @@ def collapse( method = method2 - collapse_axes_all_sizes = f.domain_axes.filter_by_key(*axes) + collapse_axes_all_sizes = domain_axes.filter_by_key(*axes) logger.info( " axes = {}".format(axes) @@ -10751,7 +10505,7 @@ def collapse( if _create_zero_size_cell_bounds: # Create null bounds if requested for axis in axes: - dc = f.dimension_coordinates.filter_by_axis( + dc = dimension_coordinates.filter_by_axis( "and", axis ).value(None) if dc is not None and not dc.has_bounds(): @@ -11012,7 +10766,7 @@ def collapse( # REMOVE all cell measures and domain ancillaries # which span this axis c = f.constructs.filter_by_type( - "cell_measure", "domain_ancillary" + "cell_measure", "domain_ancillary", view=True ) for key, value in c.filter_by_axis("or", axis).items(): logger.info( @@ 
-11023,7 +10777,7 @@ def collapse( # REMOVE all 2+ dimensional auxiliary coordinates # which span this axis - c = f.auxiliary_coordinates.filter_by_naxes(gt(1)) + c = auxiliary_coordinates.filter_by_naxes(gt(1)) for key, value in c.filter_by_axis("or", axis).items(): logger.info( f" Removing {value.construct_type} {key!r}" @@ -11039,7 +10793,7 @@ def collapse( # one-dimensional auxiliary coordinates which span # this axis and have the same values in their data # array and bounds. - for key, aux in f.auxiliary_coordinates.filter_by_axis( + for key, aux in auxiliary_coordinates.filter_by_axis( "exact", axis ).items(): logger.info(f"key = {key}") # pragma: no cover @@ -11065,12 +10819,12 @@ def collapse( aux.bounds.set_data(d.bounds.data, copy=False) # Reset the axis size - f.domain_axes[axis].set_size(1) + f.domain_axes(view=True)[axis].set_size(1) logger.info( f"Changing axis size to 1: {axis}" ) # pragma: no cover - dim = f.dimension_coordinates.filter_by_axis( + dim = dimension_coordinates.filter_by_axis( "exact", axis ).value(None) if dim is None: @@ -11714,7 +11468,7 @@ def _group_weights(weights, iaxis, index): ) # pragma: no cover # Size of uncollapsed axis - axis_size = self.domain_axes[axis].get_size() + axis_size = self.domain_axes(view=True)[axis].get_size() # Integer position of collapse axis iaxis = self.get_data_axes().index(axis) @@ -11780,9 +11534,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # E.g. group=cf.M() # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) + ) if coord is None: raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") @@ -11813,9 +11569,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Chunks of # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) + ) if coord is None: raise ValueError("TODO dddddd siduhfsuildfhsuil dhfdui ") @@ -11866,13 +11624,17 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # E.g. 
group=[cf.month(4), cf.month(cf.wi(9, 11))] # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) + ) if coord is None: - coord = self.auxiliary_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.auxiliary_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) + ) if coord is None: raise ValueError("asdad8777787 TODO") @@ -11917,7 +11679,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over days' collapse" ) - cell_methods = self.cell_methods.ordered() + cell_methods = self.cell_methods(view=True).ordered() w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -12020,9 +11782,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Over years # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) + ) if coord is None or not coord.Units.isreftime: raise ValueError( "Reference-time dimension coordinates are required " @@ -12036,7 +11800,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over years' collapse" ) - cell_methods = self.cell_methods.ordered() + cell_methods = self.cell_methods(view=True).ordered() w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -12245,9 +12009,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Within years # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value() + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value() + ) if coord is None or not coord.Units.isreftime: raise ValueError( "Can't collapse: Reference-time dimension " @@ -12357,13 +12123,14 @@ def _group_weights(weights, iaxis, index): # Ignore groups that don't meet the specified criteria # ---------------------------------------------------- if over is None: - # coord = pc.coordinates.filter_by_axis( - # 'exact', axis).value(None) coord = pc.coordinate(axis_in, default=None) if group_span is not False: if isinstance(group_span, int): - if pc.domain_axes[axis].get_size() != group_span: + if ( + pc.domain_axes(view=True)[axis].get_size() + != group_span + ): classification[index] = ignore_n ignore_n -= 1 continue @@ -12465,9 +12232,11 @@ def _group_weights(weights, iaxis, index): # Hack to fix missing bounds! 
             for g in fl:
                 try:
-                    c = g.dimension_coordinates.filter_by_axis(
-                        "exact", axis
-                    ).value()
+                    c = (
+                        g.dimension_coordinates(view=True)
+                        .filter_by_axis("exact", axis)
+                        .value()
+                    )
                     if not c.has_bounds():
                         c.set_bounds(c.create_bounds())
                 except Exception:
@@ -12481,9 +12250,8 @@ def _group_weights(weights, iaxis, index):
                 and coord.construct_type == "dimension_coordinate"
             ):
                 fl.sort(
-                    key=lambda g: g.dimension_coordinates.filter_by_axis(
-                        "exact", axis
-                    )
+                    key=lambda g: g.dimension_coordinates(view=True)
+                    .filter_by_axis("exact", axis)
                     .value()
                     .datum(0),
                     reverse=coord.decreasing,
@@ -12497,7 +12265,7 @@ def _group_weights(weights, iaxis, index):
             except ValueError as error:
                 raise ValueError(f"Can't collapse: {error}")
 
-            if squeeze and f.domain_axes[axis].get_size() == 1:
+            if squeeze and f.domain_axes(view=True)[axis].get_size() == 1:
                 # Remove a totally collapsed axis from the field's
                 # data array
                 f.squeeze(axis, inplace=True)
@@ -12534,7 +12302,7 @@ def _update_cell_methods(
             `None`
 
         """
-        original_cell_methods = self.cell_methods.ordered()
+        original_cell_methods = self.cell_methods(view=True).ordered()
         logger.info(" Update cell methods:")  # pragma: no cover
         logger.info(
             " Original cell methods = {}".format(original_cell_methods)
@@ -12580,7 +12348,7 @@ def _update_cell_methods(
 
             if (
                 original_domain_axis.get_size()
-                == self.domain_axes[key].get_size()
+                == self.domain_axes(view=True)[key].get_size()
             ):
                 if (
                     lastcm.get_axes(None) == axes
@@ -12614,7 +12382,7 @@ def _update_cell_methods(
             self.set_construct(cell_method)
 
         logger.info(
-            f" Modified cell methods = {self.cell_methods.ordered()}"
+            f" Modified cell methods = {self.cell_methods().ordered()}"
         )  # pragma: no cover
 
     @_deprecated_kwarg_check("axes")
@@ -12676,7 +12444,7 @@ def direction(self, identity, axes=None, **kwargs):
         if axis is None:
             return True
 
-        for key, coord in self.dimension_coordinates.items():
+        for key, coord in self.dimension_coordinates(view=True).items():
             if axis == self.get_data_axes(key)[0]:
                 return coord.direction()
 
@@ -12700,9 +12468,9 @@ def directions(self):
         {'dim1': True, 'dim0': False}
 
         """
-        out = {key: True for key in self.domain_axes.keys()}
+        out = {key: True for key in self.domain_axes(view=True).keys()}
 
-        for key, dc in self.dimension_coordinates.items():
+        for key, dc in self.dimension_coordinates(view=True).items():
             direction = dc.direction()
             if not direction:
                 axis = self.get_data_axes(key)[0]
@@ -13001,8 +12769,8 @@ def indices(self, *mode, **kwargs):
         # Initialize indices
         indices = [slice(None)] * self.ndim
 
-        domain_axes = self.domain_axes
-        constructs = self.constructs.filter_by_data()
+        domain_axes = self.domain_axes(view=True)
+        constructs = self.constructs.filter_by_data(view=True)
 
         parsed = {}
         unique_axes = set()
@@ -13013,7 +12781,7 @@ def indices(self, *mode, **kwargs):
                 key = None
                 construct = None
             else:
-                c = constructs.filter_by_identity(identity)
+                c = constructs.filter_by_identity(identity, view=True)
                 if len(c) != 1:
                     raise ValueError(
                         "Can't find indices: Ambiguous axis or axes: "
@@ -13300,14 +13068,12 @@ def indices(self, *mode, **kwargs):
                         else:
                             contains = False
                             break
-                    # --- End: if
 
                     v = Data.asdata(v)
                     if v.Units:
                         v.Units = construct.Units
 
                     points2.append(v.datum())
-            # --- End: if
 
             if contains:
                 # The coordinates have bounds and the condition is
@@ -13342,8 +13108,6 @@ def indices(self, *mode, **kwargs):
 
                 if delete:
                     ind = [numpy_delete(ind_1d, delete) for ind_1d in ind]
-            # --- End: if
-        # --- End: if
 
         if ind is not None:
             mask_shape = [None] * self.ndim
@@ -13373,7 +13137,7 @@
def indices(self, *mode, **kwargs): # Create a full index for this axis start = 0 # stop = self.axis_size(axis) - stop = self.domain_axes[axis].get_size() + stop = self.domain_axes(view=True)[axis].get_size() size = stop - start index = slice(start, stop) else: @@ -13386,7 +13150,6 @@ def indices(self, *mode, **kwargs): mask_shape[position] = size masked_subspace_size *= size ind[i] -= start - # --- End: for create_mask = ind.shape[1] < masked_subspace_size else: @@ -13528,7 +13291,8 @@ def set_data( # Construct new field f = _inplace_enabled_define_and_cleanup(self) - if axes is None and not f.domain_axes: + domain_axes = f.domain_axes(view=True) + if axes is None and not domain_axes: set_axes = False if not set_axes: @@ -13540,7 +13304,6 @@ def set_data( data = data.override_units(units, inplace=False) else: data.override_units(units, inplace=True) - # --- End: if super(cfdm.Field, f).set_data( data, axes=None, copy=copy, inplace=True @@ -13555,7 +13318,7 @@ def set_data( if axes or axes == 0: raise ValueError( "Can't set data: Wrong number of axes for scalar data " - "array: axes={}".format(axes) + f"array: axes={axes}" ) axes = [] @@ -13576,7 +13339,6 @@ def set_data( ) ) - domain_axes = f.domain_axes() for axis, size in zip(axes, data.shape): axis_size = domain_axes[axis].get_size(None) if size != axis_size: @@ -13584,12 +13346,9 @@ def set_data( domain_axes[axis].get_size(None) for axis in axes ) raise ValueError( - "Can't set data: Data shape {} differs from shape " - "implied by axes {}: {}".format( - data.shape, axes, axes_shape - ) + f"Can't set data: Data shape {data.shape} differs " + f"from shape implied by axes {axes}: {axes_shape}" ) - # --- End: for elif f.get_data_axes(default=None) is None: # -------------------------------------------------------- @@ -13598,26 +13357,24 @@ def set_data( # # => infer the axes # -------------------------------------------------------- - domain_axes = f.domain_axes - if not domain_axes: - raise ValueError("Can't set data: No domain axes exist") - data_shape = data.shape if len(data_shape) != len(set(data_shape)): raise ValueError( - "Can't insert data: Ambiguous data shape: {}. " - "Consider setting the axes parameter.".format(data_shape) + f"Can't insert data: Ambiguous data shape: {data_shape}. " + "Consider setting the axes parameter." ) + if not domain_axes: + raise ValueError("Can't set data: No domain axes exist") + axes = [] for n in data_shape: da = domain_axes.filter_by_size(n) if len(da) != 1: raise ValueError( - "Can't insert data: Ambiguous data shape: {}. " - "Consider setting the axes parameter.".format( - data_shape - ) + "Can't insert data: Ambiguous data shape: " + f"{data_shape}. " + "Consider setting the axes parameter." ) axes.append(da.key()) @@ -13630,40 +13387,16 @@ def set_data( axes = f.get_data_axes() if len(axes) != data.ndim: raise ValueError( - "Wrong number of axes for data array: {!r}".format(axes) + f"Wrong number of axes for data array: {axes!r}" ) - # domain_axes = f.domain_axes - # for n in data.shape: - # da = domain_axes.filter_by_size(n) - # if len(da) != 1: - # raise ValueError( - # "Can't insert data: Ambiguous data shape: {}. {} " - # "domain axes have size {}. 
Consider setting the " - # "axes parameter.".format( - # data.shape, len(da), n) - # ) - # # --- End: for - - domain_axes = f.domain_axes for axis, size in zip(axes, data.shape): if domain_axes[axis].get_size(None) != size: raise ValueError( "Can't insert data: Incompatible size for axis " - "{!r}: {}".format(axis, size) + f"{axis!r}: {size}" ) - # try: - # f.set_construct( - # DomainAxis(size), key=axis, replace=False) - # except ValueError: - # raise ValueError( - # "Can't insert data: Incompatible size for axis " - # "{!r}: {}".format(axis, size) - # ) - # --- End: for - # --- End: if - if not data.Units: units = getattr(f, "Units", None) if units is not None: @@ -13672,7 +13405,6 @@ def set_data( data = data.override_units(units, inplace=False) else: data.override_units(units, inplace=True) - # --- End: if super(cfdm.Field, f).set_data(data, axes=axes, copy=copy, inplace=True) @@ -13711,7 +13443,7 @@ def domain_mask(self, **kwargs): mask.nc_del_variable(None) for key in self.constructs.filter_by_type( - "cell_method", "field_ancillary" + "cell_method", "field_ancillary", view=True ): mask.del_construct(key) @@ -13836,7 +13568,7 @@ def compute_vertical_coordinates( """ f = _inplace_enabled_define_and_cleanup(self) - for cr in f.coordinate_references.values(): + for cr in f.coordinate_references(view=True).values(): # -------------------------------------------------------- # Compute the non-parametric vertical coordinates, if # possible. @@ -14013,21 +13745,6 @@ def match_by_construct(self, *identities, OR=False, **conditions): TODO """ - # if constructs: - # for key, value in constructs.items(): - # if value is None: - # message = ("Since its value is None, use {!r} as a " - # "positional argument instead".format(value)) - # else: - # message = ("Evaluating criteria on data values is not " - # "longer possible with this method.") - # - # _DEPRECATION_ERROR_KWARGS( - # self, 'match_by_construct', kwargs={key: value}, - # message=message, version='3.1.0' - # ) # pragma: no cover - # # --- End: if - if identities: if identities[0] == "or": _DEPRECATION_ERROR_ARG( @@ -14046,7 +13763,6 @@ def match_by_construct(self, *identities, OR=False, **conditions): message="Use 'OR=False' instead.", version="3.1.0", ) # pragma: no cover - # --- End: if if not identities and not conditions: return True @@ -14058,6 +13774,8 @@ def match_by_construct(self, *identities, OR=False, **conditions): n = 0 + self_cell_methods = self.cell_methods(view=True) + for identity in identities: cms = False try: @@ -14074,7 +13792,6 @@ def match_by_construct(self, *identities, OR=False, **conditions): ] if axes: cm.set_axes(axes) - # --- End: if if not cms: filtered = constructs(identity) @@ -14083,20 +13800,21 @@ def match_by_construct(self, *identities, OR=False, **conditions): if set(filtered.construct_types().values()) == { "cell_method" }: - key = tuple(self.cell_methods.ordered())[-1] - filtered = self.cell_methods(key)(identity) + key = tuple(self_cell_methods.ordered())[-1] + filtered = self_cell_methods(key, view=True)( + identity, view=True + ) if not filtered: if not OR: return False n -= 1 - # --- End: if n += 1 elif not OR: return False else: - cell_methods = tuple(self.cell_methods.ordered().values())[ + cell_methods = tuple(self_cell_methods.ordered().values())[ -len(cms) : ] for cm0, cm1 in zip(cms, cell_methods): @@ -14130,10 +13848,8 @@ def match_by_construct(self, *identities, OR=False, **conditions): if not ok: n -= 1 break - # --- End: for n += 1 - # --- End: for if conditions: for identity, value in 
conditions.items(): @@ -14141,7 +13857,6 @@ def match_by_construct(self, *identities, OR=False, **conditions): n += 1 elif not OR: return False - # --- End: if if OR: return bool(n) @@ -14196,12 +13911,11 @@ def match_by_rank(self, *ranks): if not ranks: return True - n_domain_axes = len(self.domain_axes) + n_domain_axes = len(self.domain_axes(view=True)) for rank in ranks: ok = rank == n_domain_axes if ok: return True - # --- End: for return False @@ -14504,8 +14218,8 @@ def moving_window( method_values = ("mean", "sum", "integral") if method not in method_values: raise ValueError( - "Non-valid 'method' parameter value: {!r}. " - "Expected one of {!r}".format(method, method_values) + f"Non-valid 'method' parameter value: {method!r}. " + f"Expected one of {method_values!r}" ) if cval is not None and cval != 0: @@ -14544,21 +14258,16 @@ def moving_window( if isinstance(weights, Data): if weights.ndim > 1: raise ValueError( - "The input weights (shape {}) do not match the " - "selected axis (size {})".format( - weights.shape, f.shape[iaxis] - ) + f"The input weights (shape {weights.shape}) do not " + f"match the selected axis (size {f.shape[iaxis]})" ) if weights.ndim == 1: if weights.shape[0] != f.shape[iaxis]: raise ValueError( - "The input weights (size {}) do not match " - "the selected axis (size {})".format( - weights.size, f.shape[iaxis] - ) + f"The input weights (size {weights.size}) do not " + f"match the selected axis (size {f.shape[iaxis]})" ) - # --- End: if # Get the data weights w = f.weights( @@ -14576,7 +14285,6 @@ def moving_window( f *= w else: f = f * w - # --- End: if # Create the window weights window = numpy_full((window_size,), 1.0) @@ -14614,7 +14322,7 @@ def moving_window( # Add a cell method if f.domain_axis(axis).get_size() > 1 or method == "integral": f._update_cell_methods( - method=method, domain_axes=f.domain_axes(axis) + method=method, domain_axes=f.domain_axes(view=True)(axis) ) return f @@ -14636,20 +14344,20 @@ def convolution_filter( """Convolve the field construct along the given axis with the specified filter. - The magnitude of the integral of the filter (i.e. the sum of the - window weights defined by the *window* parameter) affects the - convolved values. For example, window weights of ``[0.2, 0.2 0.2, - 0.2, 0.2]`` will produce a non-weighted 5-point running mean; and - window weights of ``[1, 1, 1, 1, 1]`` will produce a 5-point - running sum. Note that the window weights returned by functions of - the `scipy.signal.windows` package do not necessarily sum to 1 - (see the examples for details). + The magnitude of the integral of the filter (i.e. the sum of + the window weights defined by the *window* parameter) affects + the convolved values. For example, window weights of ``[0.2, + 0.2 0.2, 0.2, 0.2]`` will produce a non-weighted 5-point + running mean; and window weights of ``[1, 1, 1, 1, 1]`` will + produce a 5-point running sum. Note that the window weights + returned by functions of the `scipy.signal.windows` package do + not necessarily sum to 1 (see the examples for details). .. note:: The `moving_window` method can not, in general, be emulated by the `convolution_filter` method, as the latter i) can not change the window weights as the - filter passes through the axis; and ii) does not update - the cell method constructs. + filter passes through the axis; and ii) does not + update the cell method constructs. .. 
seealso:: `collapse`, `derivative`, `moving_window`, `cf.relative_vorticity` @@ -14663,9 +14371,9 @@ def convolution_filter( An unweighted 5-point moving average can be computed with ``window=[0.2, 0.2, 0.2, 0.2, 0.2]`` - Note that the `scipy.signal.windows` package has suite of - window functions for creating window weights for filtering - (see the examples for details). + Note that the `scipy.signal.windows` package has suite + of window functions for creating window weights for + filtering (see the examples for details). .. versionadded:: 3.3.0 (replaces the old weights parameter) @@ -14894,7 +14602,6 @@ def convolution_filter( mode = "wrap" else: mode = "constant" - # --- End: if # Construct new field f = _inplace_enabled_define_and_cleanup(self) @@ -14951,7 +14658,6 @@ def convolution_filter( coord.set_bounds( self._Bounds(data=Data(new_bounds, units=coord.Units)) ) - # --- End: if return f @@ -15076,9 +14782,9 @@ def cumsum( ): """Return the field cumulatively summed along the given axis. - The cell bounds of the axis are updated to describe the range over - which the sums apply, and a new "sum" cell method construct is - added to the resulting field construct. + The cell bounds of the axis are updated to describe the range + over which the sums apply, and a new "sum" cell method + construct is added to the resulting field construct. .. versionadded:: 3.0.0 @@ -15212,12 +14918,12 @@ def cumsum( raise ValueError( "'coordinate' parameter must be one of " "(None, 'mid_range', 'minimum', 'maximum'). " - "Got {!r}".format(coordinate) + f"Got {coordinate!r}" ) # Add a cell method f._update_cell_methods( - method="sum", domain_axes=f.domain_axes(axis_key) + method="sum", domain_axes=f.domain_axes(view=True)(axis_key) ) return f @@ -15286,7 +14992,7 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): super(Field, f).flip(iaxes, inplace=True) # Flip any constructs which span the flipped axes - for key, construct in f.constructs.filter_by_data().items(): + for key, construct in f.constructs.filter_by_data(view=True).items(): construct_axes = f.get_data_axes(key) construct_flip_axes = axes.intersection(construct_axes) if construct_flip_axes: @@ -15425,8 +15131,10 @@ def anchor( else: f = _inplace_enabled_define_and_cleanup(self) - dim = f.dimension_coordinates.filter_by_axis("and", axis).value( - default=None + dim = ( + f.dimension_coordinates(view=True) + .filter_by_axis("and", axis) + .value(default=None) ) if dim is None: raise ValueError( @@ -15437,20 +15145,17 @@ def anchor( period = dim.period() if period is None: - raise ValueError( - "Cyclic {!r} axis has no period".format(dim.identity()) - ) + raise ValueError(f"Cyclic {dim.identity()!r} axis has no period") value = Data.asdata(value) if not value.Units: value = value.override_units(dim.Units) elif not value.Units.equivalent(dim.Units): raise ValueError( - "Anchor value has incompatible units: {!r}".format(value.Units) + f"Anchor value has incompatible units: {value.Units!r}" ) - # axis_size = f.axis_size(axis) - axis_size = f.domain_axes[axis].get_size() + axis_size = f.domain_axes(view=True)[axis].get_size() if axis_size <= 1: # Don't need to roll a size one axis if dry_run: @@ -15458,8 +15163,8 @@ def anchor( else: if inplace: f = None + return f - # --- End: if c = dim.get_data(_fill_value=False) @@ -15472,8 +15177,11 @@ def anchor( if not dry_run: f.roll(axis, shift, inplace=True) - dim = f.dimension_coordinates.filter_by_axis("and", axis).value() - # dim = f.item(axis) + dim = ( + f.dimension_coordinates(view=True) + 
.filter_by_axis("and", axis) + .value() + ) n = ((value - dim.data[0]) / period).ceil() else: # Adjust value so it's in the range (c[0]-period, c[0]] @@ -15486,10 +15194,7 @@ def anchor( f.roll(axis, shift, inplace=True) dim = f.dimension_coordinate(axis) - # dim = f.dimension_coordinates.filter_by_axis('and', axis).value() - # dim = f.item(axis) n = ((value - dim.data[0]) / period).floor() - # --- End: if if dry_run: return {"axis": axis, "roll": shift, "nperiod": n * period} @@ -15500,7 +15205,6 @@ def anchor( bounds = dim.get_bounds(None) if bounds is not None: bounds += np - # --- End: if return f @@ -15628,13 +15332,11 @@ def autocyclic(self, verbose=None): >>> f.autocyclic() """ - dims = self.dimension_coordinates("X") + dims = self.dimension_coordinates(view=True)("X") if len(dims) != 1: logger.debug( - "Not one 'X' dimension coordinate construct: {}".format( - len(dims) - ) + f"Not one 'X' dimension coordinate construct: {len(dims)}" ) # pragma: no cover return False @@ -15649,7 +15351,6 @@ def autocyclic(self, verbose=None): self.cyclic(key, iscyclic=False) logger.debug(1) # pragma: no cover return False - # --- End: if bounds = dim.get_bounds(None) if bounds is None: @@ -15718,7 +15419,7 @@ def axes(self, axes=None, **kwargs): ) # pragma: no cover if axes is None: - return self.domain_axes + return self.domain_axes() if isinstance(axes, (str, int)): axes = (axes,) @@ -15731,7 +15432,7 @@ def axes(self, axes=None, **kwargs): out = set(out) out.discard(None) - return self.domain_axes.filter_by_key(*out) + return self.domain_axes().filter_by_key(*out) @_deprecated_kwarg_check("i") def squeeze(self, axes=None, inplace=False, i=False, **kwargs): @@ -15787,11 +15488,11 @@ def squeeze(self, axes=None, inplace=False, i=False, **kwargs): data_axes = self.get_data_axes() if axes is None: - all_axes = self.domain_axes + domain_axes = self.domain_axes(view=True) axes = [ axis for axis in data_axes - if all_axes[axis].get_size(None) == 1 + if domain_axes[axis].get_size(None) == 1 ] else: if isinstance(axes, (str, int)): @@ -16074,7 +15775,7 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): f = _inplace_enabled_define_and_cleanup(self) - size_1_axes = self.domain_axes.filter_by_size(1) + size_1_axes = self.domain_axes(view=True).filter_by_size(1) for axis in set(size_1_axes).difference(self.get_data_axes()): f.insert_dimension(axis, position=0, inplace=True) @@ -16186,27 +15887,22 @@ def auxiliary_coordinate( TODO """ - c = self.auxiliary_coordinates + auxiliary_coordinates = self.auxiliary_coordinates(view=True) + + c = auxiliary_coordinates if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.auxiliary_coordinates.filter_by_axis( - "exact", da_key - ) - # --- End: if - - # if key: - # return c.key(default=default) + c = auxiliary_coordinates.filter_by_axis("exact", da_key) if key: out = c.key(default=None) if out is None: return self._default( - default, - "No {!r} auxiliary coordinate construct".format(identity), + default, f"No {identity!r} auxiliary coordinate construct" ) return out @@ -16214,8 +15910,7 @@ def auxiliary_coordinate( out = c.value(default=None) if out is None: return self._default( - default, - "No {!r} auxiliary coordinate construct".format(identity), + default, f"No {identity!r} auxiliary coordinate construct" ) return out @@ -16457,15 +16152,15 @@ def domain_ancillary(self, identity=None, default=ValueError(), key=False): TODO """ 
- c = self.domain_ancillaries + domain_ancillaries = self.domain_ancillaries(view=True) + c = domain_ancillaries if identity is not None: c = c(identity) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.domain_ancillaries.filter_by_axis("exact", da_key) - # --- End: if + c = domain_ancillaries.filter_by_axis("exact", da_key) if key: return c.key(default=default) @@ -16570,15 +16265,15 @@ def cell_measure(self, identity=None, default=ValueError(), key=False): TODO """ - c = self.cell_measures + cell_measures = self.cell_measures(view=True) + c = cell_measures if identity is not None: c = c(identity) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.cell_measures.filter_by_axis("exact", da_key) - # --- End: if + c = cell_measures.filter_by_axis("exact", da_key) if key: return c.key(default=default) @@ -16669,22 +16364,22 @@ def cell_method(self, identity=None, default=ValueError(), key=False): TODO """ - c = self.cell_methods + cell_methods = self.cell_methods(view=True) + c = cell_methods if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) cm_keys = [ key - for key, cm in self.cell_methods.items() + for key, cm in cell_methods.items() if cm.get_axes(None) == (da_key,) ] if cm_keys: - c = self.cell_methods(*cm_keys) + c = cell_methods(*cm_keys, view=True) else: - c = self.cell_methods(None) - # --- End: if + c = cell_methods(None, view=True) if key: return c.key(default=default) @@ -16789,15 +16484,15 @@ def coordinate(self, identity=None, default=ValueError(), key=False): TODO """ - c = self.coordinates + coordinates = self.coordinates(view=True) + c = coordinates if identity is not None: c = c(identity) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.coordinates.filter_by_axis("exact", da_key) - # --- End: if + c = coordinates(view=True).filter_by_axis("exact", da_key) if key: return c.key(default=default) @@ -16898,14 +16593,14 @@ def coordinate_reference( TODO """ - c = self.coordinate_references + coordinate_references = self.coordinate_references(view=True) + c = coordinate_references if identity is not None: - c = c.filter_by_identity(identity) - for cr_key, cr in self.coordinate_references.items(): + c = c.filter_by_identity(identity, view=False) + for cr_key, cr in coordinate_references.items(): if cr.match(identity): c._set_construct(cr, key=cr_key, copy=False) - # --- End: if if key: return c.key(default=default) @@ -17015,15 +16710,15 @@ def field_ancillary(self, identity=None, default=ValueError(), key=False): TODO """ - c = self.field_ancillaries + field_ancillaries = self.field_ancillaries(view=True) + c = field_ancillaries if identity is not None: c = c(identity) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.field_ancillaries.filter_by_axis("exact", da_key) - # --- End: if + c = field_ancillaries.filter_by_axis("exact", da_key) if key: return c.key(default=default) @@ -17132,16 +16827,15 @@ def dimension_coordinate( TODO """ - c = self.dimension_coordinates + dimension_coordinates = self.dimension_coordinates(view=True) + c = dimension_coordinates if identity is not None: c = c(identity) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.dimension_coordinates.filter_by_axis( - "exact", da_key - ) + c = 
dimension_coordinates.filter_by_axis("exact", da_key) if key: return c.key(default=default) @@ -17249,7 +16943,9 @@ def domain_axis(self, identity, key=False, default=ValueError()): else: identity = da_key - domain_axes = self.domain_axes(identity) + self_domain_axes = self.domain_axes(view=True) + + domain_axes = self_domain_axes(identity, view=True) if len(domain_axes) == 1: # identity is a unique domain axis construct identity da_key = domain_axes.key() @@ -17261,13 +16957,13 @@ def domain_axis(self, identity, key=False, default=ValueError()): return self._default( default, "No unique domain axis construct is identifable from " - "{!r}".format(identity), + f"{identity!r}", ) if key: return da_key - return self.constructs[da_key] + return self_domain_axes[da_key] def domain_axis_position(self, identity): """Return the position in the data of a domain axis construct. @@ -17387,7 +17083,7 @@ def axes_names(self, *identities, **kwargs): self, "axes_names", kwargs ) # pragma: no cover - out = dict(self.domain_axes) + out = dict(self.domain_axes(view=True)) for key in tuple(out): value = self.constructs.domain_axis_identity(key) @@ -17395,7 +17091,6 @@ def axes_names(self, *identities, **kwargs): out[key] = value else: del out[key] - # --- End: for return out @@ -17497,7 +17192,7 @@ def axis_size(self, identity, default=ValueError(), axes=None, **kwargs): axis = self.domain_axis(identity, key=True) - domain_axes = self.domain_axes + domain_axes = self.domain_axes(view=True) da = domain_axes.get(axis) if da is not None: @@ -17609,7 +17304,6 @@ def set_construct( copy = False else: construct.insert_dimension(0, inplace=True) - # --- End: if if set_axes: axes = self._set_construct_parse_axes( @@ -17621,20 +17315,17 @@ def set_construct( axes = self._set_construct_parse_axes( construct, axes, allow_scalar=True ) - # --- End: if if construct_type == "dimension_coordinate": - for dim, dim_axes in ( - self.constructs.filter_by_type(construct_type) - .data_axes() - .items() + dimension_coordinates = self.dimension_coordinates(view=True) + for dim, dim_axes in tuple( + dimension_coordinates.data_axes().items() ): if dim == key: continue if dim_axes == tuple(axes): self.del_construct(dim, default=None) - # --- End: if out = super().set_construct(construct, key=key, axes=axes, copy=copy) @@ -17653,9 +17344,8 @@ def set_construct( self._conform_cell_methods() elif construct_type == "coordinate_reference": - for ckey in self.coordinates: + for ckey in self.coordinates(view=True): self._conform_coordinate_references(ckey, coordref=construct) - # --- End: if # Return the construct key return out @@ -18029,7 +17719,6 @@ def halo( tripolar["Y"] = i_Y tripolar_axes = {X: "X", Y: "Y"} - # --- End: if # Add halos to the field construct's data size = {data_axes.index(axis): h for axis, h, in axis_halo.items()} @@ -18048,7 +17737,7 @@ def halo( d.set_size(d.get_size() + 2 * h) # Add halos to metadata constructs - for key, c in f.constructs.filter_by_data().items(): + for key, c in f.constructs.filter_by_data(view=True).items(): construct_axes = f.get_data_axes(key) construct_size = { construct_axes.index(axis): h @@ -18074,7 +17763,6 @@ def halo( inplace=True, verbose=verbose, ) - # --- End: for if verbose: print("Returns:{!r}".format(f)) # pragma: no cover @@ -18292,7 +17980,9 @@ def percentile( out.set_properties(self.properties()) for axis in [ - axis for axis in self.domain_axes if axis not in data_axes + axis + for axis in self.domain_axes(view=True) + if axis not in data_axes ]: 
out.set_construct(self._DomainAxis(1), key=axis) @@ -18319,9 +18009,11 @@ def percentile( # axes # ------------------------------------------------------------ if axes: - for key, c in self.dimension_coordinates.filter_by_axis( - "subset", *axes - ).items(): + for key, c in ( + self.dimension_coordinates(view=True) + .filter_by_axis("subset", *axes) + .items() + ): c_axes = self.get_data_axes(key) c = c.copy() @@ -18342,13 +18034,12 @@ def percentile( c.set_bounds(self._Bounds(data=bounds), copy=False) out.set_construct(c, axes=c_axes, key=key, copy=False) - # --- End: if # TODO other_axes = set( [ axis - for axis in self.domain_axes + for axis in self.domain_axes(view=True) if axis not in axes or self.domain_axis(axis).size == 1 ] ) @@ -18366,18 +18057,21 @@ def percentile( # ------------------------------------------------------------ # Copy coordinate reference constructs to the output field # ------------------------------------------------------------ - for cr_key, ref in self.coordinate_references.items(): + out_coordinates = out.coordinates(view=True) + out_domain_ancillaries = out.domain_ancillaries(view=True) + + for cr_key, ref in self.coordinate_references(view=True).items(): ref = ref.copy() for c_key in ref.coordinates(): - if c_key not in out.coordinates: + if c_key not in out_coordinates: ref.del_coordinate(c_key) for ( term, da_key, ) in ref.coordinate_conversion.domain_ancillaries().items(): - if da_key not in out.domain_ancillaries: + if da_key not in out_domain_ancillaries: ref.coordinate_conversion.set_domain_ancillary(term, None) out.set_construct(ref, key=cr_key, copy=False) @@ -18805,7 +18499,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): f.set_data_axes(new_data_axes) # Modify or remove cell methods that span the flatten axes - for key, cm in tuple(f.cell_methods.items()): + for key, cm in tuple(f.cell_methods(view=True).items()): cm_axes = set(cm.get_axes(())) if not cm_axes or cm_axes.isdisjoint(axes): continue @@ -18824,7 +18518,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): "auxiliary_coordinate", ): for c in ( - f.constructs.filter_by_type(ctype) + f.constructs.filter_by_type(ctype, view=True) .filter_by_axis("exact", a) .values() ): @@ -18834,7 +18528,6 @@ def flatten(self, axes=None, return_axis=False, inplace=False): if sn is not None: break - # --- End: for if sn is None: f.del_construct(key) @@ -18842,11 +18535,9 @@ def flatten(self, axes=None, return_axis=False, inplace=False): break else: cm_axes[i] = sn - # --- End: for if set_axes: cm.set_axes(cm_axes) - # --- End: for # Flatten the constructs that span all of the flattened axes, # or all of the flattened axes all bar some which have size 1. 
@@ -18935,23 +18626,27 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): axis, key=True, default=ValueError( - "Can't roll: Bad axis specification: {!r}".format(axis) + f"Can't roll: Bad axis specification: {axis!r}" ), ) f = _inplace_enabled_define_and_cleanup(self) - if self.domain_axes[axis].get_size() <= 1: + domain_axes = self.domain_axes(view=True) + if domain_axes[axis].get_size() <= 1: if inplace: f = None + return f - dim = self.dimension_coordinates.filter_by_axis("exact", axis).value( - None + dim = ( + self.dimension_coordinates(view=True) + .filter_by_axis("exact", axis) + .value(None) ) if dim is not None and dim.period() is None: raise ValueError( - "Can't roll: {!r} axis has non-periodic dimension " - "coordinates".format(dim.identity()) + f"Can't roll: {dim.identity()!r} axis has non-periodic " + "dimension coordinates" ) try: @@ -18959,15 +18654,15 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): except ValueError: if inplace: f = None + return f super(Field, f).roll(iaxis, shift, inplace=True) - for key, construct in f.constructs.filter_by_data().items(): + for key, construct in f.constructs.filter_by_data(view=True).items(): axes = f.get_data_axes(key, default=()) if axis in axes: construct.roll(axes.index(axis), shift, inplace=True) - # --- End: for return f @@ -19294,12 +18989,9 @@ def where( for i, axis in enumerate(data_axes): if axis not in construct_data_axes: construct_data.insert_dimension(i, inplace=True) - # --- End: if condition = condition.evaluate(construct_data) - # --- End: if - if x is not None and isinstance(x, self_class): x = self._conform_for_assignment(x) @@ -20221,7 +19913,6 @@ def regrids( ) unmasked_grid_created = True old_mask = None - # --- End: if # Fill the source and destination fields (the destination # field gets filled with a fill value, the source field @@ -20265,7 +19956,6 @@ def regrids( regridded_data.transpose(src_order).reshape(shape), units=self.Units, ) - # --- End: for # Construct new data from regridded sections new_data = Data.reconstruct_sectioned_data(sections) @@ -20668,7 +20358,7 @@ def regridc( raise ValueError( "Axis {!r} not specified in dst.".format(axis) ) - # --- End: for + dst_axis_keys = None else: dst_axis_keys, dst_coords = dst._regrid_get_cartesian_coords( @@ -20688,7 +20378,6 @@ def regridc( src_coord.Units, dst_coord.Units ) ) - # --- End: if # Get the axis indices and their order for the source field src_axis_indices, src_order = f._regrid_get_axis_indices(src_axis_keys) @@ -20880,7 +20569,6 @@ def regridc( ) unmasked_grid_created = True old_mask = None - # --- End: if # Fill the source and destination fields f._regrid_fill_fields(src_data, srcfield, dstfield) @@ -20922,20 +20610,12 @@ def regridc( .reshape(shape), units=f.Units, ) - # --- End: for + sections[k] = Data.reconstruct_sectioned_data(subsections) - # --- End: for # Construct new data from regridded sections new_data = Data.reconstruct_sectioned_data(sections) - # Construct new field - # if i: - # f = self - # else: - # f = self.copy(_omit_Data=True) - # # --- End:if - # # Update ancillary variables of new field # f._conform_ancillary_variables(src_axis_keys, keep_size_1=False) @@ -21054,7 +20734,9 @@ def derivative( if axis is None: raise ValueError("Invalid axis specifier") - dims = self.dimension_coordinates.filter_by_axis("exact", axis) + dims = self.dimension_coordinates(view=True).filter_by_axis( + "exact", axis + ) len_dims = len(dims) if not len_dims: raise ValueError("Invalid axis specifier") @@ -21089,22 
+20771,12 @@ def derivative( d = coord.data.convolution_filter( window=[1, 0, -1], axis=0, mode=mode, cval=numpy_nan ) - # d = scipy_convolve1d(coord, [1, 0, -1], mode=mode, - # cval=numpy_nan) - # if not cyclic and not one_sided_at_boundary: - # with numpy_errstate(invalid='ignore'): - # d = numpy_ma_masked_invalid(d) - # # --- End: if # Reshape the finite difference of the axis for broadcasting - # shape = [1] * self.ndim - # shape[axis_index] = d.size - # d = d.reshape(shape) for _ in range(self.ndim - 1 - axis_index): d.insert_dimension(position=1, inplace=True) # Find the derivative - # f.data /= Data(d, coord.units) f.data /= d # Update the standard name and long name @@ -21134,7 +20806,7 @@ def aux(self, identity, default=ValueError(), key=False, **kwargs): return self.auxiliary_coordinate(identity, key=key, default=default) def auxs(self, *identities, **kwargs): - """Alias for `cf.Field.auxiliary_coordinates`.""" + """Alias for `cf.Field.auxiliary_coordinates()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21163,9 +20835,8 @@ def auxs(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.auxiliary_coordinates(*identities) + return self.auxiliary_coordinates()(*identities) def axis(self, identity, key=False, default=ValueError(), **kwargs): """Alias of `cf.Field.domain_axis`.""" @@ -21189,7 +20860,7 @@ def coord(self, identity, default=ValueError(), key=False, **kwargs): "Use methods of the 'coordinates' attribute instead.", ) # pragma: no cover - if identity in self.domain_axes: + if identity in self.domain_axes(view=True): # Allow an identity to be the domain axis construct key # spanned by a dimension coordinate construct return self.dimension_coordinate( @@ -21199,7 +20870,7 @@ def coord(self, identity, default=ValueError(), key=False, **kwargs): return self.coordinate(identity, key=key, default=default) def coords(self, *identities, **kwargs): - """Alias for `cf.Field.coordinates`.""" + """Alias for `cf.Field.coordinates()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21227,9 +20898,10 @@ def coords(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.coordinates.filter_by_identity(*identities) + return self.coordinates(view=True).filter_by_identity( + *identities, **kwargs + ) def dim(self, identity, default=ValueError(), key=False, **kwargs): """Alias for `cf.Field.dimension_coordinate`.""" @@ -21245,7 +20917,7 @@ def dim(self, identity, default=ValueError(), key=False, **kwargs): return self.dimension_coordinate(identity, key=key, default=default) def dims(self, *identities, **kwargs): - """Alias for `cf.Field.dimension_coordinates`.""" + """Alias for `cf.Field.dimension_coordinates()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21274,9 +20946,10 @@ def dims(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.dimension_coordinates.filter_by_identity(*identities) + return self.dimension_coordinates(view=True).filter_by_identity( + *identities, **kwargs + ) def domain_anc(self, identity, default=ValueError(), key=False, **kwargs): """Alias for `cf.Field.domain_ancillary`.""" @@ -21292,7 +20965,7 @@ def domain_anc(self, identity, default=ValueError(), key=False, **kwargs): return self.domain_ancillary(identity, key=key, default=default) def domain_ancs(self, *identities, **kwargs): - """Alias for `cf.Field.domain_ancillaries`.""" + """Alias for `cf.Field.domain_ancillaries()`.""" if 
kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21321,9 +20994,10 @@ def domain_ancs(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.domain_ancillaries.filter_by_identity(*identities) + return self.domain_ancillaries(view=True).filter_by_identity( + *identities, **kwargs + ) def field_anc(self, identity, default=ValueError(), key=False, **kwargs): """Alias for `cf.Field.field_ancillary`.""" @@ -21338,7 +21012,7 @@ def field_anc(self, identity, default=ValueError(), key=False, **kwargs): return self.field_ancillary(identity, key=key, default=default) def field_ancs(self, *identities, **kwargs): - """Alias for `cf.Field.field_ancillaries`.""" + """Alias for `cf.Field.field_ancillaries()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21366,9 +21040,10 @@ def field_ancs(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.field_ancillaries.filter_by_identity(*identities) + return self.field_ancillaries(view=True).filter_by_identity( + *identities, **kwargs + ) def item(self, identity, key=False, default=ValueError(), **kwargs): """Alias for `cf.Field.construct`.""" @@ -21397,7 +21072,6 @@ def items(self, *identities, **kwargs): _DEPRECATION_ERROR_DICT() # pragma: no cover elif isinstance(i, (list, tuple, set)): _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - # --- End: for return self.constructs.filter_by_data().filter_by_identity(*identities) @@ -21426,7 +21100,7 @@ def measure(self, identity, default=ValueError(), key=False, **kwargs): return self.cell_measure(identity, key=key, default=default) def measures(self, *identities, **kwargs): - """Alias for `cf.Field.cell_measures`.""" + """Alias for `cf.Field.cell_measures()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21457,9 +21131,8 @@ def measures(self, *identities, **kwargs): i, i.replace(":", "=", 1) ) ) # pragma: no cover - # --- End: for - return self.cell_measures(*identities) + return self.cell_measures()(*identities) def ref(self, identity, default=ValueError(), key=False, **kwargs): """Alias for `cf.Field.coordinate_reference`.""" @@ -21475,7 +21148,7 @@ def ref(self, identity, default=ValueError(), key=False, **kwargs): return self.coordinate_reference(identity, key=key, default=default) def refs(self, *identities, **kwargs): - """Alias for `cf.Field.coordinate_references`.""" + """Alias for `cf.Field.coordinate_references()`.""" if kwargs: _DEPRECATION_ERROR_KWARGS( self, @@ -21490,9 +21163,8 @@ def refs(self, *identities, **kwargs): _DEPRECATION_ERROR_DICT() # pragma: no cover elif isinstance(i, (list, tuple, set)): _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - # --- End: for - return self.coordinate_references(*identities) + return self.coordinate_references()(*identities) # ---------------------------------------------------------------- # Deprecated attributes and methods @@ -21554,7 +21226,7 @@ def equivalent(self, other, rtol=None, atol=None, verbose=None): Deprecated at version 3.0.0. 
""" - _DEPRECATION_ERROR_METHOD(self, "equivalent") + _DEPRECATION_ERROR_METHOD(self, "equivalent", version="3.0.0") @classmethod def example_field(cls, n): @@ -21904,6 +21576,3 @@ def unlimited(self, *args): "Use methods 'DomainAxis.nc_is_unlimited', and " "'DomainAxis.nc_set_unlimited' instead.", ) # pragma: no cover - - -# --- End: class diff --git a/cf/read_write/netcdf/netcdfwrite.py b/cf/read_write/netcdf/netcdfwrite.py index 8a897eed98..ab16fff6f5 100644 --- a/cf/read_write/netcdf/netcdfwrite.py +++ b/cf/read_write/netcdf/netcdfwrite.py @@ -621,6 +621,3 @@ def _convert_to_builtin_type(self, x): "{!r} object can't be converted to a JSON serializable type: " "{!r}".format(type(x), x) ) - - -# --- End: class diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index f48f961739..c58dc23cef 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -2878,7 +2878,6 @@ def xy_coordinate(self, axiscode, axis): origin -= 360.0 while origin + delta * size < -360.0: origin += 360.0 - # --- End: if array = numpy_arange( origin + delta, diff --git a/cf/test/create_test_files.py b/cf/test/create_test_files.py index 8a5360f33d..6f40d454e0 100644 --- a/cf/test/create_test_files.py +++ b/cf/test/create_test_files.py @@ -694,8 +694,6 @@ def _pp( nc.close() - # --- End: def - parent_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), "parent.nc" ) @@ -727,8 +725,6 @@ def _jj(shape, list_values): array[index] = i return array - # --- End: def - n = netCDF4.Dataset(filename, "w", format="NETCDF3_CLASSIC") n.Conventions = "CF-" + VN diff --git a/cf/test/test_AuxiliaryCoordinate.py b/cf/test/test_AuxiliaryCoordinate.py index 87aa8284ba..b6d2297469 100644 --- a/cf/test/test_AuxiliaryCoordinate.py +++ b/cf/test/test_AuxiliaryCoordinate.py @@ -60,7 +60,7 @@ def test_AuxiliaryCoordinate_chunk(self): def test_AuxiliaryCoordinate__repr__str__dump(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates("latitude").value() + x = f.auxiliary_coordinates(view=True)("latitude").value() _ = repr(x) _ = str(x) @@ -68,7 +68,7 @@ def test_AuxiliaryCoordinate__repr__str__dump(self): def test_AuxiliaryCoordinate_bounds(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates("X").value() + d = f.dimension_coordinates(view=True)("X").value() x = cf.AuxiliaryCoordinate(source=d) _ = x.upper_bounds @@ -76,7 +76,7 @@ def test_AuxiliaryCoordinate_bounds(self): def test_AuxiliaryCoordinate_properties(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates("latitude").value() + x = f.auxiliary_coordinates(view=True)("latitude").value() x.positive = "up" self.assertEqual(x.positive, "up") @@ -88,12 +88,12 @@ def test_AuxiliaryCoordinate_properties(self): del x.axis self.assertIsNone(getattr(x, "axis", None)) - d = f.dimension_coordinates("X").value() + d = f.dimension_coordinates(view=True)("X").value() x = cf.AuxiliaryCoordinate(source=d) def test_AuxiliaryCoordinate_insert_dimension(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates("X").value() + d = f.dimension_coordinates(view=True)(view=True)("X").value() x = cf.AuxiliaryCoordinate(source=d) self.assertEqual(x.shape, (9,)) @@ -109,7 +109,7 @@ def test_AuxiliaryCoordinate_insert_dimension(self): def test_AuxiliaryCoordinate_transpose(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates("longitude").value() + x = f.auxiliary_coordinates(view=True)("longitude").value() bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) @@ -129,7 +129,7 @@ def 
test_AuxiliaryCoordinate_transpose(self): def test_AuxiliaryCoordinate_squeeze(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates("longitude").value() + x = f.auxiliary_coordinates(view=True)("longitude").value() bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) @@ -356,9 +356,6 @@ def test_AuxiliaryCoordinate_clip(self): self.assertIsNone(aux.clip(-15, 25, inplace=True)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_CellMeasure.py b/cf/test/test_CellMeasure.py index 49f5f4b467..4fd77acd6a 100644 --- a/cf/test/test_CellMeasure.py +++ b/cf/test/test_CellMeasure.py @@ -16,7 +16,7 @@ class CellMeasureTest(unittest.TestCase): def test_CellMeasure__repr__str__dump(self): f = cf.read(self.filename)[0] - x = f.cell_measures("measure:area").value() + x = f.cell_measures(view=True)("measure:area").value() _ = repr(x) _ = str(x) @@ -24,7 +24,7 @@ def test_CellMeasure__repr__str__dump(self): def test_CellMeasure_measure(self): f = cf.read(self.filename)[0] - x = f.cell_measures("measure:area").value() + x = f.cell_measures(view=True)("measure:area").value() self.assertEqual(x.measure, "area") del x.measure @@ -34,7 +34,7 @@ def test_CellMeasure_measure(self): def test_CellMeasure_identity(self): f = cf.read(self.filename)[0] - x = f.cell_measures("measure:area").value() + x = f.cell_measures(view=True)("measure:area").value() self.assertEqual(x.identity(), "measure:area") del x.measure @@ -43,9 +43,6 @@ def test_CellMeasure_identity(self): self.assertEqual(x.identity(), "") -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_CellMethod.py b/cf/test/test_CellMethod.py index 3d82103726..913b2c848e 100644 --- a/cf/test/test_CellMethod.py +++ b/cf/test/test_CellMethod.py @@ -86,7 +86,6 @@ def test_CellMethod_equivalent(self): cms = cf.CellMethod.create(s) for cm in cms: self.assertTrue(cm.equivalent(cm.copy(), verbose=2)) - # --- End: for # Intervals for s0, s1 in ( @@ -139,7 +138,6 @@ def test_CellMethod_equivalent(self): cm0.equivalent(cm1, verbose=2), "{0!r} not equivalent to {1!r}".format(cm0, cm1), ) - # --- End: for def test_CellMethod_get_set_delete(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -166,9 +164,6 @@ def test_CellMethod_intervals(self): self.assertEqual("1 hour", str(cm.intervals[0])) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_Count.py b/cf/test/test_Count.py index d1452ec784..78e784b2f9 100644 --- a/cf/test/test_Count.py +++ b/cf/test/test_Count.py @@ -27,14 +27,11 @@ def test_Count__repr__str__dump(self): count = f.data.get_count() - _ = repr(count) - _ = str(count) + repr(count) + str(count) self.assertIsInstance(count.dump(display=False), str) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_DimensionCoordinate.py b/cf/test/test_DimensionCoordinate.py index f8830680a6..fe7aa0885f 100644 --- a/cf/test/test_DimensionCoordinate.py +++ b/cf/test/test_DimensionCoordinate.py @@ -46,7 +46,7 @@ class DimensionCoordinateTest(unittest.TestCase): def test_DimensionCoordinate__repr__str__dump(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates("X").value() + x = f.dimension_coordinates(view=True)("X").value() _ = repr(x) _ = str(x) @@ -136,8 +136,8 @@ def 
test_DimensionCoordinate_convert_reference_time(self): def test_DimensionCoordinate_roll(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates("X").value() - y = f.dimension_coordinates("Y").value() + x = f.dimension_coordinates(view=True)("X").value() + y = f.dimension_coordinates(view=True)("Y").value() _ = x.roll(0, 3) with self.assertRaises(Exception): @@ -248,7 +248,7 @@ def test_DimensionCoordinate_override_calendar(self): def test_DimensionCoordinate_bounds(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates("X").value() + x = f.dimension_coordinates(view=True)("X").value() _ = x.upper_bounds _ = x.lower_bounds @@ -269,7 +269,7 @@ def test_DimensionCoordinate_bounds(self): def test_DimensionCoordinate_properties(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates("X").value() + x = f.dimension_coordinates(view=True)("X").value() x.positive = "up" self.assertEqual(x.positive, "up") @@ -284,7 +284,7 @@ def test_DimensionCoordinate_properties(self): def test_DimensionCoordinate_insert_dimension(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates("X").value() + x = f.dimension_coordinates(view=True)("X").value() self.assertEqual(x.shape, (9,)) self.assertEqual(x.bounds.shape, (9, 2)) @@ -474,9 +474,6 @@ def test_DimensionCoordinate__setitem__(self): self.assertTrue(d.bounds.equals(self.dim.bounds, verbose=3)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_DomainAncillary.py b/cf/test/test_DomainAncillary.py index 4f89b093c2..d414b9625c 100644 --- a/cf/test/test_DomainAncillary.py +++ b/cf/test/test_DomainAncillary.py @@ -11,12 +11,9 @@ class DomainAncillaryTest(unittest.TestCase): def test_DomainAncillary(self): f = cf.DomainAncillary() - _ = repr(f) - _ = str(f) - _ = f.dump(display=False) - - -# --- End: class + repr(f) + str(f) + f.dump(display=False) if __name__ == "__main__": diff --git a/cf/test/test_DomainAxis.py b/cf/test/test_DomainAxis.py index 07037d2e8f..ae2575de11 100644 --- a/cf/test/test_DomainAxis.py +++ b/cf/test/test_DomainAxis.py @@ -12,8 +12,8 @@ def test_DomainAxis__repr__str(self): x = cf.DomainAxis(size=56) x.nc_set_dimension("tas") - _ = repr(x) - _ = str(x) + repr(x) + str(x) def test_DomainAxis(self): x = cf.DomainAxis(size=111) @@ -45,10 +45,7 @@ def test_DomainAxis(self): self.assertLessEqual(x, 100) self.assertNotEqual(x, 100) - _ = hash(x) - - -# --- End: class + hash(x) if __name__ == "__main__": diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index e4c8375080..f832fe0e2d 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -38,7 +38,6 @@ def _remove_tmpfiles(): os.remove(f) except OSError: pass - # --- End: for atexit.register(_remove_tmpfiles) @@ -135,7 +134,6 @@ def test_Field_get_filenames(self): for c in g.constructs.filter_by_data().values(): if c.has_bounds(): c.bounds.data[...] 
= -99 - # --- End: for self.assertEqual(g.get_filenames(), set(), g.get_filenames()) @@ -159,7 +157,6 @@ def test_Field_halo(self): self.assertTrue( (numpy.array(c.shape) == numpy.array(d.shape) + i * 2).all() ) - # --- End: for def test_Field_has_construct(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -222,7 +219,6 @@ def test_Field_compress_uncompress(self): bool(c.data.get_compression_type()), message ) self.assertTrue(f.equals(c, verbose=2), message) - # --- End: for def test_Field_apply_masking(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -464,7 +460,6 @@ def test_Field_weights(self): measure=m, data=d, ) - # --- End: for with self.assertRaises(Exception): f.weights(components=True, data=True) @@ -483,7 +478,6 @@ def test_Field_replace_construct(self): ): for copy in (True, False): f.replace_construct(x, f.construct(x), copy=copy) - # --- End: for with self.assertRaises(Exception): f.replace_construct("grid_longitude", f.construct("latitude")) @@ -540,7 +534,6 @@ def test_Field_collapse(self): method, weights, axes, a, b ), ) - # --- End: for for method in ( "mean", @@ -562,7 +555,6 @@ def test_Field_collapse(self): method, weights, axes, a, b ), ) - # --- End: for for method in ("integral",): weights = "area" @@ -577,7 +569,6 @@ def test_Field_collapse(self): method, axes, a, b ), ) - # --- End: for for axes in axes_combinations(f): if axes == (0,): @@ -600,7 +591,6 @@ def test_Field_collapse(self): method, weights, axes, a, b ), ) - # --- End: for for method in ("mean_of_upper_decile",): for weights in (None, "area"): @@ -617,7 +607,6 @@ def test_Field_collapse(self): method, weights, axes, a, b ), ) - # --- End: for def test_Field_all(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -644,26 +633,6 @@ def test_Field_any(self): f.del_data() self.assertFalse(f.any()) - def test_Field_axis(self): - # v2 compatibility - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f - - self.assertTrue(f.domain_axes.equals(f.axes(), verbose=2)) - self.assertTrue( - f.domain_axes("domainaxis1").equals( - f.axes("domainaxis1"), verbose=2 - ) - ) - - self.assertTrue( - f.domain_axis("domainaxis1").equals( - f.axis("domainaxis1"), verbose=2 - ) - ) - def test_Field_atol_rtol(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: return @@ -774,7 +743,6 @@ def test_Field_AUXILIARY_MASK(self): self.assertTrue( cf.functions._numpy_allclose(t.array, a), message ) - # --- End: for cf.chunksize(self.original_chunksize) @@ -826,7 +794,6 @@ def test_Field_AUXILIARY_MASK(self): self.assertTrue( cf.functions._numpy_allclose(t.array, a), message ) - # --- End: for cf.chunksize(self.original_chunksize) @@ -877,7 +844,7 @@ def test_Field_AUXILIARY_MASK(self): self.assertTrue( cf.functions._numpy_allclose(t.array, a), message ) - # --- End: for + cf.chunksize(self.original_chunksize) def test_Field__getitem__(self): @@ -988,10 +955,10 @@ def test_Field__add__(self): b = g + f axis = a.domain_axis("grid_longitude", key=1) - for key in a.field_ancillaries.filter_by_axis("or", axis): + for key in a.field_ancillaries(view=True).filter_by_axis("or", axis): a.del_construct(key) - for key in a.cell_measures.filter_by_axis("or", axis): + for key in a.cell_measures(view=True).filter_by_axis("or", axis): a.del_construct(key) self.assertTrue(a.equals(b, verbose=2)) @@ -1164,7 +1131,6 @@ def test_Field_anchor(self): period, x1, anchor, x0 ), ) - # --- End: for def 
test_Field_cell_area(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -1175,21 +1141,21 @@ def test_Field_cell_area(self): ca = f.cell_area() self.assertEqual(ca.ndim, 2) - self.assertEqual(len(ca.dimension_coordinates), 2) - self.assertEqual(len(ca.domain_ancillaries), 0) - self.assertEqual(len(ca.coordinate_references), 1) + self.assertEqual(len(ca.dimension_coordinates(view=True)), 2) + self.assertEqual(len(ca.domain_ancillaries(view=True)), 0) + self.assertEqual(len(ca.coordinate_references(view=True)), 1) f.del_construct("cellmeasure0") y = f.dimension_coordinate("Y") y.set_bounds(y.create_bounds()) - self.assertEqual(len(f.cell_measures), 0) + self.assertEqual(len(f.cell_measures(view=True)), 0) ca = f.cell_area() self.assertEqual(ca.ndim, 2) - self.assertEqual(len(ca.dimension_coordinates), 2) - self.assertEqual(len(ca.domain_ancillaries), 0) - self.assertEqual(len(ca.coordinate_references), 1) + self.assertEqual(len(ca.dimension_coordinates(view=True)), 2) + self.assertEqual(len(ca.domain_ancillaries(view=True)), 0) + self.assertEqual(len(ca.coordinate_references(view=True)), 1) self.assertTrue(ca.Units.equivalent(cf.Units("m2")), ca.Units) y = f.dimension_coordinate("Y") @@ -1234,6 +1200,9 @@ def test_Field_radius(self): with self.assertRaises(Exception): _ = f.radius(default="qwerty") + cr = f.coordinate_reference( + "grid_mapping_name:rotated_latitude_longitude" + ) cr = f.coordinate_reference("rotated_latitude_longitude") cr.datum.set_parameter("earth_radius", a.copy()) @@ -1241,6 +1210,9 @@ def test_Field_radius(self): self.assertEqual(r.Units, cf.Units("m")) self.assertEqual(r, a) + cr = f.coordinate_reference( + "standard_name:atmosphere_hybrid_height_coordinate" + ) cr = f.coordinate_reference("atmosphere_hybrid_height_coordinate") cr.datum.set_parameter("earth_radius", a.copy()) @@ -1660,7 +1632,6 @@ def test_Field_indices(self): self.assertEqual( g.construct("grid_longitude").array, 40 ) # TODO - # --- End: for for mode in ("", "compress", "full", "envelope"): indices = f.indices(mode, grid_latitude=cf.contains(3)) @@ -1674,7 +1645,6 @@ def test_Field_indices(self): if mode != "full": self.assertEqual(g.construct("grid_latitude").array, 3) - # --- End: for for mode in ("", "compress", "full", "envelope"): indices = f.indices(mode, longitude=cf.contains(83)) @@ -1688,7 +1658,6 @@ def test_Field_indices(self): if mode != "full": self.assertEqual(g.construct("longitude").array, 83) - # --- End: for # Calls that should fail with self.assertRaises(Exception): @@ -1928,7 +1897,6 @@ def test_Field_moving_window(self): x /= weights[i - 1 : i + 2].sum() numpy.testing.assert_allclose(x, g.array[:, i]) - # --- End: for # Test 'wrap' for mode in (None, "wrap"): @@ -1947,7 +1915,6 @@ def test_Field_moving_window(self): x /= weights[ii].sum() numpy.testing.assert_allclose(x, g.array[:, i]) - # --- End: for # ------------------------------------------------------------ # Origin = 1 @@ -1975,7 +1942,6 @@ def test_Field_moving_window(self): x /= weights[ii].sum() numpy.testing.assert_allclose(x, g.array[:, i]) - # --- End: for # Test 'wrap' for mode in (None, "wrap"): @@ -1999,7 +1965,6 @@ def test_Field_moving_window(self): x /= weights[ii].sum() numpy.testing.assert_allclose(x, g.array[:, i]) - # --- End: for # ------------------------------------------------------------ # Constant @@ -2024,7 +1989,6 @@ def test_Field_moving_window(self): x /= weights[ii].sum() numpy.testing.assert_allclose(x, g.array[:, i]) - # --- End: for # 
------------------------------------------------------------ # Weights broadcasting @@ -2037,7 +2001,9 @@ def test_Field_moving_window(self): "mean", window_size=3, axis="X", weights=weights ) - self.assertTrue(len(g.cell_methods) == len(f.cell_methods) + 1) + self.assertEqual( + len(g.cell_methods(view=True)), len(f.cell_methods(view=True)) + 1 + ) def test_Field_derivative(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2082,24 +2048,24 @@ def test_Field_convert(self): c = f.convert("grid_latitude") self.assertTrue(c.ndim == 1) self.assertTrue(c.standard_name == "grid_latitude") - self.assertTrue(len(c.dimension_coordinates) == 1) - self.assertTrue(len(c.auxiliary_coordinates) == 1) - self.assertTrue(len(c.cell_measures) == 0) - self.assertTrue(len(c.coordinate_references) == 1) - self.assertTrue(len(c.domain_ancillaries) == 0) - self.assertTrue(len(c.field_ancillaries) == 0) - self.assertTrue(len(c.cell_methods) == 0) + self.assertTrue(len(c.dimension_coordinates(view=True)) == 1) + self.assertTrue(len(c.auxiliary_coordinates(view=True)) == 1) + self.assertTrue(len(c.cell_measures(view=True)) == 0) + self.assertTrue(len(c.coordinate_references(view=True)) == 1) + self.assertTrue(len(c.domain_ancillaries(view=True)) == 0) + self.assertTrue(len(c.field_ancillaries(view=True)) == 0) + self.assertTrue(len(c.cell_methods(view=True)) == 0) c = f.convert("latitude") self.assertTrue(c.ndim == 2) self.assertTrue(c.standard_name == "latitude") - self.assertTrue(len(c.dimension_coordinates) == 2) - self.assertTrue(len(c.auxiliary_coordinates) == 3) - self.assertTrue(len(c.cell_measures) == 1) - self.assertTrue(len(c.coordinate_references) == 1) - self.assertTrue(len(c.domain_ancillaries) == 0) - self.assertTrue(len(c.field_ancillaries) == 0) - self.assertTrue(len(c.cell_methods) == 0) + self.assertTrue(len(c.dimension_coordinates(view=True)) == 2) + self.assertTrue(len(c.auxiliary_coordinates(view=True)) == 3) + self.assertTrue(len(c.cell_measures(view=True)) == 1) + self.assertTrue(len(c.coordinate_references(view=True)) == 1) + self.assertTrue(len(c.domain_ancillaries(view=True)) == 0) + self.assertTrue(len(c.field_ancillaries(view=True)) == 0) + self.assertTrue(len(c.cell_methods(view=True)) == 0) # Cellsize c = f.convert("grid_longitude", cellsize=True) @@ -2166,29 +2132,11 @@ def test_Field_auxiliary_coordinate(self): ) self.assertEqual(f.auxiliary_coordinate(identity, key=True), key) - self.assertTrue(f.aux(identity).equals(c, verbose=2)) - self.assertEqual(f.aux(identity, key=True), key) - - self.assertEqual(len(f.auxs()), 3) - self.assertEqual(len(f.auxs("longitude")), 1) - self.assertEqual(len(f.auxs("longitude", "latitude")), 2) - identities = ["latitude", "longitude"] - c = f.auxiliary_coordinates(*identities) - self.assertTrue(f.auxs(*identities).equals(c, verbose=2)) - c = f.auxiliary_coordinates() - self.assertTrue(f.auxs().equals(c, verbose=2)) - c = f.auxiliary_coordinates(identities[0]) - self.assertTrue(f.auxs(identities[0]).equals(c, verbose=2)) - - self.assertIsNone(f.aux("long_name=qwerty:asd", None)) - self.assertEqual(len(f.auxs("long_name=qwerty:asd")), 0) - - with self.assertRaises(Exception): - f.aux("long_name:qwerty") - - with self.assertRaises(Exception): - f.auxs("long_name:qwerty") + auxiliary_coordinates = f.auxiliary_coordinates(view=True) + auxiliary_coordinates(*identities) + auxiliary_coordinates() + auxiliary_coordinates(identities[0]) def test_Field_coordinate(self): if self.test_only and inspect.stack()[0][3] not in 
self.test_only: @@ -2204,12 +2152,16 @@ def test_Field_coordinate(self): "dimensioncoordinate1", ): if identity == "domainaxis2": - key = f.dimension_coordinates.filter_by_axis( - "and", identity - ).key() - c = f.dimension_coordinates.filter_by_axis( - "and", identity - ).value() + key = ( + f.dimension_coordinates(view=True) + .filter_by_axis("and", identity) + .key() + ) + c = ( + f.dimension_coordinates(view=True) + .filter_by_axis("and", identity) + .value() + ) else: key = f.construct_key(identity) c = f.construct(identity) @@ -2217,25 +2169,11 @@ def test_Field_coordinate(self): self.assertTrue(f.coordinate(identity).equals(c, verbose=2)) self.assertTrue(f.coordinate(identity, key=True) == key) - self.assertTrue(f.coord(identity).equals(c, verbose=2)) - self.assertTrue(f.coord(identity, key=True) == key) - identities = ["auxiliarycoordinate1", "dimensioncoordinate1"] - c = f.coordinates(*identities) - self.assertTrue(f.coords(*identities).equals(c, verbose=2)) - c = f.coordinates() - self.assertTrue(f.coords().equals(c, verbose=2)) - c = f.coordinates(identities[0]) - self.assertTrue(f.coords(identities[0]).equals(c, verbose=2)) - - self.assertIsNone(f.coord("long_name=qwerty:asd", None)) - self.assertTrue(len(f.coords("long_name=qwerty:asd")) == 0) - - with self.assertRaises(Exception): - f.coord("long_name:qwerty") - - with self.assertRaises(Exception): - f.coords("long_name:qwerty") + coordinates = f.coordinates(view=True) + coordinates(*identities) + coordinates() + coordinates(identities[0]) def test_Field_coordinate_reference(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2259,9 +2197,6 @@ def test_Field_coordinate_reference(self): ) self.assertTrue(f.coordinate_reference(identity, key=True) == key) - self.assertTrue(f.ref(identity).equals(c, verbose=2)) - self.assertTrue(f.ref(identity, key=True) == key) - key = f.construct_key( "standard_name:atmosphere_hybrid_height_coordinate" ) @@ -2281,8 +2216,8 @@ def test_Field_coordinate_reference(self): # Delete self.assertIsNone(f.del_coordinate_reference("qwerty", default=None)) - self.assertTrue(len(f.coordinate_references) == 2) - self.assertTrue(len(f.domain_ancillaries) == 3) + self.assertTrue(len(f.coordinate_references(view=True)) == 2) + self.assertTrue(len(f.domain_ancillaries(view=True)) == 3) c = f.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" ) @@ -2290,13 +2225,13 @@ def test_Field_coordinate_reference(self): "standard_name:atmosphere_hybrid_height_coordinate" ) self.assertTrue(cr.equals(c, verbose=2)) - self.assertTrue(len(f.coordinate_references) == 1) - self.assertTrue(len(f.domain_ancillaries) == 0) + self.assertTrue(len(f.coordinate_references(view=True)) == 1) + self.assertTrue(len(f.domain_ancillaries(view=True)) == 0) f.del_coordinate_reference( "grid_mapping_name:rotated_latitude_longitude" ) - self.assertTrue(len(f.coordinate_references) == 0) + self.assertTrue(len(f.coordinate_references(view=True)) == 0) # Set f = self.f.copy() @@ -2309,7 +2244,7 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references) == 1) + self.assertTrue(len(f.coordinate_references(view=True)) == 1) cr = g.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" @@ -2319,8 +2254,8 @@ def test_Field_coordinate_reference(self): "foo", "domainancillary99" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references) 
== 2) - self.assertTrue(len(f.domain_ancillaries) == 3) + self.assertTrue(len(f.coordinate_references(view=True)) == 2) + self.assertTrue(len(f.domain_ancillaries(view=True)) == 3) f.del_construct("coordinatereference0") f.del_construct("coordinatereference1") @@ -2329,7 +2264,7 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr) - self.assertTrue(len(f.coordinate_references) == 1) + self.assertTrue(len(f.coordinate_references(view=True)) == 1) def test_Field_dimension_coordinate(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2344,12 +2279,16 @@ def test_Field_dimension_coordinate(self): "dimensioncoordinate1", ): if identity == "domainaxis2": - key = f.dimension_coordinates.filter_by_axis( - "and", identity - ).key() - c = f.dimension_coordinates.filter_by_axis( - "and", identity - ).value() + key = ( + f.dimension_coordinates(view=True) + .filter_by_axis("and", identity) + .key() + ) + c = ( + f.dimension_coordinates(view=True) + .filter_by_axis("and", identity) + .value() + ) elif identity == "X": key = f.construct_key("grid_longitude") c = f.construct("grid_longitude") @@ -2362,16 +2301,11 @@ def test_Field_dimension_coordinate(self): ) self.assertTrue(f.dimension_coordinate(identity, key=True) == key) - self.assertTrue(f.dim(identity).equals(c, verbose=2)) - self.assertTrue(f.dim(identity, key=True) == key) - identities = ["grid_latitude", "X"] - c = f.dimension_coordinates(*identities) - self.assertTrue(f.dims(*identities).equals(c, verbose=2)) - c = f.dimension_coordinates() - self.assertTrue(f.dims().equals(c, verbose=2)) - c = f.dimension_coordinates(identities[0]) - self.assertTrue(f.dims(identities[0]).equals(c, verbose=2)) + dimension_coordinates = f.dimension_coordinates(view=True) + dimension_coordinates(*identities) + dimension_coordinates() + dimension_coordinates(identities[0]) self.assertIsNone(f.dim("long_name=qwerty:asd", None)) self.assertTrue(len(f.dims("long_name=qwerty:asd")) == 0) @@ -2437,26 +2371,6 @@ def test_Field_domain_ancillary(self): self.assertTrue(f.domain_ancillary(identity).equals(c, verbose=2)) self.assertTrue(f.domain_ancillary(identity, key=True) == key) - self.assertTrue(f.domain_anc(identity).equals(c, verbose=2)) - self.assertTrue(f.domain_anc(identity, key=True) == key) - - identities = ["surface_altitude", "key%domainancillary1"] - c = f.domain_ancillaries(*identities) - self.assertTrue(f.domain_ancs(*identities).equals(c, verbose=2)) - c = f.domain_ancillaries() - self.assertTrue(f.domain_ancs().equals(c, verbose=2)) - c = f.domain_ancillaries(identities[0]) - self.assertTrue(f.domain_ancs(identities[0]).equals(c, verbose=2)) - - self.assertIsNone(f.domain_anc("long_name=qwerty:asd", None)) - self.assertTrue(len(f.domain_ancs("long_name=qwerty:asd")) == 0) - - with self.assertRaises(Exception): - f.domain_anc("long_name:qwerty") - - with self.assertRaises(Exception): - f.domain_ancs("long_name:qwerty") - def test_Field_field_ancillary(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: return @@ -2470,29 +2384,11 @@ def test_Field_field_ancillary(self): self.assertTrue(f.field_ancillary(identity).equals(c, verbose=2)) self.assertTrue(f.field_ancillary(identity, key=True) == key) - self.assertTrue(f.field_anc(identity).equals(c, verbose=2)) - self.assertTrue(f.field_anc(identity, key=True) == key) - - self.assertTrue(len(f.field_ancs()) == 4) - self.assertTrue(len(f.field_ancs("ancillary0")) == 1) - 
self.assertTrue(len(f.field_ancs(*["ancillary0", "ancillary1"])) == 2) - identities = ["ancillary1", "ancillary3"] - c = f.field_ancillaries(*identities) - self.assertTrue(f.field_ancs(*identities).equals(c, verbose=2)) - c = f.field_ancillaries() - self.assertTrue(f.field_ancs().equals(c, verbose=2)) - c = f.field_ancillaries(identities[0]) - self.assertTrue(f.field_ancs(identities[0]).equals(c, verbose=2)) - - self.assertIsNone(f.field_anc("long_name=qwerty:asd", None)) - self.assertTrue(len(f.field_ancs("long_name=qwerty:asd")) == 0) - - with self.assertRaises(Exception): - f.field_anc("long_name:qwerty") - - with self.assertRaises(Exception): - f.field_ancs("long_name:qwerty") + field_ancillaries = f.field_ancillaries(view=True) + field_ancillaries(*identities) + field_ancillaries() + field_ancillaries(identities[0]) def test_Field_transpose(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2510,8 +2406,6 @@ def test_Field_transpose(self): f = self.f.copy() h = f.transpose((1, 2, 0)) - # h0 = h.transpose( - # (re.compile('^atmos'), 'grid_latitude', 'grid_longitude')) h0 = h.transpose((re.compile("^atmos"), "grid_latitude", "X")) h.transpose((2, 0, 1), inplace=True) @@ -2661,9 +2555,6 @@ def test_Field_percentile(self): # for every possible axis combo (see also test_Data_percentile). -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_FieldAncillary.py b/cf/test/test_FieldAncillary.py index 91e76e97a6..602df3a886 100644 --- a/cf/test/test_FieldAncillary.py +++ b/cf/test/test_FieldAncillary.py @@ -24,12 +24,12 @@ def test_FieldAncillary(self): def test_FieldAncillary_source(self): f = cf.read(self.filename)[0] - a = f.auxiliary_coordinates("latitude").value() + a = f.auxiliary_coordinates(view=True)("latitude").value() cf.FieldAncillary(source=a) def test_FieldAncillary_properties(self): f = cf.read(self.filename)[0] - x = f.domain_ancillaries("ncvar%a").value() + x = f.domain_ancillaries(view=True)("ncvar%a").value() x.set_property("long_name", "qwerty") @@ -40,7 +40,7 @@ def test_FieldAncillary_properties(self): def test_FieldAncillary_insert_dimension(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates("grid_longitude").value() + d = f.dimension_coordinates(view=True)("grid_longitude").value() x = cf.FieldAncillary(source=d) self.assertEqual(x.shape, (9,)) @@ -53,7 +53,7 @@ def test_FieldAncillary_insert_dimension(self): def test_FieldAncillary_transpose(self): f = cf.read(self.filename)[0] - a = f.auxiliary_coordinates("longitude").value() + a = f.auxiliary_coordinates(view=True)("longitude").value() x = cf.FieldAncillary(source=a) self.assertEqual(x.shape, (9, 10)) @@ -66,7 +66,7 @@ def test_FieldAncillary_transpose(self): def test_FieldAncillary_squeeze(self): f = cf.read(self.filename)[0] - a = f.auxiliary_coordinates("longitude").value() + a = f.auxiliary_coordinates(view=True)("longitude").value() x = cf.FieldAncillary(source=a) x.insert_dimension(1, inplace=True) @@ -81,9 +81,6 @@ def test_FieldAncillary_squeeze(self): self.assertEqual(x.shape, (1, 9, 10)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_FieldList.py b/cf/test/test_FieldList.py index fd632d2559..8bdac3e9bb 100644 --- a/cf/test/test_FieldList.py +++ b/cf/test/test_FieldList.py @@ -25,8 +25,8 @@ def test_FieldList(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: return - _ = 
cf.FieldList(self.x) - _ = cf.FieldList([self.x]) + cf.FieldList(self.x) + cf.FieldList([self.x]) def test_FieldList__add__iadd__(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -531,13 +531,10 @@ def test_FieldList_index(self): self.assertEqual(g.index(c, start=-1), 2) with self.assertRaises(Exception): - _ = g.index(f) + g.index(f) with self.assertRaises(Exception): - _ = g.index(a, start=1) - - -# --- End: class + g.index(a, start=1) if __name__ == "__main__": diff --git a/cf/test/test_Index.py b/cf/test/test_Index.py index ad4dae1f85..a01db799f3 100644 --- a/cf/test/test_Index.py +++ b/cf/test/test_Index.py @@ -27,14 +27,11 @@ def test_Index__repr__str__dump(self): index = f.data.get_index() - _ = repr(index) - _ = str(index) + repr(index) + str(index) self.assertIsInstance(index.dump(display=False), str) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_List.py b/cf/test/test_List.py index 7e1322b7fa..d13f7e4790 100644 --- a/cf/test/test_List.py +++ b/cf/test/test_List.py @@ -27,14 +27,11 @@ def test_List__repr__str__dump(self): list_ = f.data.get_list() - _ = repr(list_) - _ = str(list_) + repr(list_) + str(list_) self.assertIsInstance(list_.dump(display=False), str) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_Partition.py b/cf/test/test_Partition.py index 4486937924..705d56873d 100644 --- a/cf/test/test_Partition.py +++ b/cf/test/test_Partition.py @@ -23,9 +23,6 @@ def test_Partition(self): return -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_Query.py b/cf/test/test_Query.py index e55b2c6659..4a46c2e29f 100644 --- a/cf/test/test_Query.py +++ b/cf/test/test_Query.py @@ -499,7 +499,6 @@ def test_Query_evaluate(self): self.assertNotEqual(cf.wi(8, 11), x) self.assertNotEqual(cf.wo(3, 8), x) self.assertNotEqual(cf.set([3, 8, 11]), x) - # --- End: for c = cf.wi(2, 4) d = cf.wi(6, 8) @@ -549,9 +548,6 @@ def test_Query_evaluate(self): self.assertNotEqual(x, cf.eq(re.compile("^.*RTY$"))) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_Regrid.py b/cf/test/test_Regrid.py index c0edf2a5d5..6e70222eb0 100644 --- a/cf/test/test_Regrid.py +++ b/cf/test/test_Regrid.py @@ -113,7 +113,6 @@ def test_Field_regrids(self): chunksize ), ) - # --- End: for f6 = cf.read(self.filename6)[0] with self.assertRaises(Exception): @@ -183,9 +182,6 @@ def test_Field_regridc(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_TimeDuration.py b/cf/test/test_TimeDuration.py index a97038c6f8..cda948224a 100644 --- a/cf/test/test_TimeDuration.py +++ b/cf/test/test_TimeDuration.py @@ -520,8 +520,6 @@ def test_Timeduration__days_in_month(self): ) -# --- End: class - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_aggregate.py b/cf/test/test_aggregate.py index 247e5808c4..9c4cab64ad 100644 --- a/cf/test/test_aggregate.py +++ b/cf/test/test_aggregate.py @@ -237,9 +237,6 @@ def test_aggregate_verbosity(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_cfa.py b/cf/test/test_cfa.py index 
0399cb0cbf..8344b91b83 100644 --- a/cf/test/test_cfa.py +++ b/cf/test/test_cfa.py @@ -52,8 +52,6 @@ def test_cfa(self): # else: (passes by default) -# --- End: class - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_collapse.py b/cf/test/test_collapse.py index 6fff5af7d9..02cd9c754d 100644 --- a/cf/test/test_collapse.py +++ b/cf/test/test_collapse.py @@ -134,7 +134,7 @@ def test_Field_collapse_CLIMATOLOGICAL_TIME(self): print(g.constructs) self.assertEqual(list(g.shape), expected_shape) - for key in f.cell_methods: + for key in f.cell_methods(view=True): f.del_construct(key) g = f.collapse( @@ -316,7 +316,6 @@ def test_Field_collapse(self): a = a.min(axis=0) self.assertTrue(numpy.allclose(a, g.array[m % 12])) - # --- End: for g = f.collapse("T: mean", group=360) @@ -340,7 +339,6 @@ def test_Field_collapse(self): group.offset.day, "{}!={}, group={}".format(bound.day, group.offset.day, group), ) - # --- End: for # for group in (cf.D(30), # cf.D(30, month=12), @@ -667,8 +665,6 @@ def test_Field_collapse_GROUPS(self): # g = f.collapse('T: mean', group=cf.M(5, month= 3), # group_contiguous=2) -# --- End: class - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) diff --git a/cf/test/test_decorators.py b/cf/test/test_decorators.py index 2512025749..4662a6e372 100644 --- a/cf/test/test_decorators.py +++ b/cf/test/test_decorators.py @@ -69,9 +69,6 @@ def decorated_logging_func(self): logger.warning(self.warning_message) -# --- End: class - - class DecoratorsTest(unittest.TestCase): """Test decorators module. @@ -218,9 +215,6 @@ def test_manage_log_level_via_verbose_attr(self): self.assertNotIn(msg, catch.output) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_dsg.py b/cf/test/test_dsg.py index 7628c0d608..7181163584 100644 --- a/cf/test/test_dsg.py +++ b/cf/test/test_dsg.py @@ -27,7 +27,6 @@ def _remove_tmpfiles(): os.remove(f) except OSError: pass - # --- End: for atexit.register(_remove_tmpfiles) @@ -361,9 +360,6 @@ def test_DSG_create_contiguous(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) print(cf.environment()) diff --git a/cf/test/test_external.py b/cf/test/test_external.py index 49378619f7..519e2483d1 100644 --- a/cf/test/test_external.py +++ b/cf/test/test_external.py @@ -221,9 +221,6 @@ def test_EXTERNAL_AGGREGATE(self): self.assertFalse(f_lon_thirds[1].cell_measures()) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) print(cf.environment()) diff --git a/cf/test/test_formula_terms.py b/cf/test/test_formula_terms.py index ff557d7a2a..15c5ca1324 100644 --- a/cf/test/test_formula_terms.py +++ b/cf/test/test_formula_terms.py @@ -899,9 +899,6 @@ def test_compute_vertical_coordinates(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_functions.py b/cf/test/test_functions.py index aa7920ee4b..2fe7247014 100644 --- a/cf/test/test_functions.py +++ b/cf/test/test_functions.py @@ -139,7 +139,6 @@ def test_configuration(self): ) else: self.assertEqual(post_set[name], val) - # --- End: for # Test the setting of more than one, but not all, items # simultaneously: @@ -164,7 +163,6 @@ def test_configuration(self): self.assertAlmostEqual(post_set[name], val, places=8) else: self.assertEqual(post_set[name], val) - # --- End: for # Test edge 
cases & invalid inputs... # ... 1. Falsy value inputs on some representative items: @@ -275,7 +273,6 @@ def test_context_managers(self): with self.assertRaises(AttributeError): with org: pass - # --- End: with # bounds_combination_mode func = cf.bounds_combination_mode @@ -293,7 +290,6 @@ def test_context_managers(self): with self.assertRaises(AttributeError): with org: pass - # --- End: with # Full configuration func = cf.configuration @@ -348,9 +344,6 @@ def test_environment(self): self.assertIn(component, ep) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_gathering.py b/cf/test/test_gathering.py index b05d7f6823..1d99aabb10 100644 --- a/cf/test/test_gathering.py +++ b/cf/test/test_gathering.py @@ -28,7 +28,6 @@ def _remove_tmpfiles(): os.remove(f) except OSError: pass - # --- End: for atexit.register(_remove_tmpfiles) @@ -423,9 +422,6 @@ def test_GATHERING_create(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) print(cf.environment()) diff --git a/cf/test/test_general.py b/cf/test/test_general.py index fbf169caac..c84652bca2 100644 --- a/cf/test/test_general.py +++ b/cf/test/test_general.py @@ -212,8 +212,6 @@ def test_GENERAL(self): cf.chunksize(original_chunksize) -# --- End: class - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_geometry.py b/cf/test/test_geometry.py index 1044214afa..29e007d72c 100644 --- a/cf/test/test_geometry.py +++ b/cf/test/test_geometry.py @@ -58,7 +58,7 @@ def test_node_count(self): self.assertEqual(len(f), 2, "f = " + repr(f)) for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates), 2) + self.assertEqual(len(g.auxiliary_coordinates(view=True)), 2) g = f[0] for axis in ("X", "Y"): @@ -106,7 +106,7 @@ def test_geometry_2(self): for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates), 3) + self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) g = f[0] for axis in ("X", "Y", "Z"): @@ -143,7 +143,7 @@ def test_geometry_3(self): for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates), 3) + self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) g = f[0] for axis in ("X", "Y", "Z"): @@ -171,7 +171,7 @@ def test_geometry_4(self): for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates), 3) + self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) for axis in ("X", "Y"): coord = g.construct("axis=" + axis) @@ -211,7 +211,7 @@ def test_geometry_interior_ring(self): for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates), 4) + self.assertEqual(len(g.auxiliary_coordinates(view=True)), 4) g = f[0] for axis in ("X", "Y"): @@ -353,9 +353,6 @@ def test_geometry_interior_ring_files(self): self.assertTrue(isinstance(f.get_filenames(), set)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_groups.py b/cf/test/test_groups.py index 8901247c39..0900222a5a 100644 --- a/cf/test/test_groups.py +++ b/cf/test/test_groups.py @@ -368,12 +368,11 @@ def test_groups_dimension(self): # ------------------------------------------------------------ # Move all coordinate bounds constructs to the /forecast group # 
------------------------------------------------------------ - for construct in g.coordinates.values(): + for construct in g.coordinates(view=True).values(): try: construct.bounds.nc_set_variable_groups(["forecast"]) except ValueError: pass - # --- End: for cf.write(g, grouped_file, verbose=1) @@ -411,9 +410,6 @@ def test_groups_dimension(self): self.assertTrue(f.equals(h, verbose=3)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_pp.py b/cf/test/test_pp.py index 2d3fce5113..ba8da09918 100644 --- a/cf/test/test_pp.py +++ b/cf/test/test_pp.py @@ -95,9 +95,6 @@ def test_PP_WGDOS_UNPACKING(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_read_write.py b/cf/test/test_read_write.py index 43cff5497d..3e7a43b895 100644 --- a/cf/test/test_read_write.py +++ b/cf/test/test_read_write.py @@ -37,7 +37,6 @@ def _remove_tmpfiles(): os.remove(f) except OSError: pass - # --- End: for atexit.register(_remove_tmpfiles) @@ -157,7 +156,6 @@ def test_read_directory(self): os.symlink(pwd + f, pwd + subdir + "/" + f) except FileExistsError: pass - # --- End: for f = cf.read(dir, aggregate=False) self.assertEqual(len(f), 1, f) @@ -316,7 +314,7 @@ def test_read_write_netCDF4_compress_shuffle(self): "Bad read/write with lossless compression: " "{0}, {1}, {2}".format(fmt, compress, shuffle), ) - # --- End: for + cf.chunksize(self.original_chunksize) def test_write_datatype(self): @@ -370,7 +368,6 @@ def test_write_datatype(self): for double in (True, False): with self.assertRaises(Exception): _ = cf.write(g, double=double, single=single) - # --- End: for datatype = {numpy.dtype(float): numpy.dtype("float32")} with self.assertRaises(Exception): @@ -412,7 +409,7 @@ def test_write_reference_datetime(self): + repr(reference_datetime) ), ) - # --- End: for + cf.chunksize(self.original_chunksize) def test_read_write_unlimited(self): @@ -421,22 +418,26 @@ def test_read_write_unlimited(self): for fmt in ("NETCDF4", "NETCDF3_CLASSIC"): f = cf.read(self.filename)[0] + domain_axes = f.domain_axes(view=True) - f.domain_axes["domainaxis0"].nc_set_unlimited(True) + domain_axes["domainaxis0"].nc_set_unlimited(True) cf.write(f, tmpfile, fmt=fmt) f = cf.read(tmpfile)[0] - self.assertTrue(f.domain_axes["domainaxis0"].nc_is_unlimited()) + domain_axes = f.domain_axes(view=True) + self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited()) fmt = "NETCDF4" f = cf.read(self.filename)[0] - f.domain_axes["domainaxis0"].nc_set_unlimited(True) - f.domain_axes["domainaxis2"].nc_set_unlimited(True) + domain_axes = f.domain_axes(view=True) + domain_axes["domainaxis0"].nc_set_unlimited(True) + domain_axes["domainaxis2"].nc_set_unlimited(True) cf.write(f, tmpfile, fmt=fmt) f = cf.read(tmpfile)[0] - self.assertTrue(f.domain_axes["domainaxis0"].nc_is_unlimited()) - self.assertTrue(f.domain_axes["domainaxis2"].nc_is_unlimited()) + domain_axes = f.domain_axes(view=True) + self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited()) + self.assertTrue(domain_axes["domainaxis2"].nc_is_unlimited()) def test_read_pp(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -528,7 +529,6 @@ def test_read_write_string(self): for i, j in zip(cf.read(tmpfile1), cf.read(tmpfile0)): self.assertTrue(i.equals(j, verbose=1)) - # --- End: for def test_read_broken_bounds(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -538,9 +538,6 @@ 
def test_read_broken_bounds(self): self.assertEqual(len(f), 2) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_style.py b/cf/test/test_style.py index 587f190bbf..e5ed4e589d 100644 --- a/cf/test/test_style.py +++ b/cf/test/test_style.py @@ -82,9 +82,6 @@ def test_pep8_compliance(self): ) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() From b8fa815a41a8dab840f39f9bedccadce44e02c8c Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 25 Mar 2021 23:35:01 +0000 Subject: [PATCH 06/53] tests pass --- cf/aggregate.py | 4 +- cf/cellmeasure.py | 25 +- cf/cellmethod.py | 16 +- cf/constructs.py | 5 +- cf/domain.py | 3 - cf/field.py | 438 ++++++++++++++-------------- cf/mixin/coordinate.py | 233 ++++++++------- cf/test/test_AuxiliaryCoordinate.py | 2 +- cf/test/test_Field.py | 14 +- 9 files changed, 381 insertions(+), 359 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 99509ddd78..67a4d998a3 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -334,7 +334,7 @@ def __init__( # dim_coord = item(axis) dim_coords = f.dimension_coordinates(view=True).filter_by_axis( - "and", axis + axis, mode="and", view=True ) dim_coord = dim_coords.value(None) dim_coord_key = dim_coords.key(None) @@ -2202,7 +2202,7 @@ def _create_hash_and_first_values( # Still here? dim_coord = m.field.dimension_coordinates( view=True - ).filter_by_axis("and", axis) + ).filter_by_axis(axis, mode="and", view=True) # Find the sort indices for this axis ... if dim_coord is not None: diff --git a/cf/cellmeasure.py b/cf/cellmeasure.py index 348e995736..7b354bab44 100644 --- a/cf/cellmeasure.py +++ b/cf/cellmeasure.py @@ -144,47 +144,42 @@ def identity( n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default n = self.get_measure(default=None) if n is not None: - return "measure:{0}".format(n) + return f"measure:{n}" n = self.get_property("standard_name", None) if n is not None: - return "{0}".format(n) + return f"{n}" n = getattr(self, "id", None) if n is not None: - return "id%{0}".format(n) + return f"id%{n}" if relaxed: n = self.get_property("long_name", None) if n is not None: - return "long_name={0}".format(n) + return f"long_name={n}" n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default if strict: return default - for prop in ("long_name",): - n = self.get_property(prop, None) - if n is not None: - return "{0}={1}".format(prop, n) - # --- End: for + n = self.get_property("long_name", None) + if n is not None: + return f"long_name={n}" n = self.nc_get_variable(None) if n is not None: - return "ncvar%{0}".format(n) + return f"ncvar%{n}" return default - - -# --- End: class diff --git a/cf/cellmethod.py b/cf/cellmethod.py index c1b4769dd6..00fe2fd1be 100644 --- a/cf/cellmethod.py +++ b/cf/cellmethod.py @@ -128,7 +128,7 @@ def create(cls, cell_methods_string=None): axis = cell_methods.pop(0)[:-1] axes.append(axis) - # --- End: while + cm.set_axes(axes) if not cell_methods: @@ -149,7 +149,7 @@ def create(cls, cell_methods_string=None): cm.set_qualifier(attr, cell_methods.pop(0)) if not cell_methods: break - # --- End: while + if not cell_methods: out.append(cm) break @@ -190,7 +190,6 @@ def create(cls, cell_methods_string=None): intervals.append(data) continue - # --- End: if if term == "comment": comment = [] @@ -200,13 +199,11 @@ def create(cls, 
cell_methods_string=None): if cell_methods[0].endswith(":"): break comment.append(cell_methods.pop(0)) - # --- End: while + cm.set_qualifier("comment", " ".join(comment)) - # --- End: while if cell_methods[0].endswith(")"): cell_methods.pop(0) - # --- End: if n_intervals = len(intervals) if n_intervals > 1 and n_intervals != len(axes): @@ -220,7 +217,6 @@ def create(cls, cell_methods_string=None): cm.set_qualifier("interval", intervals) out.append(cm) - # --- End: while return out @@ -448,7 +444,6 @@ def intervals(self, value): raise ValueError( "Unparseable interval: {0!r}".format(interval) ) - # --- End: if if d.size != 1: raise ValueError( @@ -459,7 +454,6 @@ def intervals(self, value): d.squeeze(inplace=True) values.append(d) - # --- End: for self.set_qualifier("interval", tuple(values)) @@ -644,7 +638,6 @@ def equivalent( ) ) # pragma: no cover return False - # --- End: if # Still here? Then they are equivalent return True @@ -686,6 +679,3 @@ def remove_axes(self, axes): _DEPRECATION_ERROR_METHOD( self, "remove_axes", "Use method 'del_axes' instead." ) # pragma: no cover - - -# --- End: class diff --git a/cf/constructs.py b/cf/constructs.py index c640c49fa3..93ed302f3e 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -242,12 +242,13 @@ def filter_by_identity(self, *identities, view=False, **kwargs): """ # Allow keys without the 'key%' prefix - identities = list(identities) for n, identity in enumerate(identities): if identity in self: + identities = list(identities) identities[n] = "key%" + identity + break - ctype = [i for i in "XYZT" if i in identities] + ctype = [i for i in "XTYZ" if i in identities] return super().filter_by_identity( *identities, view=view, ctype=ctype, **kwargs diff --git a/cf/domain.py b/cf/domain.py index b906a44011..a800904c05 100644 --- a/cf/domain.py +++ b/cf/domain.py @@ -23,6 +23,3 @@ def __repr__(self): """ return super().__repr__().replace("<", "= field1.ndim. # ============================================================ - logger.debug( - "3: repr(field0) = {!r}".format(field0) - ) # pragma: no cover - logger.debug( - "3: repr(field1) = {!r}".format(field1) - ) # pragma: no cover - new_data0 = field0.data._binary_operation(field1.data, method) # new_data0 = super(Field, field0)._binary_operation( # field1, method).data @@ -1498,9 +1473,6 @@ def _binary_operation_old(self, other, method): logger.debug( "3: field0.shape = {}".format(field0.data.shape) ) # pragma: no cover - logger.debug( - "3: repr(field0) = {!r}".format(field0) - ) # pragma: no cover # ============================================================ # 4. 
Adjust the domain of field0 to accommodate its new data @@ -1540,16 +1512,16 @@ def _binary_operation_old(self, other, method): refs0 = dict(field0.coordinate_references) refs1 = dict(field1.coordinate_references) - field1_dimension_coordinates = field1.dimension_coordinates(view=True) - field1_auxiliary_coordinates = field1.auxiliary_coordinates(view=True) - field1_coordinate_references = field1.coordinate_references(view=True) - field1_domain_ancillaries = field1_domain_ancillaries(view=True) - field1_domain_axes = field1.domain_axes(view=True) + field1_dimension_coordinates = field1.dimension_coordinates(_dict=True) + field1_auxiliary_coordinates = field1.auxiliary_coordinates(_dict=True) + field1_coordinate_references = field1.coordinate_references(_dict=True) + field1_domain_ancillaries = field1_domain_ancillaries(_dict=True) + field1_domain_axes = field1.domain_axes(_dict=True) # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) - field0_domain_ancillaries = field0_domain_ancillaries(view=True) + field0_domain_ancillaries = field0_domain_ancillaries(_dict=True) c = field0.constructs.filter_by_type( - "auxiliary_coordinate", "domain_ancillary", view=True + "auxiliary_coordinate", "domain_ancillary", _dict=True ) for axis0 in s["size1_broadcast_axes"] + s["new_size1_axes"]: @@ -1563,7 +1535,7 @@ def _binary_operation_old(self, other, method): insert_dim[axis1] = [axis0] for key1 in field1_auxiliary_coordinates.filter_by_axis( - "exact", axis1 + axis1, mode="exact", view=True ): insert_aux[key1] = [axis0] @@ -1577,7 +1549,7 @@ def _binary_operation_old(self, other, method): # Remove all field0 auxiliary coordinates and domain # ancillaries which span this axis - remove_items.update(c.filter_by_axis("and", axis0)) + remove_items.update(c.filter_by_axis(axis0, mode="and", view=True)) # Remove all field0 coordinate references which span this # axis, and their domain ancillaries (even if those domain @@ -1591,7 +1563,7 @@ def _binary_operation_old(self, other, method): *tuple( ref0.coordinate_conversion.domain_ancillaries().values() ), - view=True, + _dict=True, ) ) @@ -1610,8 +1582,7 @@ def _binary_operation_old(self, other, method): # spanning the same axes which has the same identity and a # size-1 data array. 
# ------------------------------------------------------------- - auxs1 = dict(field1_auxiliary_coordinates) - logger.debug("5: field1.auxs() = {}".format(auxs1)) # pragma: no cover + auxs1 = dict(field1_auxiliary_coordinates.items()) logger.debug( "5: remove_items = {}".format(remove_items) ) # pragma: no cover @@ -1797,7 +1768,7 @@ def _binary_operation_old(self, other, method): "dimension_coordinate", "axuiliary_coordinate", "domain_ancillary", - view=True, + _dict=True, ) ) for key1, item1 in identity_map.copy().items(): @@ -1936,13 +1907,15 @@ def _binary_operation(self, other, method): f_dimension_coordinates = f.dimension_coordinates(view=True) f_auxiliary_coordinates = f.auxiliary_coordinates(view=True) - for axis in f.domain_axes(view=True): + for axis in f.domain_axes(_dict=True): identity = None key = None coord = None coord_type = None - coords = f_dimension_coordinates.filter_by_axis("exact", axis) + coords = f_dimension_coordinates.filter_by_axis( + axis, mode="exact", view=True + ) if len(coords) == 1: # This axis of the domain has a dimension coordinate key = coords.key() @@ -1961,7 +1934,7 @@ def _binary_operation(self, other, method): identity = coord.identity(relaxed=True, default=None) else: coords = f_auxiliary_coordinates.filter_by_axis( - "exact", axis + axis, mode="exact", view=True ) if len(coords) == 1: # This axis of the domain does not have a @@ -2108,8 +2081,6 @@ def _binary_operation(self, other, method): } logger.info("\naxis_map= {}\n".format(axis_map)) - logger.info("{!r}".format(field0)) - logger.info("{!r}".format(field1)) # ------------------------------------------------------------ # Check that the two fields have compatible metadata @@ -2173,7 +2144,9 @@ def _binary_operation(self, other, method): a = out0[identity] if y.size > 1 and a.size == 1: for key0, c in tuple( - field0.constructs.filter_by_axis("or", a.axis).items() + field0.constructs.filter_by_axis( + a.axis, mode="or", view=True + ).items() ): removed_refs0 = field0.del_coordinate_reference( construct=key0, default=None @@ -2193,19 +2166,13 @@ def _binary_operation(self, other, method): axis0 = axis_map[y.axis] field0.domain_axis(axis0).set_size(y.size) - logger.info("\n{!r}".format(field0)) - logger.info("{!r}".format(field1)) - logger.info("{!r}".format(field0.data)) - logger.info("{!r}".format(field1.data)) - # ------------------------------------------------------------ # Operate on the data # ------------------------------------------------------------ new_data = field0.data._binary_operation(field1.data, method) field0.set_data(new_data, set_axes=False, copy=False) - logger.info("{}".format(field0)) - # logger.info("{}".format(field0.array)) + logger.info("\naxes_added_from_field1= {}\n", axes_added_from_field1) logger.info( "axes_to_replace_from_field1= {}", axes_to_replace_from_field1 @@ -2244,7 +2211,9 @@ def _binary_operation(self, other, method): "cell_measure", view=True, ) - constructs = constructs.filter_by_axis("subset", *new_axes) + constructs = constructs.filter_by_axis( + *new_axes, mode="subset", view=True + ) for key, c in constructs.items(): c_axes = field1.get_data_axes(key) axes = [axis_map[axis1] for axis1 in c_axes] @@ -2255,7 +2224,7 @@ def _binary_operation(self, other, method): # Copy over coordinate reference constructs from field1, # including their domain ancillary constructs. 
# ------------------------------------------------------------ - for key, ref in field1.coordinate_references(view=True).items(): + for key, ref in field1.coordinate_references(_dict=True).items(): axes = field1._coordinate_reference_axes(key) if axes.issubset(new_axes): refs_to_add_from_field1.append(ref) @@ -2374,7 +2343,7 @@ def _conform_coordinate_references(self, key, coordref=None): identity = self.constructs[key].identity(strict=True) if coordref is None: - refs = self.coordinate_references(view=True).values() + refs = self.coordinate_references(_dict=True).values() else: refs = [coordref] @@ -2424,7 +2393,7 @@ def _conform_cell_methods(self): """ axis_map = {} - for cm in self.cell_methods(view=True).values(): + for cm in self.cell_methods(_dict=True).values(): for axis in cm.get_axes(()): if axis in axis_map: continue @@ -2472,8 +2441,8 @@ def _equivalent_coordinate_references( `bool` """ - ref0 = self.coordinate_references(view=True)[key0] - ref1 = field1.coordinate_references(view=True)[key1] + ref0 = self.coordinate_references(_dict=True)[key0] + ref1 = field1.coordinate_references(_dict=True)[key1] if not ref0.equivalent(ref1, rtol=rtol, atol=atol, verbose=verbose): logger.info( @@ -2564,7 +2533,9 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) if axes_sizes.count(n) == 1: - axes.append(domain_axes.filter_by_size(n).key()) + axes.append( + domain_axes.filter_by_size(n, view=True).key() + ) else: raise ValueError( f"Can't insert {item!r}: Ambiguous shape: " @@ -2592,7 +2563,7 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): f"axes (got {len(set(axes))}, expected {ndim})" ) - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) axes2 = [] for axis, size in zip(axes, item.data.shape): dakey = self.domain_axis( @@ -2680,7 +2651,7 @@ def _conform_for_assignment(self, other, check_coordinates=False): # then other.data becomes Y X T # ------------------------------------------------------------ squeeze_axes1 = [] - other_domain_axes = other.domain_axes(view=True) + other_domain_axes = other.domain_axes(_dict=True) for axis1 in v["undefined_axes"]: axis_size = other_domain_axes[axis1].get_size() @@ -3106,6 +3077,7 @@ def _regrid_get_latlong(self, name, axes=None): f"{name} field has multiple 'Y' dimension coordinates" ) + # TODO review for view/_dict. Is x_axis same as x_key? 
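# (The length checks above ensure that xdims and ydims each hold
# exactly one dimension coordinate at this point, so the .value() and
# .key() calls below return that unique construct and its construct
# key respectively.)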
x = xdims.value() y = ydims.value() x_key = xdims.key() @@ -3135,8 +3107,12 @@ def _regrid_get_latlong(self, name, axes=None): auxiliary_coordinates = self.auxiliary_coordinates( view=True, cache=auxiliary_coordinates ) - x = auxiliary_coordinates("X", view=True).filter_by_naxes(2) - y = auxiliary_coordinates("Y", view=True).filter_by_naxes(2) + x = auxiliary_coordinates("X", view=True).filter_by_naxes( + 2, view=True + ) + y = auxiliary_coordinates("Y", view=True).filter_by_naxes( + 2, view=True + ) if len(x) != 1: raise ValueError("TODO") if len(y) != 1: @@ -3184,7 +3160,7 @@ def _regrid_get_latlong(self, name, axes=None): ), ) - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) x_size = domain_axes[x_axis].get_size() y_size = domain_axes[y_axis].get_size() @@ -3205,7 +3181,9 @@ def _regrid_get_latlong(self, name, axes=None): view=True, cache=auxiliary_coordinates ) - for key, aux in auxiliary_coordinates.filter_by_naxes(2).items(): + for key, aux in auxiliary_coordinates.filter_by_naxes( + 2, view=True + ).items(): if aux.Units.islongitude: if lon_found: raise ValueError( @@ -3545,7 +3523,7 @@ def _regrid_get_reordered_sections( for axis in axis_order: # axis_key = self.dim(axis, key=True) axis_key = dimension_coordinates.filter_by_axis( - "exact", axis + axis, mode="exact", view=True ).key(None) if axis_key is not None: if axis_key in regrid_axes: @@ -3801,7 +3779,7 @@ def _regrid_update_coordinate_references( domain_axes = None domain_ancillaries = None - for key, ref in self.coordinate_references(view=True).items(): + for key, ref in self.coordinate_references(_dict=True).items(): ref_axes = [] for k in ref.coordinates(): ref_axes.extend(self.get_data_axes(k)) @@ -3814,6 +3792,7 @@ def _regrid_update_coordinate_references( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): + # TODo review for view/_dict domain_ancillaries = self.domain_ancillaries( view=True, cache=domain_ancillaries ) @@ -3827,9 +3806,12 @@ def _regrid_update_coordinate_references( # if f.domain_anc(key, axes_all=('X', 'Y')):# v2 x = self.domain_axis("X", key=True) y = self.domain_axis("Y", key=True) - if domain_ancillaries.filter_by_key(key).filter_by_axis( - "exact", x, y - ): + # if domain_ancillaries.filter_by_key(key).filter_by_axis( + # x, y, mode="exact", view=True + # ): + if domain_ancillaries.filter_by_axis( + x, y, mode="exact", view=True + ).get(key): # Convert the domain ancillary into an independent # field value = self.convert(key) @@ -3863,7 +3845,7 @@ def _regrid_update_coordinate_references( d_axes = self.get_data_axes(key) domain_axes = self.domain_axes( - view=True, cache=domain_axes + _dict=True, cache=domain_axes ) for k_s, new_size in zip( @@ -3895,7 +3877,7 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): `None` """ - for ref in dst.coordinate_references(view=True).values(): + for ref in dst.coordinate_references(_dict=True).values(): axes = set() for key in ref.coordinates(): axes.update(dst.get_data_axes(key)) @@ -3975,12 +3957,14 @@ def _regrid_update_coordinates( # Remove the source coordinates of new field # self.remove_items(axes=src_axis_keys) # for key in self.constructs.filter_by_axis('or', *src_axis_keys): - for key in self.coordinates(view=True).filter_by_axis( - "or", *src_axis_keys + for key in tuple( + self.coordinates(view=True) + .filter_by_axis(*src_axis_keys, mode="or", view=True) + .keys() ): self.del_construct(key) - domain_axes = self.domain_axes(view=True) + domain_axes = 
self.domain_axes(_dict=True) dst_auxiliary_coordinates = None if cartesian: @@ -4007,7 +3991,7 @@ def _regrid_update_coordinates( ) for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( - "subset", *dst_axis_keys + *dst_axis_keys, mode="subset", view=True ).items(): aux_axes = [ axis_map[k_d] for k_d in dst.get_data_axes(aux_key) @@ -4054,12 +4038,12 @@ def _regrid_update_coordinates( self.set_construct(dim_coord, axes=[src_axis_key]) for aux in dst_auxiliary_coordinates.filter_by_axis( - "exact", dst_axis_key + dst_axis_key, mode="exact", view=True ).values(): self.set_construct(aux, axes=[src_axis_key]) for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( - "exact", *dst_axis_keys + *dst_axis_keys, mode="exact", view=True ).items(): aux_axes = dst.get_data_axes(aux_key) if aux_axes == tuple(dst_axis_keys): @@ -4069,7 +4053,7 @@ def _regrid_update_coordinates( # Copy names of dimensions from destination to source field if not dst_dict: - dst_domain_axes = dst.domain_axes(view=True) + dst_domain_axes = dst.domain_axes(_dict=True) for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys ): @@ -4328,9 +4312,9 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): t = w.analyse_items() if t["undefined_axes"]: - if set(t.domain_axes.filter_by_size(gt(1))).intersection( - t["undefined_axes"] - ): + if set( + t.domain_axes.filter_by_size(gt(1), view=True) + ).intersection(t["undefined_axes"]): raise ValueError("345jn456jn TODO") # TODO BUG: "t.domain_axes" @@ -4340,8 +4324,8 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): axis1_to_axis0 = {} - coordinate_references = self.coordinate_references(view=True) - w_coordinate_references = w.coordinate_references(view=True) + coordinate_references = self.coordinate_references(_dict=True) + w_coordinate_references = w.coordinate_references(_dict=True) for axis1 in w.get_data_axes(): identity = t["axis_to_id"].get(axis1, None) @@ -5142,7 +5126,7 @@ def _weights_measure( `bool` """ - m = self.cell_measures(view=True).filter_by_measure(measure) + m = self.cell_measures(view=True).filter_by_measure(measure, view=True) len_m = len(m) if not len_m: @@ -5255,7 +5239,9 @@ def _weights_yyy( z_axis = None auxiliary_coordinates = self.auxiliary_coordinates(view=True) - auxiliary_coordinates = auxiliary_coordinates.filter_by_naxes(1) + auxiliary_coordinates = auxiliary_coordinates.filter_by_naxes( + 1, view=True + ) for key, aux in auxiliary_coordinates.items(): if aux.get_geometry(None) != geometry_type: @@ -5418,7 +5404,7 @@ def Flags(self): def ncdimensions(self): """""" out = {} - for dim, domain_axis in self.domain_axes(view=True).items(): + for dim, domain_axis in self.domain_axes(_dict=True).items(): ncdim = domain_axis.nc_get_dimension(None) if ncdim is not None: out[dim] = ncdim @@ -5458,7 +5444,7 @@ def rank(self): 4 """ - return len(self.domain_axes(view=True)) + return len(self.domain_axes(_dict=True)) @property def varray(self): @@ -6025,7 +6011,7 @@ def radius(self, default=None): """ radii = [] - for cr in self.coordinate_references(view=True).values(): + for cr in self.coordinate_references(_dict=True).values(): r = cr.datum.get_parameter("earth_radius", None) if r is not None: r = Data.asdata(r) @@ -6129,7 +6115,7 @@ def close(self): """ super().close() - for construct in self.constructs.filter_by_data(view=True).values(): + for construct in self.constructs.filter_by_data(_dict=True).values(): construct.close() def iscyclic(self, identity, **kwargs): @@ -6245,7 +6231,7 @@ def 
concatenate(cls, fields, axis=0, _preserve=True): # Concatenate constructs with data # ------------------------------------------------------------ for key, construct in field0.constructs.filter_by_data( - view=True + _dict=True ).items(): construct_axes = field0.get_data_axes(key) @@ -6356,9 +6342,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): """ if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "cyclic", kwargs - ) # pragma: no cover + _DEPRECATION_ERROR_KWARGS(self, "cyclic", kwargs) # pragma: no cover data = self.get_data(None, _fill_value=False) if data is None: @@ -6761,7 +6745,7 @@ def weights( # coordinates pass - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) for da_key in domain_axes: if self._weights_geometry_area( @@ -7093,23 +7077,27 @@ def weights( field.del_data() field.del_data_axes() - not_needed_axes = set(field.domain_axes(view=True)).difference( + not_needed_axes = set(field.domain_axes(_dict=True)).difference( weights_axes ) - for key in self.cell_methods(view=True): + for key in self.cell_methods(_dict=True).copy(): field.del_construct(key) - for key in self.field_ancillaries(view=True): + for key in self.field_ancillaries(_dict=True).copy(): field.del_construct(key) - for key in field.coordinate_references(view=True): + for key in field.coordinate_references(_dict=True).copy(): if field.coordinate_reference_domain_axes(key).intersection( not_needed_axes ): field.del_coordinate_reference(key) - for key in field.constructs.filter_by_axis("or", *not_needed_axes): + for key in tuple( + field.constructs.filter_by_axis( + *not_needed_axes, mode="or", view=True + ).keys() + ): field.del_construct(key) for key in not_needed_axes: @@ -7989,14 +7977,7 @@ def bin( digitized = (digitized,) for f in digitized[::-1]: - logger.info( - f" Digitized field input : {f!r}" # DCH - ) # pragma: no cover - f = self._conform_for_data_broadcasting(f) - logger.info( - f" conformed: {f!r}" # DCH - ) # pragma: no cover if not self._is_broadcastable(f.shape): raise ValueError( @@ -8152,7 +8133,9 @@ def bin( # Create a cell method (if possible) # ------------------------------------------------------------ standard_names = [] - domain_axes = self.domain_axes(view=True).filter_by_size(ge(2)) + domain_axes = self.domain_axes(view=True).filter_by_size( + ge(2), view=True + ) for da_key in domain_axes: dim = self.dimension_coordinate(da_key, default=None) @@ -8480,7 +8463,7 @@ def del_coordinate_reference( default, f"Can't identify construct from {construct!r}" ) - for key, ref in tuple(self.coordinate_references(view=True).items()): + for key, ref in tuple(self.coordinate_references(_dict=True).items()): if c_key in ref.coordinates(): self.del_coordinate_reference( key, construct=None, default=default @@ -8634,7 +8617,7 @@ def del_domain_axis( self.squeeze(dakey, inplace=True) for ckey, construct in self.constructs.filter_by_data( - view=True + _dict=True ).items(): data = construct.get_data(None, _fill_value=False) if data is None: @@ -8756,7 +8739,7 @@ def get_coordinate_reference( ) for cr_key, ref in tuple( - self.coordinate_references(view=True).items() + self.coordinate_references(_dict=True).items() ): if c_key in [ ref.coordinates(), @@ -8825,8 +8808,8 @@ def set_coordinate_reference( # Still here? 
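# A copy of the incoming coordinate reference is taken and, when a
# parent field is supplied, its coordinate and domain ancillary keys
# are matched by identity against this field's constructs, so that the
# stored reference points at the corresponding constructs of this
# field rather than those of the parent.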
ref = coordinate_reference.copy() - coordinates = field.coordinates(view=True) - domain_ancillaries = field.domain_ancillaries(view=True) + coordinates = field.coordinates(_dict=True) + domain_ancillaries = field.domain_ancillaries(_dict=True) ckeys = [] for value in coordinate_reference.coordinates(): @@ -10494,7 +10477,9 @@ def collapse( ): collapse_axes = collapse_axes_all_sizes.copy() else: - collapse_axes = collapse_axes_all_sizes.filter_by_size(gt(1)) + collapse_axes = collapse_axes_all_sizes.filter_by_size( + gt(1), view=True + ) logger.info( " collapse_axes = {}".format(collapse_axes) @@ -10506,7 +10491,7 @@ def collapse( # Create null bounds if requested for axis in axes: dc = dimension_coordinates.filter_by_axis( - "and", axis + axis, mode="and", view=True ).value(None) if dc is not None and not dc.has_bounds(): dc.set_bounds(dc.create_bounds(cellsize=0)) @@ -10768,7 +10753,9 @@ def collapse( c = f.constructs.filter_by_type( "cell_measure", "domain_ancillary", view=True ) - for key, value in c.filter_by_axis("or", axis).items(): + for key, value in tuple( + c.filter_by_axis(axis, mode="or", view=True).items() + ): logger.info( f" Removing {value.construct_type}" ) # pragma: no cover @@ -10777,8 +10764,10 @@ def collapse( # REMOVE all 2+ dimensional auxiliary coordinates # which span this axis - c = auxiliary_coordinates.filter_by_naxes(gt(1)) - for key, value in c.filter_by_axis("or", axis).items(): + c = auxiliary_coordinates.filter_by_naxes(gt(1), view=True) + for key, value in tuple( + c.filter_by_axis(axis, mode="or", view=True).items() + ): logger.info( f" Removing {value.construct_type} {key!r}" ) # pragma: no cover @@ -10793,9 +10782,11 @@ def collapse( # one-dimensional auxiliary coordinates which span # this axis and have the same values in their data # array and bounds. 
- for key, aux in auxiliary_coordinates.filter_by_axis( - "exact", axis - ).items(): + for key, aux in tuple( + auxiliary_coordinates.filter_by_axis( + axis, mode="exact", view=True + ).items() + ): logger.info(f"key = {key}") # pragma: no cover d = aux[0] @@ -10819,13 +10810,13 @@ def collapse( aux.bounds.set_data(d.bounds.data, copy=False) # Reset the axis size - f.domain_axes(view=True)[axis].set_size(1) + f.domain_axes(_dict=True)[axis].set_size(1) logger.info( f"Changing axis size to 1: {axis}" ) # pragma: no cover dim = dimension_coordinates.filter_by_axis( - "exact", axis + axis, mode="exact", view=True ).value(None) if dim is None: continue @@ -11468,7 +11459,7 @@ def _group_weights(weights, iaxis, index): ) # pragma: no cover # Size of uncollapsed axis - axis_size = self.domain_axes(view=True)[axis].get_size() + axis_size = self.domain_axes(_dict=True)[axis].get_size() # Integer position of collapse axis iaxis = self.get_data_axes().index(axis) @@ -11536,7 +11527,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if coord is None: @@ -11571,7 +11562,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if coord is None: @@ -11626,13 +11617,13 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if coord is None: coord = ( self.auxiliary_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if coord is None: @@ -11665,7 +11656,7 @@ def _group_weights(weights, iaxis, index): # Over days # ---------------------------------------------------- coord = self.dimension_coordinates.filter_by_axis( - "exact", axis + axis, mode="exact", view=True ).value(None) if coord is None or not coord.Units.isreftime: raise ValueError( @@ -11784,7 +11775,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if coord is None or not coord.Units.isreftime: @@ -11921,9 +11912,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Within days # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - "exact", axis - ).value(None) + coord = ( + self.dimension_coordinates(view=True) + .filter_by_axis(axis, mode="exact", view=True) + .value(None) + ) if coord is None or not coord.Units.isreftime: raise ValueError( "Reference-time dimension coordinates are required " @@ -12011,7 +12004,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value() ) if coord is None or not coord.Units.isreftime: @@ -12128,7 +12121,7 @@ def _group_weights(weights, iaxis, index): if group_span is not False: if 
isinstance(group_span, int): if ( - pc.domain_axes(view=True)[axis].get_size() + pc.domain_axes(_dict=True)[axis].get_size() != group_span ): classification[index] = ignore_n @@ -12234,7 +12227,7 @@ def _group_weights(weights, iaxis, index): try: c = ( g.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value() ) if not c.has_bounds(): @@ -12251,7 +12244,7 @@ def _group_weights(weights, iaxis, index): ): fl.sort( key=lambda g: g.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value() .datum(0), reverse=coord.decreasing, @@ -12265,7 +12258,7 @@ def _group_weights(weights, iaxis, index): except ValueError as error: raise ValueError(f"Can't collapse: {error}") - if squeeze and f.domain_axes(view=True)[axis].get_size() == 1: + if squeeze and f.domain_axes(_dict=True)[axis].get_size() == 1: # Remove a totally collapsed axis from the field's # data array f.squeeze(axis, inplace=True) @@ -12348,7 +12341,7 @@ def _update_cell_methods( if ( original_domain_axis.get_size() - == self.domain_axes(view=True)(view=True)[key].get_size() + == self.domain_axes(_dict=True)[key].get_size() ): if ( lastcm.get_axes(None) == axes @@ -12444,7 +12437,7 @@ def direction(self, identity, axes=None, **kwargs): if axis is None: return True - for key, coord in self.dimension_coordinates(view=True).items(): + for key, coord in self.dimension_coordinates(_dict=True).items(): if axis == self.get_data_axes(key)[0]: return coord.direction() @@ -12468,9 +12461,9 @@ def directions(self): {'dim1': True, 'dim0': False} """ - out = {key: True for key in self.domain_axes(view=True).keys()} + out = {key: True for key in self.domain_axes(_dict=True).keys()} - for key, dc in self.dimension_coordinates(view=True).items(): + for key, dc in self.dimension_coordinates(_dict=True).items(): direction = dc.direction() if not direction: axis = self.get_data_axes(key)[0] @@ -12769,7 +12762,7 @@ def indices(self, *mode, **kwargs): # Initialize indices indices = [slice(None)] * self.ndim - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) constructs = self.constructs.filter_by_data(view=True) parsed = {} @@ -13291,7 +13284,7 @@ def set_data( # Construct new field f = _inplace_enabled_define_and_cleanup(self) - domain_axes = f.domain_axes(view=True) + domain_axes = f.domain_axes(_dict=True) if axes is None and not domain_axes: set_axes = False @@ -13367,9 +13360,11 @@ def set_data( if not domain_axes: raise ValueError("Can't set data: No domain axes exist") + domain_axes = f.domain_axes(view=True) + axes = [] for n in data_shape: - da = domain_axes.filter_by_size(n) + da = domain_axes.filter_by_size(n, view=True) if len(da) != 1: raise ValueError( "Can't insert data: Ambiguous data shape: " @@ -13443,7 +13438,7 @@ def domain_mask(self, **kwargs): mask.nc_del_variable(None) for key in self.constructs.filter_by_type( - "cell_method", "field_ancillary", view=True + "cell_method", "field_ancillary", _dict=True ): mask.del_construct(key) @@ -13568,7 +13563,7 @@ def compute_vertical_coordinates( """ f = _inplace_enabled_define_and_cleanup(self) - for cr in f.coordinate_references(view=True).values(): + for cr in f.coordinate_references(_dict=True).values(): # -------------------------------------------------------- # Compute the non-parametric vertical coordinates, if # possible. 
@@ -13911,7 +13906,7 @@ def match_by_rank(self, *ranks): if not ranks: return True - n_domain_axes = len(self.domain_axes(view=True)) + n_domain_axes = len(self.domain_axes(_dict=True)) for rank in ranks: ok = rank == n_domain_axes if ok: @@ -14992,7 +14987,7 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): super(Field, f).flip(iaxes, inplace=True) # Flip any constructs which span the flipped axes - for key, construct in f.constructs.filter_by_data(view=True).items(): + for key, construct in f.constructs.filter_by_data(_dict=True).items(): construct_axes = f.get_data_axes(key) construct_flip_axes = axes.intersection(construct_axes) if construct_flip_axes: @@ -15133,7 +15128,7 @@ def anchor( dim = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", axis) + .filter_by_axis(axis, mode="and", view=True) .value(default=None) ) if dim is None: @@ -15155,7 +15150,7 @@ def anchor( f"Anchor value has incompatible units: {value.Units!r}" ) - axis_size = f.domain_axes(view=True)[axis].get_size() + axis_size = f.domain_axes(_dict=True)[axis].get_size() if axis_size <= 1: # Don't need to roll a size one axis if dry_run: @@ -15179,7 +15174,7 @@ def anchor( dim = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", axis) + .filter_by_axis(axis, mode="and", view=True) .value() ) n = ((value - dim.data[0]) / period).ceil() @@ -15332,36 +15327,29 @@ def autocyclic(self, verbose=None): >>> f.autocyclic() """ - dims = self.dimension_coordinates(view=True)("X") + dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() if len(dims) != 1: - logger.debug( - f"Not one 'X' dimension coordinate construct: {len(dims)}" - ) # pragma: no cover return False - key, dim = dict(dims).popitem() + key, dim = dims.popitem() if not dim.Units.islongitude: - logger.debug(0) if dim.get_property("standard_name", None) not in ( "longitude", "grid_longitude", ): self.cyclic(key, iscyclic=False) - logger.debug(1) # pragma: no cover return False bounds = dim.get_bounds(None) if bounds is None: self.cyclic(key, iscyclic=False) - logger.debug(2) # pragma: no cover return False bounds_data = bounds.get_data(None, _fill_value=False) if bounds_data is None: self.cyclic(key, iscyclic=False) - logger.debug(3) # pragma: no cover return False bounds = bounds_data.array @@ -15372,11 +15360,9 @@ def autocyclic(self, verbose=None): if abs(bounds[-1, -1] - bounds[0, 0]) != period.array: self.cyclic(key, iscyclic=False) - logger.debug(4) # pragma: no cover return False self.cyclic(key, iscyclic=True, period=period) - logger.debug(5) # pragma: no cover return True @@ -15432,7 +15418,7 @@ def axes(self, axes=None, **kwargs): out = set(out) out.discard(None) - return self.domain_axes().filter_by_key(*out) + return self.domain_axes().filter_by_key(*out, view=True) @_deprecated_kwarg_check("i") def squeeze(self, axes=None, inplace=False, i=False, **kwargs): @@ -15488,7 +15474,7 @@ def squeeze(self, axes=None, inplace=False, i=False, **kwargs): data_axes = self.get_data_axes() if axes is None: - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) axes = [ axis for axis in data_axes @@ -15775,7 +15761,7 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): f = _inplace_enabled_define_and_cleanup(self) - size_1_axes = self.domain_axes(view=True).filter_by_size(1) + size_1_axes = self.domain_axes(view=True).filter_by_size(1, view=True) for axis in set(size_1_axes).difference(self.get_data_axes()): f.insert_dimension(axis, position=0, 
inplace=True) @@ -15896,7 +15882,9 @@ def auxiliary_coordinate( if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = auxiliary_coordinates.filter_by_axis("exact", da_key) + c = auxiliary_coordinates.filter_by_axis( + da_key, mode="exact", view=True + ) if key: out = c.key(default=None) @@ -16033,20 +16021,18 @@ def construct(self, identity=None, default=ValueError(), key=False): c = self.constructs if identity is not None: - c = c(identity) + c = c(identity, view=True) if key: out = c.key(default=None) if out is None: - return self._default( - default, "No {!r} construct".format(identity) - ) + return self._default(default, f"No {identity!r} construct") return out out = c.value(default=None) if out is None: - return self._default(default, "No {!r} construct".format(identity)) + return self._default(default, f"No {identity!r} construct") return out @@ -16156,11 +16142,13 @@ def domain_ancillary(self, identity=None, default=ValueError(), key=False): c = domain_ancillaries if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = domain_ancillaries.filter_by_axis("exact", da_key) + c = domain_ancillaries.filter_by_axis( + da_key, mode="exact", view=True + ) if key: return c.key(default=default) @@ -16269,11 +16257,13 @@ def cell_measure(self, identity=None, default=ValueError(), key=False): c = cell_measures if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = cell_measures.filter_by_axis("exact", da_key) + c = cell_measures.filter_by_axis( + da_key, mode="exact", view=True + ) if key: return c.key(default=default) @@ -16488,11 +16478,13 @@ def coordinate(self, identity=None, default=ValueError(), key=False): c = coordinates if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = coordinates(view=True).filter_by_axis("exact", da_key) + c = coordinates.filter_by_axis( + da_key, mode="exact", view=True + ) if key: return c.key(default=default) @@ -16714,11 +16706,13 @@ def field_ancillary(self, identity=None, default=ValueError(), key=False): c = field_ancillaries if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = field_ancillaries.filter_by_axis("exact", da_key) + c = field_ancillaries.filter_by_axis( + da_key, mode="exact", view=True + ) if key: return c.key(default=default) @@ -16831,11 +16825,13 @@ def dimension_coordinate( c = dimension_coordinates if identity is not None: - c = c(identity) + c = c(identity, view=True) if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = dimension_coordinates.filter_by_axis("exact", da_key) + c = dimension_coordinates.filter_by_axis( + da_key, mode="exact", view=True + ) if key: return c.key(default=default) @@ -16930,7 +16926,7 @@ def domain_axis(self, identity, key=False, default=ValueError()): TODO """ - # Try for index + # Try for integer index try: da_key = self.get_data_axes(default=None)[identity] except TypeError: @@ -16945,7 +16941,7 @@ def domain_axis(self, identity, key=False, default=ValueError()): self_domain_axes = self.domain_axes(view=True) - domain_axes = self_domain_axes(identity, view=True) 
+ domain_axes = self_domain_axes.filter_by_identity(identity, view=True) if len(domain_axes) == 1: # identity is a unique domain axis construct identity da_key = domain_axes.key() @@ -17083,7 +17079,7 @@ def axes_names(self, *identities, **kwargs): self, "axes_names", kwargs ) # pragma: no cover - out = dict(self.domain_axes(view=True)) + out = self.domain_axes(_dict=True).copy() for key in tuple(out): value = self.constructs.domain_axis_identity(key) @@ -17192,7 +17188,7 @@ def axis_size(self, identity, default=ValueError(), axes=None, **kwargs): axis = self.domain_axis(identity, key=True) - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) da = domain_axes.get(axis) if da is not None: @@ -17344,7 +17340,7 @@ def set_construct( self._conform_cell_methods() elif construct_type == "coordinate_reference": - for ckey in self.coordinates(view=True): + for ckey in self.coordinates(_dict=True): self._conform_coordinate_references(ckey, coordref=construct) # Return the construct key @@ -17737,7 +17733,7 @@ def halo( d.set_size(d.get_size() + 2 * h) # Add halos to metadata constructs - for key, c in f.constructs.filter_by_data(view=True).items(): + for key, c in f.constructs.filter_by_data(_dict=True).items(): construct_axes = f.get_data_axes(key) construct_size = { construct_axes.index(axis): h @@ -17981,7 +17977,7 @@ def percentile( for axis in [ axis - for axis in self.domain_axes(view=True) + for axis in self.domain_axes(_dict=True) if axis not in data_axes ]: out.set_construct(self._DomainAxis(1), key=axis) @@ -18011,7 +18007,7 @@ def percentile( if axes: for key, c in ( self.dimension_coordinates(view=True) - .filter_by_axis("subset", *axes) + .filter_by_axis(*axes, mode="subset", view=True) .items() ): c_axes = self.get_data_axes(key) @@ -18039,7 +18035,7 @@ def percentile( other_axes = set( [ axis - for axis in self.domain_axes(view=True) + for axis in self.domain_axes(_dict=True) if axis not in axes or self.domain_axis(axis).size == 1 ] ) @@ -18049,7 +18045,7 @@ def percentile( # ------------------------------------------------------------ if other_axes: for key, c in self.constructs.filter_by_axis( - "subset", *other_axes + *other_axes, mode="subset", view=True ).items(): c_axes = self.get_data_axes(key) out.set_construct(c, axes=c_axes, key=key) @@ -18057,10 +18053,10 @@ def percentile( # ------------------------------------------------------------ # Copy coordinate reference constructs to the output field # ------------------------------------------------------------ - out_coordinates = out.coordinates(view=True) - out_domain_ancillaries = out.domain_ancillaries(view=True) + out_coordinates = out.coordinates(_dict=True) + out_domain_ancillaries = out.domain_ancillaries(_dict=True) - for cr_key, ref in self.coordinate_references(view=True).items(): + for cr_key, ref in self.coordinate_references(_dict=True).items(): ref = ref.copy() for c_key in ref.coordinates(): @@ -18499,7 +18495,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): f.set_data_axes(new_data_axes) # Modify or remove cell methods that span the flatten axes - for key, cm in tuple(f.cell_methods(view=True).items()): + for key, cm in tuple(f.cell_methods(_dict=True).items()): cm_axes = set(cm.get_axes(())) if not cm_axes or cm_axes.isdisjoint(axes): continue @@ -18519,7 +18515,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): ): for c in ( f.constructs.filter_by_type(ctype, view=True) - .filter_by_axis("exact", a) + .filter_by_axis(a, mode="exact", 
view=True) .values() ): sn = c.get_property("standard_name", None) @@ -18550,7 +18546,9 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Flatten the constructs that span all of the flattened axes, # and no others. - for key, c in f.constructs.filter_by_axis("and", *axes).items(): + for key, c in f.constructs.filter_by_axis( + *axes, mode="and", view=True + ).items(): c_axes = f.get_data_axes(key) c_iaxes = sorted( [c_axes.index(axis) for axis in axes if axis in c_axes] @@ -18564,7 +18562,9 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Remove constructs that span some, but not all, of the # flattened axes - for key in f.constructs.filter_by_axis("or", *axes): + for key in tuple( + f.constructs.filter_by_axis(*axes, mode="or", view=True).keys() + ): f.del_construct(key) # Remove the domain axis constructs for the flattened axes @@ -18631,7 +18631,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): ) f = _inplace_enabled_define_and_cleanup(self) - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(_dict=True) if domain_axes[axis].get_size() <= 1: if inplace: f = None @@ -18640,7 +18640,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): dim = ( self.dimension_coordinates(view=True) - .filter_by_axis("exact", axis) + .filter_by_axis(axis, mode="exact", view=True) .value(None) ) if dim is not None and dim.period() is None: @@ -18659,7 +18659,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): super(Field, f).roll(iaxis, shift, inplace=True) - for key, construct in f.constructs.filter_by_data(view=True).items(): + for key, construct in f.constructs.filter_by_data(_dict=True).items(): axes = f.get_data_axes(key, default=()) if axis in axes: construct.roll(axes.index(axis), shift, inplace=True) @@ -20735,7 +20735,7 @@ def derivative( raise ValueError("Invalid axis specifier") dims = self.dimension_coordinates(view=True).filter_by_axis( - "exact", axis + axis, mode="exact", view=True ) len_dims = len(dims) if not len_dims: @@ -20743,7 +20743,7 @@ def derivative( elif len_dims != 1: raise ValueError("Axis specified is not unique.") - dckey, coord = dict(dims).popitem() + dckey, coord = dims._dictionary().popitem() # Get the axis index axis_index = self.get_data_axes().index(axis) @@ -20860,7 +20860,7 @@ def coord(self, identity, default=ValueError(), key=False, **kwargs): "Use methods of the 'coordinates' attribute instead.", ) # pragma: no cover - if identity in self.domain_axes(view=True): + if identity in self.domain_axes(_dict=True): # Allow an identity to be the domain axis construct key # spanned by a dimension coordinate construct return self.dimension_coordinate( diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 6551d4172a..24897235e7 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -74,10 +74,12 @@ def T(self): True """ - out = self.Units.isreftime or self.get_property("axis", None) == "T" - - if out: + if self.Units.isreftime: return True + + axis = self.get_property("axis", None) + if axis is not None: + return axis == "T" # Still here? Then check the bounds. 
if self.has_bounds(): @@ -127,21 +129,24 @@ def X(self): False """ - standard_names = ( - "longitude", - "projection_x_coordinate", - "grid_longitude", - ) - units = self.Units - out = ( - units.islongitude - or self.get_property("axis", None) == "X" - or self.get_property("standard_name", None) in standard_names - ) - - if out: + standard_name = self.get_property("standard_name", None) + if ( + standard_name is not None + and standard_name in ( + "longitude", + "projection_x_coordinate", + "grid_longitude" + ) + ): + return True + + if self.Units.islongitude: return True + axis = self.get_property("axis", None) + if axis is not None: + return axis == "X" + # Still here? Then check the bounds. if self.has_bounds(): bounds = self.get_bounds(None) @@ -176,22 +181,24 @@ def Y(self): True """ - standard_names = ( - "latitude", - "projection_y_coordinate", - "grid_latitude", - ) - - units = self.Units - out = ( - units.islatitude - or self.get_property("axis", None) == "Y" - or self.get_property("standard_name", None) in standard_names - ) - - if out: + standard_name = self.get_property("standard_name", None) + if ( + standard_name is not None + and standard_name in ( + "latitude", + "projection_y_coordinate", + "grid_latitude", + ) + ): + return True + + if self.Units.islatitude: return True + axis = self.get_property("axis", None) + if axis is not None: + return axis == "Y" + # Still here? Then check the bounds. if self.has_bounds(): bounds = self.get_bounds(None) @@ -249,33 +256,38 @@ def Z(self): True """ - standard_names = ( - "atmosphere_ln_pressure_coordinate", - "atmosphere_sigma_coordinate", - "atmosphere_hybrid_sigma_pressure_coordinate", - "atmosphere_hybrid_height_coordinate", - "atmosphere_sleve_coordinate", - "ocean_sigma_coordinate", - "ocean_s_coordinate", - "ocean_s_coordinate_g1", - "ocean_s_coordinate_g2", - "ocean_sigma_z_coordinate", - "ocean_double_sigma_coordinate", - ) + standard_name = self.get_property("standard_name", None) + if ( + standard_name is not None + and standard_name in ( + "atmosphere_ln_pressure_coordinate", + "atmosphere_sigma_coordinate", + "atmosphere_hybrid_sigma_pressure_coordinate", + "atmosphere_hybrid_height_coordinate", + "atmosphere_sleve_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", + "ocean_sigma_z_coordinate", + "ocean_double_sigma_coordinate", + ) + ): + return True units = self.Units - out = ( - units.ispressure - or ( - str(self.get_property("positive", "Z")).lower() - in ("up", "down") - ) - or self.get_property("axis", None) == "Z" - or (units and units.units in ("level", "layer" "sigma_level")) - or self.get_property("standard_name", None) in standard_names - ) - - if out: + if units.ispressure: + return True + + positive = self.get_property("positive", None) + if positive is not None: + return str(positive).lower() in ("up", "down") + + axis = self.get_property("axis", None) + if axis is not None: + return axis == "Z" + + if units and units.units in ("level", "layer" "sigma_level"): return True # Still here? Then check the bounds. @@ -294,11 +306,12 @@ def axis(self): """The axis CF property. The `axis` property may be used to specify the type of - coordinates. It may take one of the values `'X'`, `'Y'`, `'Z'` or - `'T'` which stand for a longitude, latitude, vertical, or time - axis respectively. 
A value of `'X'`, `'Y'` or `'Z'` may also also - used to identify generic spatial coordinates (the values `'X'` and - `'Y'` being used to identify horizontal coordinates). + coordinates. It may take one of the values `'X'`, `'Y'`, `'Z'` + or `'T'` which stand for a longitude, latitude, vertical, or + time axis respectively. A value of `'X'`, `'Y'` or `'Z'` may + also also used to identify generic spatial coordinates (the + values `'X'` and `'Y'` being used to identify horizontal + coordinates). **Examples:** @@ -437,16 +450,16 @@ def identity( :Parameters: default: optional - If no identity can be found then return the value of the - default parameter. + If no identity can be found then return the value of + the default parameter. strict: `bool`, optional - If True then the identity is the first found of only the - "standard_name" property or the "id" attribute. + If True then the identity is the first found of only + the "standard_name" property or the "id" attribute. relaxed: `bool`, optional - If True then the identity is the first found of only the - "standard_name" property, the "id" attribute, the + If True then the identity is the first found of only + the "standard_name" property, the "id" attribute, the "long_name" property or the netCDF variable name. nc_only: `bool`, optional @@ -501,41 +514,65 @@ def identity( def identities(self, generator=False, ctype="XTYZ"): """Return all possible identities. - The identities comprise: + The identities comprise: + + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. + * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. + * All other properties (including "standard_name"), preceded by + the property name and an ``'='``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The netCDF variable name, preceded by ``'ncvar%'``. + + .. versionadded:: 3.0.0 + + .. seealso:: `id`, `identity` + + :Parameters: - * The "standard_name" property. - * The "id" attribute, preceded by ``'id%'``. - * The "cf_role" property, preceded by ``'cf_role='``. - * The "axis" property, preceded by ``'axis='``. - * The "long_name" property, preceded by ``'long_name='``. - * All other properties (including "standard_name"), preceded by - the property name and an ``'='``. - * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). - * The netCDF variable name, preceded by ``'ncvar%'``. + {{generator: `bool`, optional}} - .. versionadded:: 3.0.0 + ctype: (sequnce of) `str` + Restrict a coordinate type identies to be any of these + characters. Setting to a subset of ``'XTYZ'`` can give + performance improvements, as it will reduce the number + of coordinate types that are checked in circumstances + when particular coordinaete type have been ruled out a + priori. If a coordinate type is omitted then it will + not be in the returned identities even if the + coordinate construct is of that type. Coordinate types + are checked in the order given. - .. seealso:: `id`, `identity` - TODO - :Returns: - - `list` - The identities. 
- - **Examples:** - - >>> f.properties() - {'foo': 'bar', - 'long_name': 'Air Temperature', - 'standard_name': 'air_temperature'} - >>> f.nc_get_variable() - 'tas' - >>> f.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] + *Parameter example:* + ``ctype='Y'`` + + *Parameter example:* + ``ctype='XY'`` + + *Parameter example:* + ``ctype=('T', 'X')`` + + :Returns: + + `list` + The identities. + + **Examples:** + + >>> f.properties() + {'foo': 'bar', + 'long_name': 'Air Temperature', + 'standard_name': 'air_temperature'} + >>> f.nc_get_variable() + 'tas' + >>> f.identities() + ['air_temperature', + 'long_name=Air Temperature', + 'foo=bar', + 'standard_name=air_temperature', + 'ncvar%tas'] """ diff --git a/cf/test/test_AuxiliaryCoordinate.py b/cf/test/test_AuxiliaryCoordinate.py index b6d2297469..4b20157716 100644 --- a/cf/test/test_AuxiliaryCoordinate.py +++ b/cf/test/test_AuxiliaryCoordinate.py @@ -93,7 +93,7 @@ def test_AuxiliaryCoordinate_properties(self): def test_AuxiliaryCoordinate_insert_dimension(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates(view=True)(view=True)("X").value() + d = f.dimension_coordinates(view=True)("X").value() x = cf.AuxiliaryCoordinate(source=d) self.assertEqual(x.shape, (9,)) diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index f832fe0e2d..aeeec3ccc4 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -955,10 +955,12 @@ def test_Field__add__(self): b = g + f axis = a.domain_axis("grid_longitude", key=1) - for key in a.field_ancillaries(view=True).filter_by_axis("or", axis): + for key in a.field_ancillaries(view=True).filter_by_axis( + axis, mode="or" + ): a.del_construct(key) - for key in a.cell_measures(view=True).filter_by_axis("or", axis): + for key in a.cell_measures(view=True).filter_by_axis(axis, mode="or"): a.del_construct(key) self.assertTrue(a.equals(b, verbose=2)) @@ -2154,12 +2156,12 @@ def test_Field_coordinate(self): if identity == "domainaxis2": key = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", identity) + .filter_by_axis(identity, mode="and") .key() ) c = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", identity) + .filter_by_axis(identity, mode="and") .value() ) else: @@ -2281,12 +2283,12 @@ def test_Field_dimension_coordinate(self): if identity == "domainaxis2": key = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", identity) + .filter_by_axis(identity, mode="and") .key() ) c = ( f.dimension_coordinates(view=True) - .filter_by_axis("and", identity) + .filter_by_axis(identity, mode="and") .value() ) elif identity == "X": From 4d3b7d191f1a34296778d58f6277aa0a1fbeeb27 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 26 Mar 2021 19:44:46 +0000 Subject: [PATCH 07/53] cfdm updates (in progress) --- cf/field.py | 150 ++++++++++++++++++++++++++-------------------------- 1 file changed, 76 insertions(+), 74 deletions(-) diff --git a/cf/field.py b/cf/field.py index edc07ffaa9..09044b4467 100644 --- a/cf/field.py +++ b/cf/field.py @@ -494,7 +494,7 @@ def __getitem__(self, indices): # Set sizes of domain axes data_axes = new.get_data_axes() - domain_axes = new.domain_axes(_dict=True) + domain_axes = new.domain_axes(todict=True) for axis, size in zip(data_axes, new_data.shape): domain_axes[axis].set_size(size) @@ -741,7 +741,7 @@ def analyse_items(self, relaxed_identities=None): dimension_coordinates = self.dimension_coordinates(view=True) auxiliary_coordinates = 
self.auxiliary_coordinates(view=True) - for axis in self.domain_axes(_dict=True): + for axis in self.domain_axes(todict=True): dims = dimension_coordinates.filter_by_axis( axis, mode="and", view=True @@ -1512,16 +1512,16 @@ def _binary_operation_old(self, other, method): refs0 = dict(field0.coordinate_references) refs1 = dict(field1.coordinate_references) - field1_dimension_coordinates = field1.dimension_coordinates(_dict=True) - field1_auxiliary_coordinates = field1.auxiliary_coordinates(_dict=True) - field1_coordinate_references = field1.coordinate_references(_dict=True) - field1_domain_ancillaries = field1_domain_ancillaries(_dict=True) - field1_domain_axes = field1.domain_axes(_dict=True) + field1_dimension_coordinates = field1.dimension_coordinates(todict=True) + field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) + field1_coordinate_references = field1.coordinate_references(todict=True) + field1_domain_ancillaries = field1_domain_ancillaries(todict=True) + field1_domain_axes = field1.domain_axes(todict=True) # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) - field0_domain_ancillaries = field0_domain_ancillaries(_dict=True) + field0_domain_ancillaries = field0_domain_ancillaries(todict=True) c = field0.constructs.filter_by_type( - "auxiliary_coordinate", "domain_ancillary", _dict=True + "auxiliary_coordinate", "domain_ancillary", todict=True ) for axis0 in s["size1_broadcast_axes"] + s["new_size1_axes"]: @@ -1563,7 +1563,7 @@ def _binary_operation_old(self, other, method): *tuple( ref0.coordinate_conversion.domain_ancillaries().values() ), - _dict=True, + todict=True, ) ) @@ -1768,7 +1768,7 @@ def _binary_operation_old(self, other, method): "dimension_coordinate", "axuiliary_coordinate", "domain_ancillary", - _dict=True, + todict=True, ) ) for key1, item1 in identity_map.copy().items(): @@ -1907,7 +1907,7 @@ def _binary_operation(self, other, method): f_dimension_coordinates = f.dimension_coordinates(view=True) f_auxiliary_coordinates = f.auxiliary_coordinates(view=True) - for axis in f.domain_axes(_dict=True): + for axis in f.domain_axes(todict=True): identity = None key = None coord = None @@ -2224,7 +2224,7 @@ def _binary_operation(self, other, method): # Copy over coordinate reference constructs from field1, # including their domain ancillary constructs. 
# ------------------------------------------------------------ - for key, ref in field1.coordinate_references(_dict=True).items(): + for key, ref in field1.coordinate_references(todict=True).items(): axes = field1._coordinate_reference_axes(key) if axes.issubset(new_axes): refs_to_add_from_field1.append(ref) @@ -2343,7 +2343,7 @@ def _conform_coordinate_references(self, key, coordref=None): identity = self.constructs[key].identity(strict=True) if coordref is None: - refs = self.coordinate_references(_dict=True).values() + refs = self.coordinate_references(todict=True).values() else: refs = [coordref] @@ -2393,7 +2393,7 @@ def _conform_cell_methods(self): """ axis_map = {} - for cm in self.cell_methods(_dict=True).values(): + for cm in self.cell_methods(todict=True).values(): for axis in cm.get_axes(()): if axis in axis_map: continue @@ -2441,8 +2441,8 @@ def _equivalent_coordinate_references( `bool` """ - ref0 = self.coordinate_references(_dict=True)[key0] - ref1 = field1.coordinate_references(_dict=True)[key1] + ref0 = self.coordinate_references(todict=True)[key0] + ref1 = field1.coordinate_references(todict=True)[key1] if not ref0.equivalent(ref1, rtol=rtol, atol=atol, verbose=verbose): logger.info( @@ -2563,7 +2563,7 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): f"axes (got {len(set(axes))}, expected {ndim})" ) - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) axes2 = [] for axis, size in zip(axes, item.data.shape): dakey = self.domain_axis( @@ -2651,7 +2651,7 @@ def _conform_for_assignment(self, other, check_coordinates=False): # then other.data becomes Y X T # ------------------------------------------------------------ squeeze_axes1 = [] - other_domain_axes = other.domain_axes(_dict=True) + other_domain_axes = other.domain_axes(todict=True) for axis1 in v["undefined_axes"]: axis_size = other_domain_axes[axis1].get_size() @@ -3077,7 +3077,7 @@ def _regrid_get_latlong(self, name, axes=None): f"{name} field has multiple 'Y' dimension coordinates" ) - # TODO review for view/_dict. Is x_axis same as x_key? + # TODO review for view/todict. Is x_axis same as x_key? 
x = xdims.value() y = ydims.value() x_key = xdims.key() @@ -3160,7 +3160,7 @@ def _regrid_get_latlong(self, name, axes=None): ), ) - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) x_size = domain_axes[x_axis].get_size() y_size = domain_axes[y_axis].get_size() @@ -3779,7 +3779,7 @@ def _regrid_update_coordinate_references( domain_axes = None domain_ancillaries = None - for key, ref in self.coordinate_references(_dict=True).items(): + for key, ref in self.coordinate_references(todict=True).items(): ref_axes = [] for k in ref.coordinates(): ref_axes.extend(self.get_data_axes(k)) @@ -3792,7 +3792,7 @@ def _regrid_update_coordinate_references( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): - # TODo review for view/_dict + # TODo review for view/todict domain_ancillaries = self.domain_ancillaries( view=True, cache=domain_ancillaries ) @@ -3845,7 +3845,7 @@ def _regrid_update_coordinate_references( d_axes = self.get_data_axes(key) domain_axes = self.domain_axes( - _dict=True, cache=domain_axes + todict=True, cache=domain_axes ) for k_s, new_size in zip( @@ -3877,7 +3877,7 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): `None` """ - for ref in dst.coordinate_references(_dict=True).values(): + for ref in dst.coordinate_references(todict=True).values(): axes = set() for key in ref.coordinates(): axes.update(dst.get_data_axes(key)) @@ -3964,7 +3964,7 @@ def _regrid_update_coordinates( ): self.del_construct(key) - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) dst_auxiliary_coordinates = None if cartesian: @@ -4053,7 +4053,7 @@ def _regrid_update_coordinates( # Copy names of dimensions from destination to source field if not dst_dict: - dst_domain_axes = dst.domain_axes(_dict=True) + dst_domain_axes = dst.domain_axes(todict=True) for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys ): @@ -4324,8 +4324,8 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): axis1_to_axis0 = {} - coordinate_references = self.coordinate_references(_dict=True) - w_coordinate_references = w.coordinate_references(_dict=True) + coordinate_references = self.coordinate_references(todict=True) + w_coordinate_references = w.coordinate_references(todict=True) for axis1 in w.get_data_axes(): identity = t["axis_to_id"].get(axis1, None) @@ -5404,7 +5404,7 @@ def Flags(self): def ncdimensions(self): """""" out = {} - for dim, domain_axis in self.domain_axes(_dict=True).items(): + for dim, domain_axis in self.domain_axes(todict=True).items(): ncdim = domain_axis.nc_get_dimension(None) if ncdim is not None: out[dim] = ncdim @@ -5444,7 +5444,7 @@ def rank(self): 4 """ - return len(self.domain_axes(_dict=True)) + return len(self.domain_axes(todict=True)) @property def varray(self): @@ -6011,7 +6011,7 @@ def radius(self, default=None): """ radii = [] - for cr in self.coordinate_references(_dict=True).values(): + for cr in self.coordinate_references(todict=True).values(): r = cr.datum.get_parameter("earth_radius", None) if r is not None: r = Data.asdata(r) @@ -6115,7 +6115,7 @@ def close(self): """ super().close() - for construct in self.constructs.filter_by_data(_dict=True).values(): + for construct in self.constructs.filter_by_data(todict=True).values(): construct.close() def iscyclic(self, identity, **kwargs): @@ -6231,7 +6231,7 @@ def concatenate(cls, fields, axis=0, _preserve=True): # Concatenate constructs with data # 
------------------------------------------------------------ for key, construct in field0.constructs.filter_by_data( - _dict=True + todict=True ).items(): construct_axes = field0.get_data_axes(key) @@ -6745,7 +6745,7 @@ def weights( # coordinates pass - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) for da_key in domain_axes: if self._weights_geometry_area( @@ -7077,17 +7077,17 @@ def weights( field.del_data() field.del_data_axes() - not_needed_axes = set(field.domain_axes(_dict=True)).difference( + not_needed_axes = set(field.domain_axes(todict=True)).difference( weights_axes ) - for key in self.cell_methods(_dict=True).copy(): + for key in self.cell_methods(todict=True).copy(): field.del_construct(key) - for key in self.field_ancillaries(_dict=True).copy(): + for key in self.field_ancillaries(todict=True).copy(): field.del_construct(key) - for key in field.coordinate_references(_dict=True).copy(): + for key in field.coordinate_references(todict=True).copy(): if field.coordinate_reference_domain_axes(key).intersection( not_needed_axes ): @@ -8463,7 +8463,7 @@ def del_coordinate_reference( default, f"Can't identify construct from {construct!r}" ) - for key, ref in tuple(self.coordinate_references(_dict=True).items()): + for key, ref in tuple(self.coordinate_references(todict=True).items()): if c_key in ref.coordinates(): self.del_coordinate_reference( key, construct=None, default=default @@ -8617,7 +8617,7 @@ def del_domain_axis( self.squeeze(dakey, inplace=True) for ckey, construct in self.constructs.filter_by_data( - _dict=True + todict=True ).items(): data = construct.get_data(None, _fill_value=False) if data is None: @@ -8739,7 +8739,7 @@ def get_coordinate_reference( ) for cr_key, ref in tuple( - self.coordinate_references(_dict=True).items() + self.coordinate_references(todict=True).items() ): if c_key in [ ref.coordinates(), @@ -8808,8 +8808,8 @@ def set_coordinate_reference( # Still here? 
ref = coordinate_reference.copy() - coordinates = field.coordinates(_dict=True) - domain_ancillaries = field.domain_ancillaries(_dict=True) + coordinates = field.coordinates(todict=True) + domain_ancillaries = field.domain_ancillaries(todict=True) ckeys = [] for value in coordinate_reference.coordinates(): @@ -10810,7 +10810,7 @@ def collapse( aux.bounds.set_data(d.bounds.data, copy=False) # Reset the axis size - f.domain_axes(_dict=True)[axis].set_size(1) + f.domain_axes(todict=True)[axis].set_size(1) logger.info( f"Changing axis size to 1: {axis}" ) # pragma: no cover @@ -11459,7 +11459,7 @@ def _group_weights(weights, iaxis, index): ) # pragma: no cover # Size of uncollapsed axis - axis_size = self.domain_axes(_dict=True)[axis].get_size() + axis_size = self.domain_axes(todict=True)[axis].get_size() # Integer position of collapse axis iaxis = self.get_data_axes().index(axis) @@ -12121,7 +12121,7 @@ def _group_weights(weights, iaxis, index): if group_span is not False: if isinstance(group_span, int): if ( - pc.domain_axes(_dict=True)[axis].get_size() + pc.domain_axes(todict=True)[axis].get_size() != group_span ): classification[index] = ignore_n @@ -12258,7 +12258,7 @@ def _group_weights(weights, iaxis, index): except ValueError as error: raise ValueError(f"Can't collapse: {error}") - if squeeze and f.domain_axes(_dict=True)[axis].get_size() == 1: + if squeeze and f.domain_axes(todict=True)[axis].get_size() == 1: # Remove a totally collapsed axis from the field's # data array f.squeeze(axis, inplace=True) @@ -12341,7 +12341,7 @@ def _update_cell_methods( if ( original_domain_axis.get_size() - == self.domain_axes(_dict=True)[key].get_size() + == self.domain_axes(todict=True)[key].get_size() ): if ( lastcm.get_axes(None) == axes @@ -12437,7 +12437,7 @@ def direction(self, identity, axes=None, **kwargs): if axis is None: return True - for key, coord in self.dimension_coordinates(_dict=True).items(): + for key, coord in self.dimension_coordinates(todict=True).items(): if axis == self.get_data_axes(key)[0]: return coord.direction() @@ -12461,9 +12461,9 @@ def directions(self): {'dim1': True, 'dim0': False} """ - out = {key: True for key in self.domain_axes(_dict=True).keys()} + out = {key: True for key in self.domain_axes(todict=True).keys()} - for key, dc in self.dimension_coordinates(_dict=True).items(): + for key, dc in self.dimension_coordinates(todict=True).items(): direction = dc.direction() if not direction: axis = self.get_data_axes(key)[0] @@ -12762,7 +12762,7 @@ def indices(self, *mode, **kwargs): # Initialize indices indices = [slice(None)] * self.ndim - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) constructs = self.constructs.filter_by_data(view=True) parsed = {} @@ -13284,7 +13284,7 @@ def set_data( # Construct new field f = _inplace_enabled_define_and_cleanup(self) - domain_axes = f.domain_axes(_dict=True) + domain_axes = f.domain_axes(todict=True) if axes is None and not domain_axes: set_axes = False @@ -13438,7 +13438,7 @@ def domain_mask(self, **kwargs): mask.nc_del_variable(None) for key in self.constructs.filter_by_type( - "cell_method", "field_ancillary", _dict=True + "cell_method", "field_ancillary", todict=True ): mask.del_construct(key) @@ -13563,7 +13563,7 @@ def compute_vertical_coordinates( """ f = _inplace_enabled_define_and_cleanup(self) - for cr in f.coordinate_references(_dict=True).values(): + for cr in f.coordinate_references(todict=True).values(): # -------------------------------------------------------- # Compute 
the non-parametric vertical coordinates, if # possible. @@ -13906,7 +13906,7 @@ def match_by_rank(self, *ranks): if not ranks: return True - n_domain_axes = len(self.domain_axes(_dict=True)) + n_domain_axes = len(self.domain_axes(todict=True)) for rank in ranks: ok = rank == n_domain_axes if ok: @@ -14987,7 +14987,7 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): super(Field, f).flip(iaxes, inplace=True) # Flip any constructs which span the flipped axes - for key, construct in f.constructs.filter_by_data(_dict=True).items(): + for key, construct in f.constructs.filter_by_data(todict=True).items(): construct_axes = f.get_data_axes(key) construct_flip_axes = axes.intersection(construct_axes) if construct_flip_axes: @@ -15150,7 +15150,7 @@ def anchor( f"Anchor value has incompatible units: {value.Units!r}" ) - axis_size = f.domain_axes(_dict=True)[axis].get_size() + axis_size = f.domain_axes(todict=True)[axis].get_size() if axis_size <= 1: # Don't need to roll a size one axis if dry_run: @@ -15327,7 +15327,9 @@ def autocyclic(self, verbose=None): >>> f.autocyclic() """ - dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() +# dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() + + dims = self.dimension_coordinates("X", todict=True) if len(dims) != 1: return False @@ -15474,7 +15476,7 @@ def squeeze(self, axes=None, inplace=False, i=False, **kwargs): data_axes = self.get_data_axes() if axes is None: - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) axes = [ axis for axis in data_axes @@ -17079,7 +17081,7 @@ def axes_names(self, *identities, **kwargs): self, "axes_names", kwargs ) # pragma: no cover - out = self.domain_axes(_dict=True).copy() + out = self.domain_axes(todict=True).copy() for key in tuple(out): value = self.constructs.domain_axis_identity(key) @@ -17188,7 +17190,7 @@ def axis_size(self, identity, default=ValueError(), axes=None, **kwargs): axis = self.domain_axis(identity, key=True) - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) da = domain_axes.get(axis) if da is not None: @@ -17340,7 +17342,7 @@ def set_construct( self._conform_cell_methods() elif construct_type == "coordinate_reference": - for ckey in self.coordinates(_dict=True): + for ckey in self.coordinates(todict=True): self._conform_coordinate_references(ckey, coordref=construct) # Return the construct key @@ -17733,7 +17735,7 @@ def halo( d.set_size(d.get_size() + 2 * h) # Add halos to metadata constructs - for key, c in f.constructs.filter_by_data(_dict=True).items(): + for key, c in f.constructs.filter_by_data(todict=True).items(): construct_axes = f.get_data_axes(key) construct_size = { construct_axes.index(axis): h @@ -17977,7 +17979,7 @@ def percentile( for axis in [ axis - for axis in self.domain_axes(_dict=True) + for axis in self.domain_axes(todict=True) if axis not in data_axes ]: out.set_construct(self._DomainAxis(1), key=axis) @@ -18035,7 +18037,7 @@ def percentile( other_axes = set( [ axis - for axis in self.domain_axes(_dict=True) + for axis in self.domain_axes(todict=True) if axis not in axes or self.domain_axis(axis).size == 1 ] ) @@ -18053,10 +18055,10 @@ def percentile( # ------------------------------------------------------------ # Copy coordinate reference constructs to the output field # ------------------------------------------------------------ - out_coordinates = out.coordinates(_dict=True) - out_domain_ancillaries 
= out.domain_ancillaries(_dict=True) + out_coordinates = out.coordinates(todict=True) + out_domain_ancillaries = out.domain_ancillaries(todict=True) - for cr_key, ref in self.coordinate_references(_dict=True).items(): + for cr_key, ref in self.coordinate_references(todict=True).items(): ref = ref.copy() for c_key in ref.coordinates(): @@ -18495,7 +18497,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): f.set_data_axes(new_data_axes) # Modify or remove cell methods that span the flatten axes - for key, cm in tuple(f.cell_methods(_dict=True).items()): + for key, cm in tuple(f.cell_methods(todict=True).items()): cm_axes = set(cm.get_axes(())) if not cm_axes or cm_axes.isdisjoint(axes): continue @@ -18631,7 +18633,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): ) f = _inplace_enabled_define_and_cleanup(self) - domain_axes = self.domain_axes(_dict=True) + domain_axes = self.domain_axes(todict=True) if domain_axes[axis].get_size() <= 1: if inplace: f = None @@ -18659,7 +18661,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): super(Field, f).roll(iaxis, shift, inplace=True) - for key, construct in f.constructs.filter_by_data(_dict=True).items(): + for key, construct in f.constructs.filter_by_data(todict=True).items(): axes = f.get_data_axes(key, default=()) if axis in axes: construct.roll(axes.index(axis), shift, inplace=True) @@ -20860,7 +20862,7 @@ def coord(self, identity, default=ValueError(), key=False, **kwargs): "Use methods of the 'coordinates' attribute instead.", ) # pragma: no cover - if identity in self.domain_axes(_dict=True): + if identity in self.domain_axes(todict=True): # Allow an identity to be the domain axis construct key # spanned by a dimension coordinate construct return self.dimension_coordinate( From 8dad12034b6024204fe869700a6d0187a082d8ed Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 26 Mar 2021 23:43:50 +0000 Subject: [PATCH 08/53] devs --- cf/auxiliarycoordinate.py | 3 - cf/cfimplementation.py | 2 - cf/constants.py | 1 - cf/constructs.py | 24 +++-- cf/coordinateconversion.py | 3 - cf/coordinatereference.py | 1 - cf/dimensioncoordinate.py | 9 -- cf/domainancillary.py | 3 - cf/domainaxis.py | 3 - cf/field.py | 79 +++++++++------ cf/fieldancillary.py | 3 - cf/fieldlist.py | 12 --- cf/flags.py | 8 -- cf/formula_terms.py | 7 -- cf/functions.py | 27 +---- cf/index.py | 7 +- cf/interiorring.py | 7 +- cf/list.py | 3 - cf/maths.py | 1 - cf/mixin_container.py | 11 +- cf/nodecountproperties.py | 3 - cf/partnodecountproperties.py | 3 - cf/query.py | 182 ++++++++++++++++++---------------- cf/regrid.py | 35 +++---- cf/subspacefield.py | 3 - cf/timeduration.py | 12 +-- cf/units.py | 3 - 27 files changed, 180 insertions(+), 275 deletions(-) diff --git a/cf/auxiliarycoordinate.py b/cf/auxiliarycoordinate.py index 94490993f2..d04efd724d 100644 --- a/cf/auxiliarycoordinate.py +++ b/cf/auxiliarycoordinate.py @@ -57,6 +57,3 @@ def __repr__(self): """ return super().__repr__().replace("<", ">> d = c.filter_by_identity('ncvar%time') """ + if cache is not None: + return cache + # Allow keys without the 'key%' prefix for n, identity in enumerate(identities): if identity in self: @@ -251,5 +259,5 @@ def filter_by_identity(self, *identities, view=False, **kwargs): ctype = [i for i in "XTYZ" if i in identities] return super().filter_by_identity( - *identities, view=view, ctype=ctype, **kwargs + *identities, view=view, todict=todict, ctype=ctype, **identities_kwargs ) diff --git a/cf/coordinateconversion.py 
b/cf/coordinateconversion.py index 4849bd687e..f2a4650138 100644 --- a/cf/coordinateconversion.py +++ b/cf/coordinateconversion.py @@ -27,6 +27,3 @@ def __repr__(self): """ return super().__repr__().replace("<", " 1: + return self._default( + default, + "TODO 2" + ) + key, construct = c.popitem() if key: - return c.key(default=default) - - return c.value(default=default) + return key + + return construct + +# c = domain_ancillaries +# +# if identity is not None: +# c = c(identity, view=True) +# if not c: +# da_key = self.domain_axis(identity, key=True, default=None) +# if da_key is not None: +# c = domain_ancillaries.filter_by_axis( +# da_key, mode="exact", view=True +# ) +# +# if key: +# return c.key(default=default) +# +# return c.value(default=default) def cell_measure(self, identity=None, default=ValueError(), key=False): """Select a cell measure construct by its identity. diff --git a/cf/fieldancillary.py b/cf/fieldancillary.py index 5ee093f044..c9241863d8 100644 --- a/cf/fieldancillary.py +++ b/cf/fieldancillary.py @@ -38,6 +38,3 @@ def __repr__(self): """ return super().__repr__().replace("<", " 3: break - # --- End: for free_bytes = free_KiB * 1024 @@ -195,9 +194,6 @@ def _free_memory(): return float(virtual_memory().available) -# --- End: if - - def configuration( atol=None, rtol=None, @@ -1328,7 +1324,6 @@ def set_performance(chunksize=None, free_memory_factor=None): except ValueError: _cf_free_memory_factor(old[1]) raise - # --- End: if return old @@ -1579,9 +1574,6 @@ def open_files_threshold_exceeded(): ) -# --- End: if - - def close_files(file_format=None): """Close open files containing sub-arrays of data arrays. @@ -1626,9 +1618,8 @@ def close_files(file_format=None): fh.close() _file_to_fh[file_format].clear() - # --- End: if - + def close_one_file(file_format=None): """Close an arbitrary open file containing a sub-array of a data array. 
@@ -1922,7 +1913,6 @@ def parse_indices( if isinstance(arg0, str) and arg0 == "mask": mask_indices = indices[1] indices = indices[2:] - # --- End: if # Initialize the list of parsed indices as the input indices with any # Ellipsis objects expanded @@ -1967,8 +1957,6 @@ def parse_indices( "Scalar array can only be indexed with () or Ellipsis" ) - # --- End: if - for i, (index, size) in enumerate(zip(parsed_indices, shape)): is_slice = False if isinstance(index, slice): @@ -2011,7 +1999,6 @@ def parse_indices( # 3:6:-1 => 3:-4:-1 # 3:9:-1 => 3:-1:-1 stop -= size - # --- End: if if step > 0 and -size <= start < 0 and 0 <= stop <= size + start: # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] @@ -2171,8 +2158,6 @@ def parse_indices( parsed_indices, shape ) ) - # --- End: if - # --- End: if if is_slice: if reverse and index.step < 0: @@ -2202,7 +2187,6 @@ def parse_indices( index = slice(start, stop, step) flip.append(i) - # --- End: if # If step is greater than one then make sure that # index.stop isn't bigger than it needs to be @@ -2211,7 +2195,6 @@ def parse_indices( div, mod = divmod(stop - start - 1, step) stop = start + div * step + 1 index = slice(start, stop, step) - # --- End: if # if envelope: @@ -2221,10 +2204,8 @@ def parse_indices( index = slice( start, stop, (1 if reverse else _numpy_sign(step)) ) - # --- End: if parsed_indices[i] = index - # --- End: for if not (cyclic or reverse or envelope or mask): return parsed_indices @@ -2571,7 +2552,6 @@ def load_stash2standard_name(table=None, delimiter="!", merge=True): stash2sn[key] += line else: stash2sn[key] = line - # --- End: for if not merge: _stash2standard_name.clear() @@ -2670,7 +2650,6 @@ def flat(x): yield sub else: yield a - # --- End: for def abspath(filename): @@ -2902,7 +2881,6 @@ def hash_array(array): array = array.filled() else: array = array.data - # --- End: if if not array.flags.c_contiguous: # array = array.copy() @@ -3171,8 +3149,6 @@ def loop_over_index(x, current_index, axis_indices, indices): indices[current_index] = slice(i, i + steps[current_index]) loop_over_index(x, current_index - 1, axis_indices, indices) - # --- End: def - # Retrieve the index of each axis defining the sections if data: if isinstance(axes, int): @@ -3190,7 +3166,6 @@ def loop_over_index(x, current_index, axis_indices, indices): axis_indices.append(x.get_data_axes().index(key)) except ValueError: pass - # --- End: if # find the size of each dimension sizes = x.shape diff --git a/cf/index.py b/cf/index.py index f88b5332f8..d558d0b0c4 100644 --- a/cf/index.py +++ b/cf/index.py @@ -17,9 +17,7 @@ class Index(mixin.PropertiesData, cfdm.Index): **NetCDF interface** - The netCDF variable name of the index variable may be accessed - with the `nc_set_variable`, `nc_get_variable`, `nc_del_variable` - and `nc_has_variable` methods. 
+ {{netCDF variable}} The name of the netCDF dimension spanned by the index variable's data (which does not correspond to a domain axis construct) may be @@ -53,6 +51,3 @@ def __repr__(self): """ return super().__repr__().replace("<", " 12: m0 = 1 - # --- End: for return out diff --git a/cf/regrid.py b/cf/regrid.py index e8b617c8e9..fcf6482f58 100644 --- a/cf/regrid.py +++ b/cf/regrid.py @@ -29,19 +29,19 @@ def __init__( method="conservative_1st", ignore_degenerate=False, ): - """Creates a handle for regridding fields from a source grid to - a destination grid that can then be used by the run_regridding + """Creates a handle for regridding fields from a source grid to a + destination grid that can then be used by the run_regridding method. :Parameters: srcfield: ESMF.Field - The source field with an associated grid to be used for - regridding. + The source field with an associated grid to be used + for regridding. dstfield: ESMF.Field - The destination field with an associated grid to be used - for regridding. + The destination field with an associated grid to be + used for regridding. srcfracfield: ESMF.Field A field to hold the fraction of the source field that @@ -58,11 +58,11 @@ def __init__( used. If it is set to 'conservative_2nd' second order conservative regridding is used. If it is set to 'linear' then (multi)linear interpolation is used. If - it is set to 'patch' then higher-order patch recovery is - used. If it is set to 'nearest_stod' then nearest source - to destination interpolation is used. If it is set to - 'nearest_dtos' then nearest destination to source - interpolation is used. + it is set to 'patch' then higher-order patch recovery + is used. If it is set to 'nearest_stod' then nearest + source to destination interpolation is used. If it is + set to 'nearest_dtos' then nearest destination to + source interpolation is used. ignore_degenerate: `bool`, optional Whether to check for degenerate points. @@ -196,7 +196,6 @@ def create_grid( "The longitude and latitude coordinates" " must have the same shape." ) - # --- End: if if use_bounds: if not coords_2D: @@ -246,7 +245,6 @@ def create_grid( ) == Data(360, "degrees") except ValueError: pass - # --- End: if # Create empty grid max_index = numpy_array(shape, dtype="int32") @@ -302,7 +300,6 @@ def create_grid( y_bounds = y_bounds[:-1, :] gridCorner[x][...] = x_bounds gridCorner[y][...] = y_bounds - # --- End: if else: # Test the dimensionality of the list of coordinates ndim = len(coords) @@ -357,7 +354,7 @@ def create_grid( staggerLocs = [ESMF.StaggerLoc.CENTER] else: staggerLocs = [ESMF.StaggerLoc.CENTER_VCENTER] - # --- End: if + grid = ESMF.Grid( max_index, coord_sys=ESMF.CoordSys.CART, staggerloc=staggerLocs ) @@ -375,7 +372,6 @@ def create_grid( gridCentre[...] = coords[d].array.reshape( [shape[d] if x == d else 1 for x in range(0, ndim)] ) - # --- End: for # Populate grid corners if use_bounds: @@ -401,8 +397,6 @@ def create_grid( gridCorner[d][...] 
= boundsD.reshape( [shape[d] + 1 if x == d else 1 for x in range(0, ndim)] ) - # --- End: if - # --- End: if # Add the mask if appropriate if mask is not None: @@ -521,13 +515,11 @@ def reconstruct_sectioned_data(sections): ) new_key = k[:i] data_list = [sections[k]] - # --- End: for new_sections[new_key] = Regrid.concatenate_data( data_list, i ) sections = new_sections - # --- End: for @staticmethod def compute_mass_grid( @@ -580,6 +572,3 @@ def compute_mass_grid( mass = numpy_sum(areafield.data[ind] * valuefield.data[ind]) return mass - - -# --- End: class diff --git a/cf/subspacefield.py b/cf/subspacefield.py index c2bb936024..265484af99 100644 --- a/cf/subspacefield.py +++ b/cf/subspacefield.py @@ -291,6 +291,3 @@ def __call__(self, *args, **kwargs): return True return field[indices] - - -# --- End: class diff --git a/cf/timeduration.py b/cf/timeduration.py index 0479932ad9..636cc422fe 100644 --- a/cf/timeduration.py +++ b/cf/timeduration.py @@ -338,7 +338,6 @@ def __init__( units = self.duration.Units if not units.istime: raise ValueError("Bad units: {!r}".format(units)) - # --- End: if if not (units.iscalendartime or units.istime): raise ValueError( @@ -368,7 +367,7 @@ def __init__( # offset[4] = None # if units <= _seconds and duration < _one_minute: # offset[5] = None - # --- End: if + self.offset = Offset(*offset) # TODO should offset be None for all "higher" units @@ -845,8 +844,6 @@ def _dHMS(duration, other, calendar, op): d = op(Data(0.0, units), duration) return d.datetime_array.item(()) - # --- End: def - duration = self.duration units = duration.Units @@ -886,7 +883,6 @@ def _dHMS(duration, other, calendar, op): max_days = self.days_in_month(y, m, calendar) if d > max_days: d = max_days - # --- End: if # TODO When cftime==1.1.4 is ready use this one line: # return other.replace(year=y, month=m, day=d) @@ -914,7 +910,6 @@ def _data_arithmetic(self, other, method, inplace=False): out.append(None) else: out.append(getattr(self, method)(d)) - # --- End: for dt[...] = numpy.reshape(out, dt.shape) @@ -1439,8 +1434,6 @@ def _dHMS(duration, dt, end): else: return dt1, dt # dt1, dt.copy() - # --- End: def - calendar = getattr(dt, "calendar", _default_calendar) if calendar == "": calendar = _default_calendar @@ -1661,9 +1654,6 @@ def is_day_factor(self): return False -# --- End: class - - def Y(duration=1, month=1, day=1, hour=0, minute=0, second=0): """Return a time duration of calendar years in a `cf.TimeDuration` object. 
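The Y helper shown above, like its siblings such as M and D, returns a cf.TimeDuration. The sketch below illustrates the calendar arithmetic that the _dHMS logic in the preceding hunks implements; it assumes a duration may be added to a date-time built with cf.dt, and the exact results near month ends depend on the calendar in use.

import cf

one_year = cf.Y()      # a duration of one calendar year
print(one_year)

# Adding a calendar year advances the year number while keeping the
# month and day where the calendar allows it
print(cf.Y() + cf.dt("2000-06-15"))    # one calendar year later: 2001-06-15

# Fixed-length durations use the same mechanism
print(cf.D(30) + cf.dt("2000-06-15"))  # thirty days later: 2000-07-15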
diff --git a/cf/units.py b/cf/units.py index 0207c43803..0587add35f 100644 --- a/cf/units.py +++ b/cf/units.py @@ -28,6 +28,3 @@ def __new__(cls, *args, **kwargs): @staticmethod def conform(*args, **kwargs): return cfUnits.conform(*args, **kwargs) - - -# --- End: class From 44581b0b7a0ac74a0a2b2269285b4d0f36ba4cef Mon Sep 17 00:00:00 2001 From: David Hassell Date: Sat, 27 Mar 2021 18:14:49 +0000 Subject: [PATCH 09/53] devs --- cf/constructs.py | 117 ++++++++++++++++++++++++++++++++++++++++++++++- cf/field.py | 45 +++++++++++------- 2 files changed, 145 insertions(+), 17 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index dd46b2ed14..57c9f5de3e 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -155,7 +155,8 @@ def _matching_values(cls, value0, construct, value1, basic=False): # # return self.constructs[da_key] - def filter_by_identity(self, *identities, view=False, todict=False, cache=None, **identities_kwargs): + def filter_by_identity(self, *identities, view=False, + todict=False, cache=None, **identities_kwargs): """Select metadata constructs by identity. .. versionadded:: 3.0.0 @@ -261,3 +262,117 @@ def filter_by_identity(self, *identities, view=False, todict=False, cache=None, return super().filter_by_identity( *identities, view=view, todict=todict, ctype=ctype, **identities_kwargs ) + + @classmethod + def _filter_by_identity(cls, self, *identities, todict=False, + cache=None, _identities_config={}): + """Select metadata constructs by identity. + + .. versionadded:: 3.0.0 + + .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, + `filter_by_measure`, `filter_by_method`, + `filter_by_naxes`, `filter_by_ncdim`, + `filter_by_ncvar`, `filter_by_property`, + `filter_by_size`, `filter_by_type`, + `filters_applied`, `inverse_filter`, `unfilter` + + :Parameters: + + identities: optional + Select constructs that have any of the given identities or + construct keys. + + An identity is specified by a string (e.g. ``'latitude'``, + ``'long_name=time'``, etc.); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``), for which + all constructs whose identities match (via `re.search`) + are selected. + + If no identities are provided then all constructs are selected. + + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + five identities: + + >>> x.identities() + ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + Note that the identifiers of a metadata construct in the + output of a `print` or `!dump` call are always one of its + identities, and so may always be used as an *identities* + argument. + + Domain axis constructs may also be identified by their + position in the field construct's data array. Positions + are specified by either integers. + + .. note:: This is an extension to the functionality of + `cfdm.Constucts.filter_by_identity`. + + {{view: `bool`, optional}} + + {{todict: `bool`, optional}} + + {{cache: optional}} + + identities_kwargs: optional + Additional parameters for configuring each construct's + `identities` method. By default ``generator=True`` is + passed by default, and ``ctype`` is inferred from the + *identities* parameter. + + .. 
versionadded:: 3.9.0 + + :Returns: + + `Constructs` + The selected constructs and their construct keys. + + **Examples:** + + Select constructs that have a "standard_name" property of + 'latitude': + + >>> d = c.filter_by_identity('latitude') + + Select constructs that have a "long_name" property of 'Height': + + >>> d = c.filter_by_identity('long_name=Height') + + Select constructs that have a "standard_name" property of + 'latitude' or a "foo" property of 'bar': + + >>> d = c.filter_by_identity('latitude', 'foo=bar') + + Select constructs that have a netCDF variable name of 'time': + + >>> d = c.filter_by_identity('ncvar%time') + + """ + if cache is not None: + return cache + + # Allow keys without the 'key%' prefix + for n, identity in enumerate(identities): + if identity in self: + identities = list(identities) + identities[n] = "key%" + identity + break + + config = {"ctype": [i for i in "XTYZ" if i in identities]} + config.update(_identities_config) + + return super(Constructs, cls)._filter_by_identity( + self, + *identities, + todict=todict, + cache=cache, + _identities_config=config + ) diff --git a/cf/field.py b/cf/field.py index afe82c8be3..7a080ea16b 100644 --- a/cf/field.py +++ b/cf/field.py @@ -493,7 +493,7 @@ def __getitem__(self, indices): construct_data_axes = new.constructs.data_axes() for key, construct in new.constructs.filter_by_axis( - *data_axes, mode="or", view=True + *data_axes, mode="or", todict=True ).items(): construct_axes = construct_data_axes[key] dice = [] @@ -726,18 +726,24 @@ def analyse_items(self, relaxed_identities=None): if relaxed_identities is None: relaxed_identities = cf_relaxed_identities() - dimension_coordinates = self.dimension_coordinates(view=True) - auxiliary_coordinates = self.auxiliary_coordinates(view=True) +# dimension_coordinates = self.dimension_coordinates(view=True) +# auxiliary_coordinates = self.auxiliary_coordinates(view=True) for axis in self.domain_axes(todict=True): - dims = dimension_coordinates.filter_by_axis( - axis, mode="and", view=True - ) +# dims = self.constructs.chain( +# "filter_by_type", +# ("dimension_coordinate",), "filter_by_axis", (axis,) +# mode="and", todict=True +# ) + dims = self.dimension_coordinates(axes=(axis,), + mode="and", todict=True) + if len(dims) == 1: # This axis of the domain has a dimension coordinate - key = dims.key() - dim = dims.value() + key, dim = dims.popitem() +# key = dims.key() +# dim = dims.value() identity = dim.identity(strict=True, default=None) if identity is None: @@ -754,7 +760,7 @@ def analyse_items(self, relaxed_identities=None): if identity: if identity in id_to_axis: warnings.append( - "Field has multiple {!r} axes".format(identity) + "Field has multiple {identity!r} axes" ) axis_to_id[axis] = identity @@ -766,14 +772,19 @@ def analyse_items(self, relaxed_identities=None): continue else: - auxs = auxiliary_coordinates.filter_by_axis( - axis, mode="exact", view=True + auxs = self.constructs.chain( + "filter_by_type", + ("auxiliary_coordinate",), "filter_by_axis", (axis,), + mode="and", todict=True ) +# auxs = self.auxiliary_coordinates.filter_by_axis( +# axis, mode="exact", todict=True +# ) if len(auxs) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly # one 1-d auxiliary coordinate, so that will do. 
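Throughout this patch series the construct filters gain a todict option, so selections come back as plain dictionaries and a unique match is retrieved with popitem() rather than the old .key()/.value() pair, as the next hunk shows. A minimal sketch of the per-axis lookup performed by analyse_items, written with the in-progress keyword-filter spelling (filter_by_axis, axis_mode, todict) that these hunks adopt; cf.example_field is used only to provide a small field to inspect.

import cf

f = cf.example_field(0)

for axis in f.domain_axes(todict=True):
    dims = f.dimension_coordinates(
        filter_by_axis=(axis,), axis_mode="exact", todict=True
    )
    if len(dims) == 1:
        # Exactly one dimension coordinate spans this axis
        key, dim = dims.popitem()
        print(axis, key, dim.identity())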
- key, aux = dict(auxs).popitem() + key, aux = auxs.popitem() identity = aux.identity(strict=True, default=None) @@ -783,7 +794,7 @@ def analyse_items(self, relaxed_identities=None): if identity and aux.has_data(): if identity in id_to_axis: warnings.append( - "Field has multiple {!r} axes".format(identity) + f"Field has multiple {identity!r} axes" ) axis_to_id[axis] = identity @@ -13348,11 +13359,12 @@ def set_data( if not domain_axes: raise ValueError("Can't set data: No domain axes exist") - domain_axes = f.domain_axes(view=True) +# domain_axes = f.domain_axes(view=True) axes = [] for n in data_shape: - da = domain_axes.filter_by_size(n, view=True) +# da = domain_axes.filter_by_size(n, todict=True) + da = f.domain_axes(filter_by_size=(n,), todict=True) if len(da) != 1: raise ValueError( "Can't insert data: Ambiguous data shape: " @@ -13360,7 +13372,8 @@ def set_data( "Consider setting the axes parameter." ) - axes.append(da.key()) + da_key, _ = da.popitem() + axes.append(da_key) else: # -------------------------------------------------------- From 598045c480e2a1da052e8785e4b55622c4820518 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 29 Mar 2021 17:59:33 +0100 Subject: [PATCH 10/53] devs --- cf/field.py | 536 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 322 insertions(+), 214 deletions(-) diff --git a/cf/field.py b/cf/field.py index 7a080ea16b..5ab459e3f0 100644 --- a/cf/field.py +++ b/cf/field.py @@ -736,8 +736,8 @@ def analyse_items(self, relaxed_identities=None): # ("dimension_coordinate",), "filter_by_axis", (axis,) # mode="and", todict=True # ) - dims = self.dimension_coordinates(axes=(axis,), - mode="and", todict=True) + dims = self.dimension_coordinates(filter_by_axes=(axis,), + axis_mode="and", todict=True) if len(dims) == 1: # This axis of the domain has a dimension coordinate @@ -1512,16 +1512,17 @@ def _binary_operation_old(self, other, method): refs1 = dict(field1.coordinate_references) field1_dimension_coordinates = field1.dimension_coordinates(todict=True) - field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) +# field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) field1_coordinate_references = field1.coordinate_references(todict=True) field1_domain_ancillaries = field1_domain_ancillaries(todict=True) field1_domain_axes = field1.domain_axes(todict=True) # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) - field0_domain_ancillaries = field0_domain_ancillaries(todict=True) - c = field0.constructs.filter_by_type( - "auxiliary_coordinate", "domain_ancillary", todict=True - ) +# field0_domain_ancillaries = field0_domain_ancillaries(todict=True) + +# c = field0.constructs.filter_by_type( +# "auxiliary_coordinate", "domain_ancillary", +# ) for axis0 in s["size1_broadcast_axes"] + s["new_size1_axes"]: axis1 = axis0_to_axis1[axis0] @@ -1533,8 +1534,11 @@ def _binary_operation_old(self, other, method): if axis1 in field1_dimension_coordinates: insert_dim[axis1] = [axis0] - for key1 in field1_auxiliary_coordinates.filter_by_axis( - axis1, mode="exact", view=True +# for key1 in field1_auxiliary_coordinates.filter_by_axis( +# axis1, mode="exact", view=True +# ): + for key1 in field1.auxiliary_coordinates(filter_by_axis=( + axis1,), axis_mode="exact", todict=True ): insert_aux[key1] = [axis0] @@ -1548,7 +1552,13 @@ def _binary_operation_old(self, other, method): # Remove all field0 auxiliary coordinates and domain # ancillaries which span this axis - 
remove_items.update(c.filter_by_axis(axis0, mode="and", view=True)) +# remove_items.update(c.filter_by_axis("and", axis0, todict=True)) + remove_items.update(field0.constructs.filter( + filter_by_type=("auxiliary_coordinate", "domain_ancillary"), + filter_by_axis=(axis0,), + todict=True, + ) + ) # Remove all field0 coordinate references which span this # axis, and their domain ancillaries (even if those domain @@ -1558,7 +1568,7 @@ def _binary_operation_old(self, other, method): ref0 = refs0.pop(key0) remove_items.add(key0) remove_items.update( - field0_domain_ancillaries( + field0.domain_ancillaries( *tuple( ref0.coordinate_conversion.domain_ancillaries().values() ), @@ -1581,7 +1591,9 @@ def _binary_operation_old(self, other, method): # spanning the same axes which has the same identity and a # size-1 data array. # ------------------------------------------------------------- - auxs1 = dict(field1_auxiliary_coordinates.items()) + field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) + auxs1 = field1_auxiliary_coordinates.copy() +# auxs1 = dict(field1_auxiliary_coordinates.items()) logger.debug( "5: remove_items = {}".format(remove_items) ) # pragma: no cover @@ -1609,7 +1621,7 @@ def _binary_operation_old(self, other, method): continue found_equivalent_auxiliary_coordinates = False - for key1, aux1 in auxs1.copy().items(): + for key1, aux1 in tuple(auxs1.items()): if key1 in v["id_to_aux"].values(): # Field1 auxiliary coordinate has already been checked del auxs1[key1] @@ -1649,11 +1661,12 @@ def _binary_operation_old(self, other, method): # Copy field1 auxiliary coordinates which do not span any # matching axes to field0 # ------------------------------------------------------------ + filed1_data_axes = field1.constructs.data_axes() for key1 in field1_auxiliary_coordinates: if key1 in insert_aux: continue - axes1 = field1.constructs.data_axes()[key1] + axes1 = field1_data_axes[key1] if set(axes1).isdisjoint(matching_axis1_to_axis0): insert_aux[key1] = [axis1_to_axis0[axis1] for axis1 in axes1] @@ -1904,16 +1917,16 @@ def _binary_operation(self, other, method): for i, (f, out) in enumerate(zip((field0, field1), (out0, out1))): data_axes = f.get_data_axes() - f_dimension_coordinates = f.dimension_coordinates(view=True) - f_auxiliary_coordinates = f.auxiliary_coordinates(view=True) +# f_dimension_coordinates = f.dimension_coordinates(todict=True) +# f_auxiliary_coordinates = f.auxiliary_coordinates(todict=True) for axis in f.domain_axes(todict=True): identity = None key = None coord = None coord_type = None - coords = f_dimension_coordinates.filter_by_axis( - axis, mode="exact", view=True + coords = f.dimension_coordinates(filter_by_axis=( + axis,), axis_mode="exact", todict=True ) if len(coords) == 1: # This axis of the domain has a dimension coordinate @@ -1932,8 +1945,8 @@ def _binary_operation(self, other, method): if identity is None and relaxed_identities: identity = coord.identity(relaxed=True, default=None) else: - coords = f_auxiliary_coordinates.filter_by_axis( - axis, mode="exact", view=True + coords = f.auxiliary_coordinates(filter_by_axis=( + axis,), axis_mode="exact", todict=True ) if len(coords) == 1: # This axis of the domain does not have a @@ -2142,11 +2155,9 @@ def _binary_operation(self, other, method): if identity in out0 and isinstance(identity, str): a = out0[identity] if y.size > 1 and a.size == 1: - for key0, c in tuple( - field0.constructs.filter_by_axis( - a.axis, mode="or", view=True - ).items() - ): + for key0, c in 
field0.constructs.filter_by_axis( + "or", a.axis, todict=True + ).items(): removed_refs0 = field0.del_coordinate_reference( construct=key0, default=None ) @@ -2204,15 +2215,23 @@ def _binary_operation(self, other, method): logger.info("\nnew_axes =", new_axes) if new_axes: - constructs = field1.constructs.filter_by_type( - "dimension_coordinate", - "auxiliary_coordinate", - "cell_measure", - view=True, - ) - constructs = constructs.filter_by_axis( - *new_axes, mode="subset", view=True + constructs = field1.constructs.filter( + filter_by_type=("dimension_coordinate", + "auxiliary_coordinate", + "cell_measure"), + filter_by_axis=new_axes, + axis_mode="subset", + todict=True ) +# constructs = field1.constructs.filter_by_type( +# "dimension_coordinate", +# "auxiliary_coordinate", +# "cell_measure", +# view=True, +# ) +# constructs = constructs.filter_by_axis( +# *new_axes, mode="subset", view=True +# ) for key, c in constructs.items(): c_axes = field1.get_data_axes(key) axes = [axis_map[axis1] for axis1 in c_axes] @@ -2451,8 +2470,10 @@ def _equivalent_coordinate_references( return False # Compare the domain ancillaries - domain_ancillaries = self.domain_ancillaries(view=True) - field1_domain_ancillaries = field1.domain_ancillaries(view=True) +# domain_ancillaries = self.domain_ancillaries(todict=True) +# field1_domain_ancillaries = field1.domain_ancillaries(todict=True) + +# TODO consider case of None key ? for ( term, @@ -2463,19 +2484,19 @@ def _equivalent_coordinate_references( identifier1 = ref1.coordinate_conversion.domain_ancillaries()[term] - key0 = domain_ancillaries.filter_by_key(identifier0).key() - key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() +# key0 = domain_ancillaries.filter_by_key(identifier0).key() +# key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() if not self._equivalent_construct_data( - field1, - key0=key0, - key1=key1, - rtol=rtol, - atol=atol, - s=s, - t=t, - verbose=verbose, - axis_map=axis_map, + field1, + key0=identifier0, #key0, + key1=identifier1, #key1, + rtol=rtol, + atol=atol, + s=s, + t=t, + verbose=verbose, + axis_map=axis_map, ): # add traceback TODO return False @@ -2518,7 +2539,7 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): f"{shape}. Consider setting the 'axes' parameter." ) - domain_axes = self.domain_axes(view=True) + domain_axes = self.domain_axes(todict=True) axes = [] axes_sizes = [ domain_axis.get_size(None) @@ -2532,9 +2553,13 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) if axes_sizes.count(n) == 1: - axes.append( - domain_axes.filter_by_size(n, view=True).key() + domain_axes = self.domain_axes( + filter_by_size=(n,), todict=True ) + key, _ = domain_axes.popitem() + axes.append(key) +# domain_axes.filter_by_size(n, view=True).key() +# ) else: raise ValueError( f"Can't insert {item!r}: Ambiguous shape: " @@ -2794,12 +2819,12 @@ def _conform_for_assignment(self, other, check_coordinates=False): # contain the defining coordinate. 
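The comment above describes checking which coordinate reference constructs list a given coordinate construct. A sketch of that membership test, assuming ref.coordinates() returns the set of coordinate construct keys and using cf.example_field(1), which carries coordinate references, purely for illustration.

import cf

f = cf.example_field(1)

# Any coordinate construct key will do for the illustration
coord_key = next(iter(f.coordinates(todict=True)))

refs = [
    ref_key
    for ref_key, ref in f.coordinate_references(todict=True).items()
    if coord_key in ref.coordinates()
]
print(coord_key, "appears in coordinate references:", refs)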
refs0 = [ key - for key, ref in self.coordinate_references(view=True).items() + for key, ref in self.coordinate_references(todict=True).items() if key0 in ref.coordinates() ] refs1 = [ key - for key, ref in other.coordinate_references(view=True).items() + for key, ref in other.coordinate_references(todict=True).items() if key1 in ref.coordinates() ] @@ -3040,7 +3065,7 @@ def _regrid_get_latlong(self, name, axes=None): if axes is None: # Retrieve the field construct's X and Y dimension coordinates - xdims = self.dimension_coordinates(view=True)("X") + xdims = self.dimension_coordinates("X", todict=True) len_x = len(xdims) if not len_x: raise ValueError( @@ -3058,7 +3083,7 @@ def _regrid_get_latlong(self, name, axes=None): "coordinates" ) - ydims = self.dimension_coordinates(view=True)("Y") + ydims = self.dimension_coordinates("Y", todict=True) len_y = len(ydims) if not len_y: @@ -3076,11 +3101,8 @@ def _regrid_get_latlong(self, name, axes=None): f"{name} field has multiple 'Y' dimension coordinates" ) - # TODO review for view/todict. Is x_axis same as x_key? - x = xdims.value() - y = ydims.value() - x_key = xdims.key() - y_key = ydims.key() + x_key, x = xdims.popitem() + y_key, y = xdims.popitem() x_axis = self.domain_axis(x_key, key=True) y_axis = self.domain_axis(y_key, key=True) @@ -3103,14 +3125,11 @@ def _regrid_get_latlong(self, name, axes=None): if axes["X"] == axes["Y"]: raise ValueError("TODO") - auxiliary_coordinates = self.auxiliary_coordinates( - view=True, cache=auxiliary_coordinates + x = self.auxiliary_coordinates( + "X", filter_by_naxes=(2,) todict=True ) - x = auxiliary_coordinates("X", view=True).filter_by_naxes( - 2, view=True - ) - y = auxiliary_coordinates("Y", view=True).filter_by_naxes( - 2, view=True + y = self.auxiliary_coordinates( + "Y", filter_by_naxes=(2,) todict=True ) if len(x) != 1: raise ValueError("TODO") @@ -3176,12 +3195,12 @@ def _regrid_get_latlong(self, name, axes=None): lon_found = False lat_found = False - auxiliary_coordinates = self.auxiliary_coordinates( - view=True, cache=auxiliary_coordinates - ) + # auxiliary_coordinates = self.auxiliary_coordinates( +# view=True, cache=auxiliary_coordinates +# ) - for key, aux in auxiliary_coordinates.filter_by_naxes( - 2, view=True + for key, aux in self.auxiliary_coordinates( + filter_by_naxes=(2,), todict=True ).items(): if aux.Units.islongitude: if lon_found: @@ -3515,15 +3534,27 @@ def _regrid_get_reordered_sections( # possibibly reduce the number of trasnistions between different masks # - each change is slow. 
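The _regrid_get_latlong changes above first look for unique 'X' and 'Y' dimension coordinates and only then fall back to two-dimensional latitude/longitude auxiliary coordinates. A condensed sketch of that discovery order, written with the keyword-filter spelling used in this patch series and cf.example_field(0) as a stand-in source field.

import cf

f = cf.example_field(0)

xdims = f.dimension_coordinates("X", todict=True)
ydims = f.dimension_coordinates("Y", todict=True)

if len(xdims) == 1 and len(ydims) == 1:
    # Unique 1-d X and Y dimension coordinates
    x_key, x = xdims.popitem()
    y_key, y = ydims.popitem()
    print("1-d coordinates:", x.identity(), y.identity())
else:
    # Fall back to 2-d auxiliary coordinates spanning two axes
    xauxs = f.auxiliary_coordinates("X", filter_by_naxes=(2,), todict=True)
    yauxs = f.auxiliary_coordinates("Y", filter_by_naxes=(2,), todict=True)
    print("2-d candidates:", len(xauxs), len(yauxs))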
- dimensions_coordinates = self.dimension_coordinates(view=True) +# dimensions_coordinates = self.dimension_coordinates(view=True) axis_indices = [] if axis_order is not None: for axis in axis_order: # axis_key = self.dim(axis, key=True) - axis_key = dimension_coordinates.filter_by_axis( - axis, mode="exact", view=True - ).key(None) +# dims = self.dimension_coordinates( +# filter_by_axis=(axis,), axis_mode="exact", todict=True +# ) +# if len(dims)!= 1: +# axis_key = None +# else: +# axis_key, _ = dims.popitem() + + axis_key = self.dimension_coordinate( + filter_by_axis=(axis,), + axis_mode="exact", + default=None, + key=True, + todict=True + ) if axis_key is not None: if axis_key in regrid_axes: raise ValueError("Cannot loop over regridding axes.") @@ -3787,17 +3818,21 @@ def _regrid_update_coordinate_references( self.del_construct(key) continue + domain_ancillaries = self.domain_ancillaries(todict=True) + for ( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): - # TODo review for view/todict - domain_ancillaries = self.domain_ancillaries( - view=True, cache=domain_ancillaries - ) +# domain_ancillaries = self.domain_ancillaries( +# view=True, cache=domain_ancillaries +# ) +# +# key = domain_ancillaries(value, view=True).key(default=None) - key = domain_ancillaries(value, view=True).key(default=None) - if key is None: + if value in domain_ancillaries: + key = value + else: continue # If this domain ancillary spans both X and Y axes @@ -3808,9 +3843,15 @@ def _regrid_update_coordinate_references( # if domain_ancillaries.filter_by_key(key).filter_by_axis( # x, y, mode="exact", view=True # ): - if domain_ancillaries.filter_by_axis( - x, y, mode="exact", view=True - ).get(key): +# if len( +# self.domain_ancillaries( +# filter_by_axis=(x, y), axis_mode="exact", +# todict=True +# ) +# ) == 1: + if self.domain_ancillary(filter_by_axis=(x, y), + axis_mode="exact", key=True, + todict=True, default=False): # Convert the domain ancillary into an independent # field value = self.convert(key) @@ -3956,10 +3997,8 @@ def _regrid_update_coordinates( # Remove the source coordinates of new field # self.remove_items(axes=src_axis_keys) # for key in self.constructs.filter_by_axis('or', *src_axis_keys): - for key in tuple( - self.coordinates(view=True) - .filter_by_axis(*src_axis_keys, mode="or", view=True) - .keys() + for key in self.coordinates( + filter_by_axis=src_axis_keys, axis_mode="or", todict=True ): self.del_construct(key) @@ -3985,12 +4024,12 @@ def _regrid_update_coordinates( domain_axes[k_s].set_size(d.size) self.set_construct(d, axes=[k_s]) - dst_auxiliary_coordinates = dst.auxiliary_coordinates( - view=True, cache=dst_auxiliary_coordinates - ) +# dst_auxiliary_coordinates = dst.auxiliary_coordinates( +# view=True, cache=dst_auxiliary_coordinates +# ) - for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( - *dst_axis_keys, mode="subset", view=True + for aux_key, aux in dst.auxiliary_coordinates( + filter_by_axis=dst_axis_keys, axis_mode="subset", todict=True ).items(): aux_axes = [ axis_map[k_d] for k_d in dst.get_data_axes(aux_key) @@ -4016,9 +4055,9 @@ def _regrid_update_coordinates( for coord, axis_key in zip(dst_coords, src_axis_keys): self.set_construct(coord, axes=[axis_key]) else: - dst_auxiliary_coordinates = dst.auxiliary_coordinates( - view=True, cache=dst_auxiliary_coordinates - ) +# dst_auxiliary_coordinates = dst.auxiliary_coordinates( +# view=True, cache=dst_auxiliary_coordinates +# )# for src_axis_key, dst_axis_key in zip( src_axis_keys, 
dst_axis_keys @@ -4036,13 +4075,15 @@ def _regrid_update_coordinates( if dim_coord is not None: self.set_construct(dim_coord, axes=[src_axis_key]) - for aux in dst_auxiliary_coordinates.filter_by_axis( - dst_axis_key, mode="exact", view=True + for aux in dst.auxiliary_coordinates( + filter_by_axis=(dst_axis_key,), + axis_mode="exact", todict=True ).values(): self.set_construct(aux, axes=[src_axis_key]) - for aux_key, aux in dst_auxiliary_coordinates.filter_by_axis( - *dst_axis_keys, mode="exact", view=True + for aux_key, aux in dst.auxiliary_coordinates( + filter_by_axis=dst_axis_keys, + axis_mode="exact", todict=True ).items(): aux_axes = dst.get_data_axes(aux_key) if aux_axes == tuple(dst_axis_keys): @@ -4097,10 +4138,10 @@ def _weights_area_XY( `bool` or `None` """ - dimension_coordinates = self.dimension_coordinates(view=True) +# dimension_coordinates = self.dimension_coordinates(view=True) - xdims = dict(dimension_coordinates("X", view=True)) - ydims = dict(dimension_coordinates("Y", view=True)) + xdims = self.dimension_coordinates("X", todict=True) + ydims = self.dimension_coordinates("Y", todict=True) if not (xdims and ydims): if auto: @@ -4305,21 +4346,24 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): """Creates a weights field.""" s = self.analyse_items() - domain_axes = self.domain_axes(view=True) +# domain_axes = self.domain_axes(todict=True) + domain_axes_size_1 = self.domain_axes(filger_by_size=(1,), todict=True) for w in fields: t = w.analyse_items() + domain_axes_size_1 = w.domain_axes(filter_by_size=(1,), todict=True) + if t["undefined_axes"]: - if set( - t.domain_axes.filter_by_size(gt(1), view=True) - ).intersection(t["undefined_axes"]): +# if set( +# t.domain_axes.filter_by_size(gt(1), view=True) +# ).intersection(t["undefined_axes"]): + if set(domain_axes_size_1).intersection(t["undefined_axes"]): raise ValueError("345jn456jn TODO") - # TODO BUG: "t.domain_axes" w = w.squeeze() - w_domain_axes = w.domain_axes(view=True) + w_domain_axes = w.domain_axes(todict=True) axis1_to_axis0 = {} @@ -5125,7 +5169,7 @@ def _weights_measure( `bool` """ - m = self.cell_measures(view=True).filter_by_measure(measure, view=True) + m = self.cell_measures(filter_by_measure=(measure,), todict=True) len_m = len(m) if not len_m: @@ -5237,9 +5281,8 @@ def _weights_yyy( y_axis = None z_axis = None - auxiliary_coordinates = self.auxiliary_coordinates(view=True) - auxiliary_coordinates = auxiliary_coordinates.filter_by_naxes( - 1, view=True + auxiliary_coordinates = self.auxiliary_coordinates( + filter_by_naxes=(1,), todict=True ) for key, aux in auxiliary_coordinates.items(): @@ -7092,10 +7135,8 @@ def weights( ): field.del_coordinate_reference(key) - for key in tuple( - field.constructs.filter_by_axis( - *not_needed_axes, mode="or", view=True - ).keys() + for key in field.constructs.filter_by_axis( + "or", *not_needed_axes, todict=True ): field.del_construct(key) @@ -8132,9 +8173,7 @@ def bin( # Create a cell method (if possible) # ------------------------------------------------------------ standard_names = [] - domain_axes = self.domain_axes(view=True).filter_by_size( - ge(2), view=True - ) + domain_axes = self.domain_axes(filter_by_size=(ge(2),), todict=True) for da_key in domain_axes: dim = self.dimension_coordinate(da_key, default=None) @@ -10386,8 +10425,8 @@ def collapse( all_axes = [] for axes in input_axes: if axes is None: - domain_axes = self.domain_axes(view=True, cache=domain_axes) - all_axes.append(list(domain_axes.keys())) + domain_axes = 
self.domain_axes(todict=True, cache=domain_axes) + all_axes.append(list(domain_axes)) continue axes2 = [] @@ -10427,7 +10466,7 @@ def collapse( # ------------------------------------------------------------ # # ------------------------------------------------------------ - domain_axes = f.domain_axes(view=True, cache=domain_axes) + domain_axes = f.domain_axes(todict=True, cache=domain_axes) auxiliary_coordinates = f.auxiliary_coordinates(view=True) dimension_coordinates = f.dimension_coordinates(view=True) @@ -10489,11 +10528,17 @@ def collapse( if _create_zero_size_cell_bounds: # Create null bounds if requested for axis in axes: - dc = dimension_coordinates.filter_by_axis( - axis, mode="and", view=True - ).value(None) - if dc is not None and not dc.has_bounds(): - dc.set_bounds(dc.create_bounds(cellsize=0)) +# dc = f.dimension_coordinates( +# filter_by_axis=(axis,), axis_mode="and", todict=Tru#e +# ).value(None) + dc = f.dimension_coordinates( + filter_by_axis=(axis,), axis_mode="and", + todict=True + ) + if len(dc) == 1: + _, dc = dc.popitem() + if not dc.has_bounds(): + dc.set_bounds(dc.create_bounds(cellsize=0)) continue @@ -15770,9 +15815,9 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): return f - def auxiliary_coordinate( - self, identity=None, default=ValueError(), key=False - ): + def auxiliary_coordinate( self, identity=None, + default=ValueError(), key=False, axis_mode=None, + property_mode=None, **filters ): """Return an auxiliary coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -15876,37 +15921,15 @@ def auxiliary_coordinate( TODO """ - auxiliary_coordinates = self.auxiliary_coordinates(view=True) - - c = auxiliary_coordinates - - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - c = auxiliary_coordinates.filter_by_axis( - da_key, mode="exact", view=True - ) - - if key: - out = c.key(default=None) - if out is None: - return self._default( - default, f"No {identity!r} auxiliary coordinate construct" - ) - - return out - - out = c.value(default=None) - if out is None: - return self._default( - default, f"No {identity!r} auxiliary coordinate construct" - ) - - return out - - def construct(self, identity=None, default=ValueError(), key=False): + return self._construct(("auxiliary_coordinate",), + "auxiliary_coordinate", + identity=identity, key=key, + default=default, axis_mode=axis_mode, + property_mode=property_mode, **filters) + + def construct(self, identity=None, default=ValueError(), + key=False, axis_mode=None, + property_mode=None, **filters): """Select a metadata construct by its identity. .. seealso:: `del_construct`, `get_construct`, `has_construct`, @@ -16021,23 +16044,10 @@ def construct(self, identity=None, default=ValueError(), key=False): TypeError: No height coordinates """ - c = self.constructs - - if identity is not None: - c = c(identity, view=True) - - if key: - out = c.key(default=None) - if out is None: - return self._default(default, f"No {identity!r} construct") - - return out - - out = c.value(default=None) - if out is None: - return self._default(default, f"No {identity!r} construct") - - return out + return self._construct((), "construct", identity=identity, + key=key, default=default, + axis_mode=axis_mode, + property_mode=property_mode, **filters) def domain_ancillary(self, identity=None, default=ValueError(), key=False): """Return a domain ancillary construct, or its key. 
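The singular selectors rewritten above (auxiliary_coordinate, construct, and later dimension_coordinate and friends) all collapse into thin wrappers around one generic _construct helper that fixes the construct types and forwards the remaining filters. The class below is a self-contained illustration of that shape only; it is not cf code and all names in it are invented for the example.

class ConstructSelector:
    """Toy container mapping construct keys to (type, value) pairs."""

    def __init__(self, constructs):
        self._constructs = constructs

    def _construct(self, ctypes, identity=None):
        # Generic selector: restrict by construct type, then by identity
        matches = {
            key: value
            for key, (ctype, value) in self._constructs.items()
            if ctype in ctypes and (identity is None or identity == value)
        }
        if len(matches) != 1:
            raise ValueError("no unique construct found")

        _, construct = matches.popitem()
        return construct

    def dimension_coordinate(self, identity=None):
        # Thin wrapper: fix the construct type, delegate everything else
        return self._construct(("dimension_coordinate",), identity)

    def auxiliary_coordinate(self, identity=None):
        return self._construct(("auxiliary_coordinate",), identity)


selector = ConstructSelector(
    {
        "dimensioncoordinate0": ("dimension_coordinate", "time"),
        "auxiliarycoordinate0": ("auxiliary_coordinate", "latitude"),
    }
)
print(selector.dimension_coordinate("time"))      # -> 'time'
print(selector.auxiliary_coordinate("latitude"))  # -> 'latitude'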
@@ -16756,7 +16766,7 @@ def field_ancillary(self, identity=None, default=ValueError(), key=False): return c.value(default=default) def dimension_coordinate( - self, identity=None, key=False, default=ValueError() + self, identity=None, key=False, default=ValueError(), axis_mode=None, property_mode=None, **filters ): """Return a dimension coordinate construct, or its key. @@ -16857,24 +16867,126 @@ def dimension_coordinate( TODO """ - dimension_coordinates = self.dimension_coordinates(view=True) - c = dimension_coordinates + return self._construct(("dimension_coordinate",), + "dimension_coordinate", + identity=identity, key=key, + default=default, axis_mode=axis_mode, + property_mode=property_mode, **filters) + + def _construct(self, _ctypes, _method, identity=None, key=False, + default=ValueError(), axis_mode=None, + property_mode=None, **filters): + """Return a domain axis construct, or its key. - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - c = dimension_coordinates.filter_by_axis( - da_key, mode="exact", view=True - ) + .. versionadded:: 3.0.0 - if key: - return c.key(default=default) + .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, + `cell_method`, `coordinate`, `coordinate_reference`, + `dimension_coordinate`, `domain_ancillary`, + `domain_axes`, `field_ancillary` - return c.value(default=default) + :Parameters: + + identity: + Select the domain axis construct by one of: + + * An identity or key of a 1-d coordinate construct that + whose data spans the domain axis construct. - def domain_axis(self, identity, key=False, default=ValueError()): + * A domain axis construct identity or key. + + * The position of the domain axis construct in the field + construct's data. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); or a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. + + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='dimensioncoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='key%domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + *Parameter example:* + ``identity=2`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. 
+ + :Returns: + + `DomainAxis` or `str` + The selected domain axis construct, or its key. + + **Examples:** + + TODO + + """ + if ctypes and "filter_by_type" in filters: + raise TypeError( + f"{_method}() got an unexpected keyword argument " + "'filter_by_type'" + ) + + kwargs = { + "identity": identity, + "default": default, + "axis_mode": axis_mode, + "property_mode": property_mode, + "todict": True, + "filter_by_type": ctypes, + } + kwargs.update(filters) + + if key: + return self.construct_key(**kwargs) + + return self.construct(**kwargs) + + def domain_axis(self, identity=None, key=False, + default=ValueError(), axis_mode=None, + property_mode=None, **filters): """Return a domain axis construct, or its key. .. versionadded:: 3.0.0 @@ -16973,30 +17085,26 @@ def domain_axis(self, identity, key=False, default=ValueError()): "Index does not exist for field construct data dimenions", ) else: + # TODO consider using filter_by_key identity = da_key - self_domain_axes = self.domain_axes(view=True) - - domain_axes = self_domain_axes.filter_by_identity(identity, view=True) - if len(domain_axes) == 1: - # identity is a unique domain axis construct identity - da_key = domain_axes.key() - else: - # identity is not a unique domain axis construct identity - da_key = self.domain_axis_key(identity, default=None) - - if da_key is None: - return self._default( - default, - "No unique domain axis construct is identifable from " - f"{identity!r}", - ) - - if key: - return da_key - - return self_domain_axes[da_key] + c = self._construct(("domain_axis",), "domain_axis", + identity=identity, key=key, default=None, + axis_mode=axis_mode, + property_mode=property_mode, **filters) + if c is not None: + return c + + da_key = self.domain_axis_key(identity, default=None) + if da_key is not None: + return self.domain_axes(todict=True)[da_key] + + return self._default( + default, + "No unique domain axis construct is identifable" + ) + def domain_axis_position(self, identity): """Return the position in the data of a domain axis construct. From 3c8b7a813c0280bce7eaac1ba6d1ce1c3a55b0bc Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 30 Mar 2021 23:09:35 +0100 Subject: [PATCH 11/53] devs --- cf/constructs.py | 336 ++++++++++++++++++----------------------------- 1 file changed, 131 insertions(+), 205 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index 57c9f5de3e..db59fc9db7 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -155,210 +155,122 @@ def _matching_values(cls, value0, construct, value1, basic=False): # # return self.constructs[da_key] - def filter_by_identity(self, *identities, view=False, - todict=False, cache=None, **identities_kwargs): - """Select metadata constructs by identity. - - .. versionadded:: 3.0.0 - - .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, - `filter_by_measure`, `filter_by_method`, - `filter_by_naxes`, `filter_by_ncdim`, - `filter_by_ncvar`, `filter_by_property`, - `filter_by_size`, `filter_by_type`, - `filters_applied`, `inverse_filter`, `unfilter` - - :Parameters: - - identities: optional - Select constructs that have any of the given identities or - construct keys. - - An identity is specified by a string (e.g. ``'latitude'``, - ``'long_name=time'``, etc.); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``), for which - all constructs whose identities match (via `re.search`) - are selected. - - If no identities are provided then all constructs are selected. 
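The docstring being commented out here still documents how identities are matched: plain names, property=value forms, compiled regular expressions and construct keys (with or without the 'key%' prefix) may all be mixed. A short sketch against cf.example_field(0), whose auto-generated keys include dimensioncoordinate0; the exact constructs returned depend on that example field.

import re

import cf

c = cf.example_field(0).constructs

print(c.filter_by_identity("latitude"))                  # by name
print(c.filter_by_identity(re.compile("^lat")))          # by regular expression
print(c.filter_by_identity("key%dimensioncoordinate0"))  # by construct key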
- - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - five identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that the identifiers of a metadata construct in the - output of a `print` or `!dump` call are always one of its - identities, and so may always be used as an *identities* - argument. - - Domain axis constructs may also be identified by their - position in the field construct's data array. Positions - are specified by either integers. - - .. note:: This is an extension to the functionality of - `cfdm.Constucts.filter_by_identity`. - - {{view: `bool`, optional}} - - {{todict: `bool`, optional}} - - {{cache: optional}} - - identities_kwargs: optional - Additional parameters for configuring each construct's - `identities` method. By default ``generator=True`` is - passed by default, and ``ctype`` is inferred from the - *identities* parameter. - - .. versionadded:: 3.9.0 - - :Returns: - - `Constructs` - The selected constructs and their construct keys. - - **Examples:** - - Select constructs that have a "standard_name" property of - 'latitude': - - >>> d = c.filter_by_identity('latitude') - - Select constructs that have a "long_name" property of 'Height': - - >>> d = c.filter_by_identity('long_name=Height') - - Select constructs that have a "standard_name" property of - 'latitude' or a "foo" property of 'bar': - - >>> d = c.filter_by_identity('latitude', 'foo=bar') - - Select constructs that have a netCDF variable name of 'time': - - >>> d = c.filter_by_identity('ncvar%time') - - """ - if cache is not None: - return cache - - # Allow keys without the 'key%' prefix - for n, identity in enumerate(identities): - if identity in self: - identities = list(identities) - identities[n] = "key%" + identity - break - - ctype = [i for i in "XTYZ" if i in identities] - - return super().filter_by_identity( - *identities, view=view, todict=todict, ctype=ctype, **identities_kwargs - ) - +# def filter_by_identity(self, *identities, view=False, +# todict=False, cache=None, **identities_kwargs): +# """Select metadata constructs by identity. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, +# `filter_by_measure`, `filter_by_method`, +# `filter_by_naxes`, `filter_by_ncdim`, +# `filter_by_ncvar`, `filter_by_property`, +# `filter_by_size`, `filter_by_type`, +# `filters_applied`, `inverse_filter`, `unfilter` +# +# :Parameters: +# +# identities: optional +# Select constructs that have any of the given identities or +# construct keys. +# +# An identity is specified by a string (e.g. ``'latitude'``, +# ``'long_name=time'``, etc.); or a compiled regular +# expression (e.g. ``re.compile('^atmosphere')``), for which +# all constructs whose identities match (via `re.search`) +# are selected. +# +# If no identities are provided then all constructs are selected. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. 
In the following example, the construct ``x`` has +# five identities: +# +# >>> x.identities() +# ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'dimensioncoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# Note that the identifiers of a metadata construct in the +# output of a `print` or `!dump` call are always one of its +# identities, and so may always be used as an *identities* +# argument. +# +# Domain axis constructs may also be identified by their +# position in the field construct's data array. Positions +# are specified by either integers. +# +# .. note:: This is an extension to the functionality of +# `cfdm.Constucts.filter_by_identity`. +# +# {{view: `bool`, optional}} +# +# {{todict: `bool`, optional}} +# +# {{cache: optional}} +# +# identities_kwargs: optional +# Additional parameters for configuring each construct's +# `identities` method. By default ``generator=True`` is +# passed by default, and ``ctype`` is inferred from the +# *identities* parameter. +# +# .. versionadded:: 3.9.0 +# +# :Returns: +# +# `Constructs` +# The selected constructs and their construct keys. +# +# **Examples:** +# +# Select constructs that have a "standard_name" property of +# 'latitude': +# +# >>> d = c.filter_by_identity('latitude') +# +# Select constructs that have a "long_name" property of 'Height': +# +# >>> d = c.filter_by_identity('long_name=Height') +# +# Select constructs that have a "standard_name" property of +# 'latitude' or a "foo" property of 'bar': +# +# >>> d = c.filter_by_identity('latitude', 'foo=bar') +# +# Select constructs that have a netCDF variable name of 'time': +# +# >>> d = c.filter_by_identity('ncvar%time') +# +# """ +# if cache is not None: +# return cache +# +# # Allow keys without the 'key%' prefix +# for n, identity in enumerate(identities): +# if identity in self: +# identities = list(identities) +# identities[n] = "key%" + identity +# break +# +# ctype = [i for i in "XTYZ" if i in identities] +# +# return super().filter_by_identity( +# identities, todict=todict, +# _config={"identities_kwargs": {"ctype": ctype}, +# "bypass": lambda x: x in ctype} +# ) + @classmethod def _filter_by_identity(cls, self, *identities, todict=False, - cache=None, _identities_config={}): - """Select metadata constructs by identity. - - .. versionadded:: 3.0.0 - - .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, - `filter_by_measure`, `filter_by_method`, - `filter_by_naxes`, `filter_by_ncdim`, - `filter_by_ncvar`, `filter_by_property`, - `filter_by_size`, `filter_by_type`, - `filters_applied`, `inverse_filter`, `unfilter` - - :Parameters: - - identities: optional - Select constructs that have any of the given identities or - construct keys. - - An identity is specified by a string (e.g. ``'latitude'``, - ``'long_name=time'``, etc.); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``), for which - all constructs whose identities match (via `re.search`) - are selected. - - If no identities are provided then all constructs are selected. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - five identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] - - A construct key may optionally have the ``'key%'`` - prefix. 
For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that the identifiers of a metadata construct in the - output of a `print` or `!dump` call are always one of its - identities, and so may always be used as an *identities* - argument. - - Domain axis constructs may also be identified by their - position in the field construct's data array. Positions - are specified by either integers. - - .. note:: This is an extension to the functionality of - `cfdm.Constucts.filter_by_identity`. - - {{view: `bool`, optional}} - - {{todict: `bool`, optional}} - - {{cache: optional}} - - identities_kwargs: optional - Additional parameters for configuring each construct's - `identities` method. By default ``generator=True`` is - passed by default, and ``ctype`` is inferred from the - *identities* parameter. - - .. versionadded:: 3.9.0 - - :Returns: - - `Constructs` - The selected constructs and their construct keys. - - **Examples:** - - Select constructs that have a "standard_name" property of - 'latitude': - - >>> d = c.filter_by_identity('latitude') - - Select constructs that have a "long_name" property of 'Height': - - >>> d = c.filter_by_identity('long_name=Height') - - Select constructs that have a "standard_name" property of - 'latitude' or a "foo" property of 'bar': - - >>> d = c.filter_by_identity('latitude', 'foo=bar') - - Select constructs that have a netCDF variable name of 'time': - - >>> d = c.filter_by_identity('ncvar%time') + _config={}): + """TODO. """ - if cache is not None: - return cache - # Allow keys without the 'key%' prefix for n, identity in enumerate(identities): if identity in self: @@ -366,13 +278,27 @@ def _filter_by_identity(cls, self, *identities, todict=False, identities[n] = "key%" + identity break - config = {"ctype": [i for i in "XTYZ" if i in identities]} - config.update(_identities_config) - - return super(Constructs, cls)._filter_by_identity( + ctype = [i for i in "XTYZ" if i in identities] + + config = { "identities_kwargs": {"ctype": ctype}} + + if ctype: + # Exclude a ctype from the short circuit test + config["short_circuit_test"] = ( + lambda x: ( + x not in ctype + and "=" not in x + and ":" not in x + and "%" not in x + ) + ) + + config.update(_config) + + return super()._filter_by_identity( self, *identities, todict=todict, cache=cache, - _identities_config=config + _config=config ) From 4a775ee4aaa2e7f91b0cf0fe790e8bcab82f6cb4 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 1 Apr 2021 11:46:42 +0100 Subject: [PATCH 12/53] docs --- cf/aggregate.py | 67 +++-- cf/constructs.py | 36 +-- cf/field.py | 416 +++++++++------------------- cf/mixin/coordinate.py | 25 +- cf/mixin/propertiesdata.py | 2 +- cf/test/test_AuxiliaryCoordinate.py | 14 +- cf/test/test_DimensionCoordinate.py | 12 +- cf/test/test_Field.py | 125 +++++---- 8 files changed, 273 insertions(+), 424 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 67a4d998a3..97f212bf87 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -301,7 +301,7 @@ def __init__( # Dictionaries mapping auxiliary coordinate identifiers # to their auxiliary coordinate objects - aux_1d = dict(f.auxiliary_coordinates(view=True).filter_by_naxes(1)) + aux_1d = f.auxiliary_coordinates(filter_by_naxes=(1,), todict=True) # A set containing the identity of each coordinate # @@ -314,13 +314,13 @@ def __init__( # ------------------------------------------------------------ # Coordinate references (formula_terms and grid mappings) # 
------------------------------------------------------------ - refs = f.coordinate_references(view=True) + refs = f.coordinate_references(todict=True) if not refs: self.coordrefs = () else: self.coordrefs = list(refs.values()) - for axis in f.domain_axes(view=True): + for axis in f.domain_axes(todict=True): # List some information about each 1-d coordinate which # spans this axis. The order of elements is arbitrary, as @@ -333,11 +333,20 @@ def __init__( info_dim = [] # dim_coord = item(axis) - dim_coords = f.dimension_coordinates(view=True).filter_by_axis( - axis, mode="and", view=True +# dim_coords = f.dimension_coordinates(view=True).filter_by_axis( +# axis, mode="and", view=True +# ) + dim_coords = f.dimension_coordinates( + filter_by_axis=(axis,), axis_mode="exact", + todict=True ) - dim_coord = dim_coords.value(None) - dim_coord_key = dim_coords.key(None) + if len(dim_coords) == 1: + dim_coord_key, dim_coord = dim_coords.popitem() + else: + dim_coord_key, dim_coord = None, None + +# dim_coord = dim_coords.value(None) +# dim_coord_key = dim_coords.key(None) dim_identity = None if dim_coord is not None: @@ -488,9 +497,11 @@ def __init__( # ------------------------------------------------------------ self.nd_aux = {} for key, nd_aux_coord in ( - f.auxiliary_coordinates(view=True).filter_by_naxes(gt(1)).items() - ): - + f.auxiliary_coordinates( + filter_by_naxes=(gt(1),), + todict=True + ).items() + ): # Find axes' canonical identities axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] axes = tuple(sorted(axes)) @@ -526,7 +537,7 @@ def __init__( # Field ancillaries # ------------------------------------------------------------ self.field_anc = {} - for key, field_anc in f.field_ancillaries(view=True).items(): + for key, field_anc in f.field_ancillaries(todict=True).items(): # Find this field ancillary's identity identity = self.field_ancillary_has_identity_and_data(field_anc) @@ -568,12 +579,12 @@ def __init__( # Firstly process domain ancillaries which are used in # coordinate references - for ref in f.coordinate_references(view=True).values(): + for ref in f.coordinate_references(todict=True).values(): for ( term, identifier, ) in ref.coordinate_conversion.domain_ancillaries().items(): - key = f.domain_ancillaries(view=True)(identifier).key(None) + key = f.domain_ancillary(identifier, key=True, default=None) if key is None: continue @@ -607,7 +618,7 @@ def __init__( # Secondly process domain ancillaries which are not being used # in coordinate references - for key, anc in f.domain_ancillaries(view=True).items(): + for key, anc in f.domain_ancillaries(todict=True).items(): if key in ancs_in_refs: continue @@ -639,7 +650,7 @@ def __init__( self.msr = {} info_msr = {} copied_field = False - for key, msr in f.cell_measures(view=True).items(): + for key, msr in f.cell_measures(todict=True).items(): # If the measure is an external variable, remove it because # the dimensions are not known so there is no way to tell if the # aggregation should have changed it. 
(This is sufficiently @@ -874,7 +885,9 @@ def canonical_cell_methods(self, rtol=None, atol=None): """ _canonical_cell_methods = self._canonical_cell_methods - cell_methods = self.field.cell_methods(view=True).ordered() + cell_methods = self.field.cell_methods().ordered() + # TODO get rid or ordered when Python 3.6 has gone + # cms = getattr(self.field, 'CellMethods', None) # TODO if not cell_methods: return None @@ -1941,13 +1954,14 @@ def aggregate( aggregating_axes = [] axis_items = meta[0].axis.items() for axis in axes: - # TODO IMPORTANT: should this be filter_by_axis ???? - coords = ( - meta[0] - .field.coordinates(view=True) - .filter_by_identity("exact", axis) + # TODO IMPORTANT: should this be filter_by_axis ???? Yes, surely ... + coord = ( + meta[0].field.coordinate( + filter_by_axis=(axis,), axis_mode="exact", + default=None + ) ) - coord = coords.value(default=None) +# coord = coords.value(default=None) if coord is None: continue @@ -2200,9 +2214,12 @@ def _create_hash_and_first_values( continue # Still here? - dim_coord = m.field.dimension_coordinates( - view=True - ).filter_by_axis(axis, mode="and", view=True) +# dim_coord = m.field.dimension_coordinates( +# ).filter_by_axis(axis, mode="and", view=True) + dim_coord = m.field.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", + default=None + ) # Find the sort indices for this axis ... if dim_coord is not None: diff --git a/cf/constructs.py b/cf/constructs.py index db59fc9db7..fb0a89f57f 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -1,3 +1,5 @@ +from functools import partial + import cfdm from .query import Query @@ -44,7 +46,7 @@ def __repr__(self): """ return super().__repr__().replace("<", " 1: - return self._default( - default, - "TODO 2" - ) - - key, construct = c.popitem() - if key: - return key - - return construct - -# c = domain_ancillaries -# -# if identity is not None: -# c = c(identity, view=True) -# if not c: -# da_key = self.domain_axis(identity, key=True, default=None) -# if da_key is not None: -# c = domain_ancillaries.filter_by_axis( -# da_key, mode="exact", view=True -# ) -# -# if key: -# return c.key(default=default) -# -# return c.value(default=default) + return self._construct(("domain_ancillary",), + "domain_ancillary", identity=identity, + key=key, default=default, + **filter_kwargs) - def cell_measure(self, identity=None, default=ValueError(), key=False): + def cell_measure(self, identity=None, default=ValueError(), + key=False, **filter_kwargs): """Select a cell measure construct by its identity. .. versionadded:: 3.0.0 @@ -16299,22 +16266,9 @@ def cell_measure(self, identity=None, default=ValueError(), key=False): TODO """ - cell_measures = self.cell_measures(view=True) - c = cell_measures - - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - c = cell_measures.filter_by_axis( - da_key, mode="exact", view=True - ) - - if key: - return c.key(default=default) - - return c.value(default=default) + return self._construct(("cell_measure",), "cell_meausure", + identity=identity, key=key, + default=default, **filter_kwargs) def cell_method(self, identity=None, default=ValueError(), key=False): """Select a cell method construct by its identity. 
@@ -16422,7 +16376,8 @@ def cell_method(self, identity=None, default=ValueError(), key=False): return c.value(default=default) - def coordinate(self, identity=None, default=ValueError(), key=False): + def coordinate(self, identity=None, default=ValueError(), + key=False, **filter_kwargs): """Return a dimension coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -16520,26 +16475,13 @@ def coordinate(self, identity=None, default=ValueError(), key=False): TODO """ - coordinates = self.coordinates(view=True) - c = coordinates - - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - c = coordinates.filter_by_axis( - da_key, mode="exact", view=True - ) - - if key: - return c.key(default=default) - - return c.value(default=default) + return self._construct(("dimension_coordinate", + "auxiliary_coordinate"), "coordinate", + identity=identity, key=key, + default=default, **filter_kwargs) - def coordinate_reference( - self, identity=None, default=ValueError(), key=False - ): + def coordinate_reference( self, identity=None, + default=ValueError(), key=False, **filter_kwargs ): """Return a coordinate reference construct, or its key. .. versionadded:: 3.0.0 @@ -16631,21 +16573,13 @@ def coordinate_reference( TODO """ - coordinate_references = self.coordinate_references(view=True) - c = coordinate_references - - if identity is not None: - c = c.filter_by_identity(identity, view=False) - for cr_key, cr in coordinate_references.items(): - if cr.match(identity): - c._set_construct(cr, key=cr_key, copy=False) - - if key: - return c.key(default=default) - - return c.value(default=default) + return self._construct(("coordinate_reference",), + "coordinate_reference", + identity=identity, key=key, + default=default, **filter_kwargs) - def field_ancillary(self, identity=None, default=ValueError(), key=False): + def field_ancillary(self, identity=None, default=ValueError(), + key=False, **filter_kwargs): """Return a field ancillary construct, or its key. .. versionadded:: 3.0.0 @@ -16748,26 +16682,13 @@ def field_ancillary(self, identity=None, default=ValueError(), key=False): TODO """ - field_ancillaries = self.field_ancillaries(view=True) - c = field_ancillaries - - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - c = field_ancillaries.filter_by_axis( - da_key, mode="exact", view=True - ) - - if key: - return c.key(default=default) - - return c.value(default=default) + return self._construct(("field_ancillary",), + "field_ancillary", identity=identity, + key=key, default=default, + **filter_kwargs) - def dimension_coordinate( - self, identity=None, key=False, default=ValueError(), axis_mode=None, property_mode=None, **filters - ): + def dimension_coordinate( self, identity=None, key=False, + default=ValueError(), **filter_kwargs ): """Return a dimension coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -16870,123 +16791,37 @@ def dimension_coordinate( return self._construct(("dimension_coordinate",), "dimension_coordinate", identity=identity, key=key, - default=default, axis_mode=axis_mode, - property_mode=property_mode, **filters) + default=default, **filter_kwargs) - def _construct(self, _ctypes, _method, identity=None, key=False, - default=ValueError(), axis_mode=None, - property_mode=None, **filters): - """Return a domain axis construct, or its key. - - .. versionadded:: 3.0.0 - - .. 
seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axes`, `field_ancillary` - - :Parameters: - - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='key%domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - *Parameter example:* - ``identity=2`` - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - :Returns: - - `DomainAxis` or `str` - The selected domain axis construct, or its key. - - **Examples:** - - TODO + def _construct(self, _ctypes, _method, identity=None, key=None, + default=None, **filter_kwargs): + """ TODO. 
""" - if ctypes and "filter_by_type" in filters: - raise TypeError( - f"{_method}() got an unexpected keyword argument " - "'filter_by_type'" - ) + if not _ctypes: + kwargs = filter_kwargs + else: + # Ensure that filter_by_types is the first filter + # applied, as it's the cheapest + kwargs = {"filter_by_type": _ctypes} - kwargs = { - "identity": identity, - "default": default, - "axis_mode": axis_mode, - "property_mode": property_mode, - "todict": True, - "filter_by_type": ctypes, - } - kwargs.update(filters) + if filter_kwargs: + if "filter_by_type" in filter_kwargs: + raise TypeError( + f"{_method}() got an unexpected keyword argument " + "'filter_by_type'" + ) + + kwargs.update(filter_kwargs) if key: - return self.construct_key(**kwargs) + return super().construct_key(identity, default=default, + **kwargs) - return self.construct(**kwargs) + return super().construct(identity, default=default, **kwargs) def domain_axis(self, identity=None, key=False, - default=ValueError(), axis_mode=None, - property_mode=None, **filters): + default=ValueError(), **filter_kwargs): """Return a domain axis construct, or its key. .. versionadded:: 3.0.0 @@ -17090,15 +16925,16 @@ def domain_axis(self, identity=None, key=False, c = self._construct(("domain_axis",), "domain_axis", identity=identity, key=key, default=None, - axis_mode=axis_mode, - property_mode=property_mode, **filters) - + **filter_kwargs) if c is not None: return c - + da_key = self.domain_axis_key(identity, default=None) if da_key is not None: - return self.domain_axes(todict=True)[da_key] + if key: + return da_key + + return self.constructs[da_key] return self._default( default, @@ -17457,14 +17293,16 @@ def set_construct( ) if construct_type == "dimension_coordinate": - dimension_coordinates = self.dimension_coordinates(view=True) - for dim, dim_axes in tuple( - dimension_coordinates.data_axes().items() - ): + data_axes = self.constructs.data_axes() +# dimension_coordinates = self.dimension_coordinates(todict=True) +# for dim, dim_axes in tuple( +# dimension_coordinates.data_axes().items() +# ): + for dim in self.dimension_coordinates(todict=True): if dim == key: continue - if dim_axes == tuple(axes): + if data_axes.get(dim) == tuple(axes): self.del_construct(dim, default=None) out = super().set_construct(construct, key=key, axes=axes, copy=copy) @@ -18782,11 +18620,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): return f - dim = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) - ) + dim = self.dimension_coordinate(filter_by_axis=(axis,), axis_mode="exact", default=None) if dim is not None and dim.period() is None: raise ValueError( f"Can't roll: {dim.identity()!r} axis has non-periodic " diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 24897235e7..1f766e393b 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -511,7 +511,7 @@ def identity( return default - def identities(self, generator=False, ctype="XTYZ"): + def identities(self, generator=False, ctypes="XTYZ", **kwargs): """Return all possible identities. The identities comprise: @@ -534,13 +534,13 @@ def identities(self, generator=False, ctype="XTYZ"): {{generator: `bool`, optional}} - ctype: (sequnce of) `str` - Restrict a coordinate type identies to be any of these + ctype: (sequence of) `str` + Restrict coordinate type identities to be any of these characters. 
Setting to a subset of ``'XTYZ'`` can give performance improvements, as it will reduce the number of coordinate types that are checked in circumstances - when particular coordinaete type have been ruled out a - priori. If a coordinate type is omitted then it will + when particular coordinate types have been ruled out a + priori. If a coordinate type is omitted then it will not be in the returned identities even if the coordinate construct is of that type. Coordinate types are checked in the order given. @@ -576,19 +576,16 @@ def identities(self, generator=False, ctype="XTYZ"): """ - def _ctype_iter(self, ctype): - stop = False - for c in ctype: - if stop: - break - + def _ctypes_iter(self, ctypes): + for c in ctypes: if getattr(self, c): - stop = True + # This coordinate constructs is of this type yield c + break identities = super().identities(generator=True) - - g = chain(identities, _ctype_iter(self, ctype)) + + g = chain(identities, _ctypes_iter(self, ctypes)) if generator: return g diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 049f3cd901..ab120039a4 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -4813,7 +4813,7 @@ def identities(self, generator=False, **kwargs): if i is not None: id_identity = (f"id%{i}",) - identities = super().identities(generator=True) + identities = super().identities(generator=True, **kwargs) g = chain(id_identity, identities) if generator: diff --git a/cf/test/test_AuxiliaryCoordinate.py b/cf/test/test_AuxiliaryCoordinate.py index 4b20157716..40f7431b32 100644 --- a/cf/test/test_AuxiliaryCoordinate.py +++ b/cf/test/test_AuxiliaryCoordinate.py @@ -60,7 +60,7 @@ def test_AuxiliaryCoordinate_chunk(self): def test_AuxiliaryCoordinate__repr__str__dump(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates(view=True)("latitude").value() + x = f.auxiliary_coordinate("latitude") _ = repr(x) _ = str(x) @@ -68,7 +68,7 @@ def test_AuxiliaryCoordinate__repr__str__dump(self): def test_AuxiliaryCoordinate_bounds(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates(view=True)("X").value() + d = f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) _ = x.upper_bounds @@ -76,7 +76,7 @@ def test_AuxiliaryCoordinate_bounds(self): def test_AuxiliaryCoordinate_properties(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates(view=True)("latitude").value() + x = f.auxiliary_coordinate("latitude") x.positive = "up" self.assertEqual(x.positive, "up") @@ -88,12 +88,12 @@ def test_AuxiliaryCoordinate_properties(self): del x.axis self.assertIsNone(getattr(x, "axis", None)) - d = f.dimension_coordinates(view=True)("X").value() + d = f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) def test_AuxiliaryCoordinate_insert_dimension(self): f = cf.read(self.filename)[0] - d = f.dimension_coordinates(view=True)("X").value() + d = f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) self.assertEqual(x.shape, (9,)) @@ -109,7 +109,7 @@ def test_AuxiliaryCoordinate_insert_dimension(self): def test_AuxiliaryCoordinate_transpose(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates(view=True)("longitude").value() + x = f.auxiliary_coordinate("longitude") bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) @@ -129,7 +129,7 @@ def test_AuxiliaryCoordinate_transpose(self): def test_AuxiliaryCoordinate_squeeze(self): f = cf.read(self.filename)[0] - x = f.auxiliary_coordinates(view=True)("longitude").value() + x = 
f.auxiliary_coordinate("longitude") bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) diff --git a/cf/test/test_DimensionCoordinate.py b/cf/test/test_DimensionCoordinate.py index fe7aa0885f..e6a41d1fa0 100644 --- a/cf/test/test_DimensionCoordinate.py +++ b/cf/test/test_DimensionCoordinate.py @@ -46,7 +46,7 @@ class DimensionCoordinateTest(unittest.TestCase): def test_DimensionCoordinate__repr__str__dump(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates(view=True)("X").value() + x = f.dimension_coordinate("X") _ = repr(x) _ = str(x) @@ -136,8 +136,8 @@ def test_DimensionCoordinate_convert_reference_time(self): def test_DimensionCoordinate_roll(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates(view=True)("X").value() - y = f.dimension_coordinates(view=True)("Y").value() + x = f.dimension_coordinate("X") + y = f.dimension_coordinate("Y") _ = x.roll(0, 3) with self.assertRaises(Exception): @@ -248,7 +248,7 @@ def test_DimensionCoordinate_override_calendar(self): def test_DimensionCoordinate_bounds(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates(view=True)("X").value() + x = f.dimension_coordinate("X") _ = x.upper_bounds _ = x.lower_bounds @@ -269,7 +269,7 @@ def test_DimensionCoordinate_bounds(self): def test_DimensionCoordinate_properties(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates(view=True)("X").value() + x = f.dimension_coordinate("X") x.positive = "up" self.assertEqual(x.positive, "up") @@ -284,7 +284,7 @@ def test_DimensionCoordinate_properties(self): def test_DimensionCoordinate_insert_dimension(self): f = cf.read(self.filename)[0] - x = f.dimension_coordinates(view=True)("X").value() + x = f.dimension_coordinate("X") self.assertEqual(x.shape, (9,)) self.assertEqual(x.bounds.shape, (9, 2)) diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index aeeec3ccc4..d7388d8b3d 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -955,12 +955,12 @@ def test_Field__add__(self): b = g + f axis = a.domain_axis("grid_longitude", key=1) - for key in a.field_ancillaries(view=True).filter_by_axis( - axis, mode="or" + for key in a.field_ancillaries( + filter_by_axis=(axis,), axis_mode="or" ): a.del_construct(key) - for key in a.cell_measures(view=True).filter_by_axis(axis, mode="or"): + for key in a.cell_measures(filter_by_axis=(axis,), axis_mode="or"): a.del_construct(key) self.assertTrue(a.equals(b, verbose=2)) @@ -1143,21 +1143,21 @@ def test_Field_cell_area(self): ca = f.cell_area() self.assertEqual(ca.ndim, 2) - self.assertEqual(len(ca.dimension_coordinates(view=True)), 2) - self.assertEqual(len(ca.domain_ancillaries(view=True)), 0) - self.assertEqual(len(ca.coordinate_references(view=True)), 1) + self.assertEqual(len(ca.dimension_coordinate()), 2) + self.assertEqual(len(ca.domain_ancillaries()), 0) + self.assertEqual(len(ca.coordinate_references()), 1) f.del_construct("cellmeasure0") y = f.dimension_coordinate("Y") y.set_bounds(y.create_bounds()) - self.assertEqual(len(f.cell_measures(view=True)), 0) + self.assertEqual(len(f.cell_measures()), 0) ca = f.cell_area() self.assertEqual(ca.ndim, 2) - self.assertEqual(len(ca.dimension_coordinates(view=True)), 2) - self.assertEqual(len(ca.domain_ancillaries(view=True)), 0) - self.assertEqual(len(ca.coordinate_references(view=True)), 1) + self.assertEqual(len(ca.dimension_coordinates()), 2) + self.assertEqual(len(ca.domain_ancillaries()), 0) + self.assertEqual(len(ca.coordinate_references()), 1) 
self.assertTrue(ca.Units.equivalent(cf.Units("m2")), ca.Units) y = f.dimension_coordinate("Y") @@ -2004,7 +2004,7 @@ def test_Field_moving_window(self): ) self.assertEqual( - len(g.cell_methods(view=True)), len(f.cell_methods(view=True)) + 1 + len(g.cell_methods()), len(f.cell_methods()) + 1 ) def test_Field_derivative(self): @@ -2050,24 +2050,24 @@ def test_Field_convert(self): c = f.convert("grid_latitude") self.assertTrue(c.ndim == 1) self.assertTrue(c.standard_name == "grid_latitude") - self.assertTrue(len(c.dimension_coordinates(view=True)) == 1) - self.assertTrue(len(c.auxiliary_coordinates(view=True)) == 1) - self.assertTrue(len(c.cell_measures(view=True)) == 0) - self.assertTrue(len(c.coordinate_references(view=True)) == 1) - self.assertTrue(len(c.domain_ancillaries(view=True)) == 0) - self.assertTrue(len(c.field_ancillaries(view=True)) == 0) - self.assertTrue(len(c.cell_methods(view=True)) == 0) + self.assertTrue(len(c.dimension_coordinates()) == 1) + self.assertTrue(len(c.auxiliary_coordinates()) == 1) + self.assertTrue(len(c.cell_measures()) == 0) + self.assertTrue(len(c.coordinate_references()) == 1) + self.assertTrue(len(c.domain_ancillaries()) == 0) + self.assertTrue(len(c.field_ancillaries()) == 0) + self.assertTrue(len(c.cell_methods()) == 0) c = f.convert("latitude") self.assertTrue(c.ndim == 2) self.assertTrue(c.standard_name == "latitude") - self.assertTrue(len(c.dimension_coordinates(view=True)) == 2) - self.assertTrue(len(c.auxiliary_coordinates(view=True)) == 3) - self.assertTrue(len(c.cell_measures(view=True)) == 1) - self.assertTrue(len(c.coordinate_references(view=True)) == 1) - self.assertTrue(len(c.domain_ancillaries(view=True)) == 0) - self.assertTrue(len(c.field_ancillaries(view=True)) == 0) - self.assertTrue(len(c.cell_methods(view=True)) == 0) + self.assertTrue(len(c.dimension_coordinates()) == 2) + self.assertTrue(len(c.auxiliary_coordinates()) == 3) + self.assertTrue(len(c.cell_measures()) == 1) + self.assertTrue(len(c.coordinate_references()) == 1) + self.assertTrue(len(c.domain_ancillaries()) == 0) + self.assertTrue(len(c.field_ancillaries()) == 0) + self.assertTrue(len(c.cell_methods()) == 0) # Cellsize c = f.convert("grid_longitude", cellsize=True) @@ -2135,7 +2135,7 @@ def test_Field_auxiliary_coordinate(self): self.assertEqual(f.auxiliary_coordinate(identity, key=True), key) identities = ["latitude", "longitude"] - auxiliary_coordinates = f.auxiliary_coordinates(view=True) + auxiliary_coordinates = f.auxiliary_coordinates() auxiliary_coordinates(*identities) auxiliary_coordinates() auxiliary_coordinates(identities[0]) @@ -2154,16 +2154,21 @@ def test_Field_coordinate(self): "dimensioncoordinate1", ): if identity == "domainaxis2": - key = ( - f.dimension_coordinates(view=True) - .filter_by_axis(identity, mode="and") - .key() - ) - c = ( - f.dimension_coordinates(view=True) - .filter_by_axis(identity, mode="and") - .value() - ) + key, c = f.dimension_coordinates( + filter_by_axis=(identity,), axis_mode="exact", + todict=True + ).popitem() + +# key = ( +# f.dimension_coordinates() +# .filter_by_axis(identity, mode="and") +# .key() +# ) +# c = ( +# f.dimension_coordinates(view=True) +# .filter_by_axis(identity, mode="and") +# .value() +# ) else: key = f.construct_key(identity) c = f.construct(identity) @@ -2172,7 +2177,7 @@ def test_Field_coordinate(self): self.assertTrue(f.coordinate(identity, key=True) == key) identities = ["auxiliarycoordinate1", "dimensioncoordinate1"] - coordinates = f.coordinates(view=True) + coordinates = f.coordinates() 
coordinates(*identities) coordinates() coordinates(identities[0]) @@ -2218,8 +2223,8 @@ def test_Field_coordinate_reference(self): # Delete self.assertIsNone(f.del_coordinate_reference("qwerty", default=None)) - self.assertTrue(len(f.coordinate_references(view=True)) == 2) - self.assertTrue(len(f.domain_ancillaries(view=True)) == 3) + self.assertTrue(len(f.coordinate_references()) == 2) + self.assertTrue(len(f.domain_ancillaries()) == 3) c = f.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" ) @@ -2227,13 +2232,13 @@ def test_Field_coordinate_reference(self): "standard_name:atmosphere_hybrid_height_coordinate" ) self.assertTrue(cr.equals(c, verbose=2)) - self.assertTrue(len(f.coordinate_references(view=True)) == 1) - self.assertTrue(len(f.domain_ancillaries(view=True)) == 0) + self.assertTrue(len(f.coordinate_references()) == 1) + self.assertTrue(len(f.domain_ancillaries()) == 0) f.del_coordinate_reference( "grid_mapping_name:rotated_latitude_longitude" ) - self.assertTrue(len(f.coordinate_references(view=True)) == 0) + self.assertTrue(len(f.coordinate_references()) == 0) # Set f = self.f.copy() @@ -2246,7 +2251,7 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references(view=True)) == 1) + self.assertTrue(len(f.coordinate_references()) == 1) cr = g.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" @@ -2256,8 +2261,8 @@ def test_Field_coordinate_reference(self): "foo", "domainancillary99" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references(view=True)) == 2) - self.assertTrue(len(f.domain_ancillaries(view=True)) == 3) + self.assertTrue(len(f.coordinate_references()) == 2) + self.assertTrue(len(f.domain_ancillaries()) == 3) f.del_construct("coordinatereference0") f.del_construct("coordinatereference1") @@ -2266,7 +2271,7 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr) - self.assertTrue(len(f.coordinate_references(view=True)) == 1) + self.assertTrue(len(f.coordinate_references()) == 1) def test_Field_dimension_coordinate(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2281,16 +2286,20 @@ def test_Field_dimension_coordinate(self): "dimensioncoordinate1", ): if identity == "domainaxis2": - key = ( - f.dimension_coordinates(view=True) - .filter_by_axis(identity, mode="and") - .key() - ) - c = ( - f.dimension_coordinates(view=True) - .filter_by_axis(identity, mode="and") - .value() - ) + key, c = f.dimension_coordinates( + filter_by_axis=(identity,), axis_mode="exact", + todict=True + ).popitem() +# key = ( +# f.dimension_coordinates(view=True) +# .filter_by_axis(identity, mode="and") +# .key() +# ) +# c = ( +# f.dimension_coordinates(view=True) +# .filter_by_axis(identity, mode="and") +# .value() +# ) elif identity == "X": key = f.construct_key("grid_longitude") c = f.construct("grid_longitude") @@ -2304,7 +2313,7 @@ def test_Field_dimension_coordinate(self): self.assertTrue(f.dimension_coordinate(identity, key=True) == key) identities = ["grid_latitude", "X"] - dimension_coordinates = f.dimension_coordinates(view=True) + dimension_coordinates = f.dimension_coordinates() dimension_coordinates(*identities) dimension_coordinates() dimension_coordinates(identities[0]) @@ -2387,7 +2396,7 @@ def test_Field_field_ancillary(self): self.assertTrue(f.field_ancillary(identity, key=True) == key) 
identities = ["ancillary1", "ancillary3"] - field_ancillaries = f.field_ancillaries(view=True) + field_ancillaries = f.field_ancillaries() field_ancillaries(*identities) field_ancillaries() field_ancillaries(identities[0]) From f765187ab7f7bdaa8fb4e1d67b20217ff39695bb Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 1 Apr 2021 13:36:11 +0100 Subject: [PATCH 13/53] devs --- cf/aggregate.py | 42 +-- cf/constructs.py | 228 ++++++------ cf/field.py | 779 +++++++++++++++++++++++------------------ cf/functions.py | 2 +- cf/mixin/coordinate.py | 69 ++-- cf/test/test_Field.py | 61 ++-- 6 files changed, 632 insertions(+), 549 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 97f212bf87..e6076ed672 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -333,20 +333,19 @@ def __init__( info_dim = [] # dim_coord = item(axis) -# dim_coords = f.dimension_coordinates(view=True).filter_by_axis( -# axis, mode="and", view=True -# ) + # dim_coords = f.dimension_coordinates(view=True).filter_by_axis( + # axis, mode="and", view=True + # ) dim_coords = f.dimension_coordinates( - filter_by_axis=(axis,), axis_mode="exact", - todict=True + filter_by_axis=(axis,), axis_mode="exact", todict=True ) if len(dim_coords) == 1: dim_coord_key, dim_coord = dim_coords.popitem() else: dim_coord_key, dim_coord = None, None - -# dim_coord = dim_coords.value(None) -# dim_coord_key = dim_coords.key(None) + + # dim_coord = dim_coords.value(None) + # dim_coord_key = dim_coords.key(None) dim_identity = None if dim_coord is not None: @@ -496,12 +495,9 @@ def __init__( # N-d auxiliary coordinates # ------------------------------------------------------------ self.nd_aux = {} - for key, nd_aux_coord in ( - f.auxiliary_coordinates( - filter_by_naxes=(gt(1),), - todict=True - ).items() - ): + for key, nd_aux_coord in f.auxiliary_coordinates( + filter_by_naxes=(gt(1),), todict=True + ).items(): # Find axes' canonical identities axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] axes = tuple(sorted(axes)) @@ -887,7 +883,7 @@ def canonical_cell_methods(self, rtol=None, atol=None): cell_methods = self.field.cell_methods().ordered() # TODO get rid or ordered when Python 3.6 has gone - + # cms = getattr(self.field, 'CellMethods', None) # TODO if not cell_methods: return None @@ -1955,13 +1951,10 @@ def aggregate( axis_items = meta[0].axis.items() for axis in axes: # TODO IMPORTANT: should this be filter_by_axis ???? Yes, surely ... - coord = ( - meta[0].field.coordinate( - filter_by_axis=(axis,), axis_mode="exact", - default=None - ) + coord = meta[0].field.coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None ) -# coord = coords.value(default=None) + # coord = coords.value(default=None) if coord is None: continue @@ -2214,11 +2207,10 @@ def _create_hash_and_first_values( continue # Still here? -# dim_coord = m.field.dimension_coordinates( -# ).filter_by_axis(axis, mode="and", view=True) + # dim_coord = m.field.dimension_coordinates( + # ).filter_by_axis(axis, mode="and", view=True) dim_coord = m.field.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", - default=None + filter_by_axis=(axis,), axis_mode="exact", default=None ) # Find the sort indices for this axis ... 
diff --git a/cf/constructs.py b/cf/constructs.py index fb0a89f57f..4cea7471fd 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -46,7 +46,7 @@ def __repr__(self): """ return super().__repr__().replace("<", ">> x.identities() -# ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'dimensioncoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# Note that the identifiers of a metadata construct in the -# output of a `print` or `!dump` call are always one of its -# identities, and so may always be used as an *identities* -# argument. -# -# Domain axis constructs may also be identified by their -# position in the field construct's data array. Positions -# are specified by either integers. -# -# .. note:: This is an extension to the functionality of -# `cfdm.Constucts.filter_by_identity`. -# -# {{view: `bool`, optional}} -# -# {{todict: `bool`, optional}} -# -# {{cache: optional}} -# -# identities_kwargs: optional -# Additional parameters for configuring each construct's -# `identities` method. By default ``generator=True`` is -# passed by default, and ``ctype`` is inferred from the -# *identities* parameter. -# -# .. versionadded:: 3.9.0 -# -# :Returns: -# -# `Constructs` -# The selected constructs and their construct keys. -# -# **Examples:** -# -# Select constructs that have a "standard_name" property of -# 'latitude': -# -# >>> d = c.filter_by_identity('latitude') -# -# Select constructs that have a "long_name" property of 'Height': -# -# >>> d = c.filter_by_identity('long_name=Height') -# -# Select constructs that have a "standard_name" property of -# 'latitude' or a "foo" property of 'bar': -# -# >>> d = c.filter_by_identity('latitude', 'foo=bar') -# -# Select constructs that have a netCDF variable name of 'time': -# -# >>> d = c.filter_by_identity('ncvar%time') -# -# """ -# if cache is not None: -# return cache -# -# # Allow keys without the 'key%' prefix -# for n, identity in enumerate(identities): -# if identity in self: -# identities = list(identities) -# identities[n] = "key%" + identity -# break -# -# ctype = [i for i in "XTYZ" if i in identities] -# -# return super().filter_by_identity( -# identities, todict=todict, -# _config={"identities_kwargs": {"ctype": ctype}, -# "bypass": lambda x: x in ctype} -# ) - + # def filter_by_identity(self, *identities, view=False, + # todict=False, cache=None, **identities_kwargs): + # """Select metadata constructs by identity. + # + # .. versionadded:: 3.0.0 + # + # .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, + # `filter_by_measure`, `filter_by_method`, + # `filter_by_naxes`, `filter_by_ncdim`, + # `filter_by_ncvar`, `filter_by_property`, + # `filter_by_size`, `filter_by_type`, + # `filters_applied`, `inverse_filter`, `unfilter` + # + # :Parameters: + # + # identities: optional + # Select constructs that have any of the given identities or + # construct keys. + # + # An identity is specified by a string (e.g. ``'latitude'``, + # ``'long_name=time'``, etc.); or a compiled regular + # expression (e.g. ``re.compile('^atmosphere')``), for which + # all constructs whose identities match (via `re.search`) + # are selected. + # + # If no identities are provided then all constructs are selected. + # + # Each construct has a number of identities, and is selected + # if any of them match any of those provided. A construct's + # identities are those returned by its `!identities` + # method. 
In the following example, the construct ``x`` has + # five identities: + # + # >>> x.identities() + # ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] + # + # A construct key may optionally have the ``'key%'`` + # prefix. For example ``'dimensioncoordinate2'`` and + # ``'key%dimensioncoordinate2'`` are both acceptable keys. + # + # Note that the identifiers of a metadata construct in the + # output of a `print` or `!dump` call are always one of its + # identities, and so may always be used as an *identities* + # argument. + # + # Domain axis constructs may also be identified by their + # position in the field construct's data array. Positions + # are specified by either integers. + # + # .. note:: This is an extension to the functionality of + # `cfdm.Constucts.filter_by_identity`. + # + # {{view: `bool`, optional}} + # + # {{todict: `bool`, optional}} + # + # {{cache: optional}} + # + # identities_kwargs: optional + # Additional parameters for configuring each construct's + # `identities` method. By default ``generator=True`` is + # passed by default, and ``ctype`` is inferred from the + # *identities* parameter. + # + # .. versionadded:: 3.9.0 + # + # :Returns: + # + # `Constructs` + # The selected constructs and their construct keys. + # + # **Examples:** + # + # Select constructs that have a "standard_name" property of + # 'latitude': + # + # >>> d = c.filter_by_identity('latitude') + # + # Select constructs that have a "long_name" property of 'Height': + # + # >>> d = c.filter_by_identity('long_name=Height') + # + # Select constructs that have a "standard_name" property of + # 'latitude' or a "foo" property of 'bar': + # + # >>> d = c.filter_by_identity('latitude', 'foo=bar') + # + # Select constructs that have a netCDF variable name of 'time': + # + # >>> d = c.filter_by_identity('ncvar%time') + # + # """ + # if cache is not None: + # return cache + # + # # Allow keys without the 'key%' prefix + # for n, identity in enumerate(identities): + # if identity in self: + # identities = list(identities) + # identities[n] = "key%" + identity + # break + # + # ctype = [i for i in "XTYZ" if i in identities] + # + # return super().filter_by_identity( + # identities, todict=todict, + # _config={"identities_kwargs": {"ctype": ctype}, + # "bypass": lambda x: x in ctype} + # ) + def _filter_by_identity(self, arg, todict, _config, identities): """Worker function for `filter_by_identity` and `filter`. 
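The `_filter_by_identity` worker in these constructs.py hunks pulls the single-character coordinate-type identities ('X', 'T', 'Y', 'Z') out of the requested identities and adjusts the short-circuit test so that a requested coordinate type is never skipped. A rough, self-contained illustration of that bookkeeping, with invented helper names and a dummy base test standing in for `_short_circuit_test`:

def split_ctypes(identities):
    """Partition identities into coordinate-type letters and the rest."""
    ctypes = [i for i in "XTYZ" if i in identities]
    others = [i for i in identities if i not in ctypes]
    return ctypes, others


def make_short_circuit_test(ctypes, base_test):
    # A requested coordinate-type letter must never short-circuit the
    # search, so it is excluded before the base test is consulted.
    return lambda x: x not in ctypes and base_test(x)


ctypes, others = split_ctypes(("X", "latitude"))
assert ctypes == ["X"] and others == ["latitude"]

test = make_short_circuit_test(ctypes, lambda x: True)
assert test("X") is False
assert test("latitude") is True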
@@ -287,10 +287,10 @@ def _filter_by_identity(self, arg, todict, _config, identities): config = {"identities_kwargs": {"ctypes": ctypes}} if ctypes: # Exclude a ctype from the short circuit test - config["short_circuit_test"] = ( - lambda x: (x not in ctypes and self._short_circuit_test(x)) + config["short_circuit_test"] = lambda x: ( + x not in ctypes and self._short_circuit_test(x) ) - + config.update(_config) return super()._filter_by_identity(arg, todict, config, identities) diff --git a/cf/field.py b/cf/field.py index 783ee83008..a2d8ed6805 100644 --- a/cf/field.py +++ b/cf/field.py @@ -279,7 +279,7 @@ class Field(mixin.PropertiesData, cfdm.Field): {{netCDF global attributes}} {{netCDF group attributes}} - + {{netCDF geometry group}} Some components exist within multiple constructs, but when written @@ -493,7 +493,7 @@ def __getitem__(self, indices): construct_data_axes = new.constructs.data_axes() for key, construct in new.constructs.filter_by_axis( - "or", *data_axes, todict=True + "or", *data_axes, todict=True ).items(): construct_axes = construct_data_axes[key] dice = [] @@ -726,24 +726,25 @@ def analyse_items(self, relaxed_identities=None): if relaxed_identities is None: relaxed_identities = cf_relaxed_identities() -# dimension_coordinates = self.dimension_coordinates(view=True) -# auxiliary_coordinates = self.auxiliary_coordinates(view=True) + # dimension_coordinates = self.dimension_coordinates(view=True) + # auxiliary_coordinates = self.auxiliary_coordinates(view=True) for axis in self.domain_axes(todict=True): -# dims = self.constructs.chain( -# "filter_by_type", -# ("dimension_coordinate",), "filter_by_axis", (axis,) -# mode="and", todict=True -# ) - dims = self.dimension_coordinates(filter_by_axis=(axis,), - axis_mode="and", todict=True) - + # dims = self.constructs.chain( + # "filter_by_type", + # ("dimension_coordinate",), "filter_by_axis", (axis,) + # mode="and", todict=True + # ) + dims = self.dimension_coordinates( + filter_by_axis=(axis,), axis_mode="and", todict=True + ) + if len(dims) == 1: # This axis of the domain has a dimension coordinate key, dim = dims.popitem() -# key = dims.key() -# dim = dims.value() + # key = dims.key() + # dim = dims.value() identity = dim.identity(strict=True, default=None) if identity is None: @@ -759,9 +760,7 @@ def analyse_items(self, relaxed_identities=None): if identity: if identity in id_to_axis: - warnings.append( - "Field has multiple {identity!r} axes" - ) + warnings.append("Field has multiple {identity!r} axes") axis_to_id[axis] = identity id_to_axis[identity] = axis @@ -774,12 +773,15 @@ def analyse_items(self, relaxed_identities=None): else: auxs = self.constructs.chain( "filter_by_type", - ("auxiliary_coordinate",), "filter_by_axis", (axis,), - mode="and", todict=True + ("auxiliary_coordinate",), + "filter_by_axis", + (axis,), + mode="and", + todict=True, ) -# auxs = self.auxiliary_coordinates.filter_by_axis( -# axis, mode="exact", todict=True -# ) + # auxs = self.auxiliary_coordinates.filter_by_axis( + # axis, mode="exact", todict=True + # ) if len(auxs) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly @@ -1511,18 +1513,22 @@ def _binary_operation_old(self, other, method): refs0 = dict(field0.coordinate_references) refs1 = dict(field1.coordinate_references) - field1_dimension_coordinates = field1.dimension_coordinates(todict=True) -# field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) - field1_coordinate_references = 
field1.coordinate_references(todict=True) + field1_dimension_coordinates = field1.dimension_coordinates( + todict=True + ) + # field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) + field1_coordinate_references = field1.coordinate_references( + todict=True + ) field1_domain_ancillaries = field1_domain_ancillaries(todict=True) field1_domain_axes = field1.domain_axes(todict=True) # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) -# field0_domain_ancillaries = field0_domain_ancillaries(todict=True) - -# c = field0.constructs.filter_by_type( -# "auxiliary_coordinate", "domain_ancillary", -# ) + # field0_domain_ancillaries = field0_domain_ancillaries(todict=True) + + # c = field0.constructs.filter_by_type( + # "auxiliary_coordinate", "domain_ancillary", + # ) for axis0 in s["size1_broadcast_axes"] + s["new_size1_axes"]: axis1 = axis0_to_axis1[axis0] @@ -1534,11 +1540,11 @@ def _binary_operation_old(self, other, method): if axis1 in field1_dimension_coordinates: insert_dim[axis1] = [axis0] -# for key1 in field1_auxiliary_coordinates.filter_by_axis( -# axis1, mode="exact", view=True -# ): - for key1 in field1.auxiliary_coordinates(filter_by_axis=( - axis1,), axis_mode="exact", todict=True + # for key1 in field1_auxiliary_coordinates.filter_by_axis( + # axis1, mode="exact", view=True + # ): + for key1 in field1.auxiliary_coordinates( + filter_by_axis=(axis1,), axis_mode="exact", todict=True ): insert_aux[key1] = [axis0] @@ -1552,12 +1558,16 @@ def _binary_operation_old(self, other, method): # Remove all field0 auxiliary coordinates and domain # ancillaries which span this axis -# remove_items.update(c.filter_by_axis("and", axis0, todict=True)) - remove_items.update(field0.constructs.filter( - filter_by_type=("auxiliary_coordinate", "domain_ancillary"), - filter_by_axis=(axis0,), - todict=True, - ) + # remove_items.update(c.filter_by_axis("and", axis0, todict=True)) + remove_items.update( + field0.constructs.filter( + filter_by_type=( + "auxiliary_coordinate", + "domain_ancillary", + ), + filter_by_axis=(axis0,), + todict=True, + ) ) # Remove all field0 coordinate references which span this @@ -1591,9 +1601,11 @@ def _binary_operation_old(self, other, method): # spanning the same axes which has the same identity and a # size-1 data array. 
# ------------------------------------------------------------- - field1_auxiliary_coordinates = field1.auxiliary_coordinates(todict=True) + field1_auxiliary_coordinates = field1.auxiliary_coordinates( + todict=True + ) auxs1 = field1_auxiliary_coordinates.copy() -# auxs1 = dict(field1_auxiliary_coordinates.items()) + # auxs1 = dict(field1_auxiliary_coordinates.items()) logger.debug( "5: remove_items = {}".format(remove_items) ) # pragma: no cover @@ -1917,8 +1929,8 @@ def _binary_operation(self, other, method): for i, (f, out) in enumerate(zip((field0, field1), (out0, out1))): data_axes = f.get_data_axes() -# f_dimension_coordinates = f.dimension_coordinates(todict=True) -# f_auxiliary_coordinates = f.auxiliary_coordinates(todict=True) + # f_dimension_coordinates = f.dimension_coordinates(todict=True) + # f_auxiliary_coordinates = f.auxiliary_coordinates(todict=True) for axis in f.domain_axes(todict=True): identity = None key = None @@ -1926,14 +1938,13 @@ def _binary_operation(self, other, method): coord_type = None coords = f.dimension_coordinates( - filter_by_axis=(axis,), axis_mode="exact", - todict=True + filter_by_axis=(axis,), axis_mode="exact", todict=True ) if len(coords) == 1: # This axis of the domain has a dimension coordinate key, coord = coords.popitem() -# key = coords.key() - # coord = coords.value() + # key = coords.key() + # coord = coords.value() identity = coord.identity(strict=True, default=None) if identity is None: @@ -1948,16 +1959,15 @@ def _binary_operation(self, other, method): identity = coord.identity(relaxed=True, default=None) else: coords = f.auxiliary_coordinates( - filter_by_axis=(axis,), axis_mode="exact", - todict=True + filter_by_axis=(axis,), axis_mode="exact", todict=True ) if len(coords) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly # one 1-d auxiliary coordinate, so that will do. 
key, coord = coords.popitem() -# key = coords.key() -# coord = coords.value() + # key = coords.key() + # coord = coords.value() identity = coord.identity(strict=True, default=None) @@ -2160,7 +2170,7 @@ def _binary_operation(self, other, method): a = out0[identity] if y.size > 1 and a.size == 1: for key0, c in field0.constructs.filter_by_axis( - "or", a.axis, todict=True + "or", a.axis, todict=True ).items(): removed_refs0 = field0.del_coordinate_reference( construct=key0, default=None @@ -2220,22 +2230,24 @@ def _binary_operation(self, other, method): if new_axes: constructs = field1.constructs.filter( - filter_by_type=("dimension_coordinate", - "auxiliary_coordinate", - "cell_measure"), + filter_by_type=( + "dimension_coordinate", + "auxiliary_coordinate", + "cell_measure", + ), filter_by_axis=new_axes, axis_mode="subset", - todict=True + todict=True, ) -# constructs = field1.constructs.filter_by_type( -# "dimension_coordinate", -# "auxiliary_coordinate", -# "cell_measure", -# view=True, -# ) -# constructs = constructs.filter_by_axis( -# *new_axes, mode="subset", view=True -# ) + # constructs = field1.constructs.filter_by_type( + # "dimension_coordinate", + # "auxiliary_coordinate", + # "cell_measure", + # view=True, + # ) + # constructs = constructs.filter_by_axis( + # *new_axes, mode="subset", view=True + # ) for key, c in constructs.items(): c_axes = field1.get_data_axes(key) axes = [axis_map[axis1] for axis1 in c_axes] @@ -2474,10 +2486,10 @@ def _equivalent_coordinate_references( return False # Compare the domain ancillaries -# domain_ancillaries = self.domain_ancillaries(todict=True) -# field1_domain_ancillaries = field1.domain_ancillaries(todict=True) + # domain_ancillaries = self.domain_ancillaries(todict=True) + # field1_domain_ancillaries = field1.domain_ancillaries(todict=True) -# TODO consider case of None key ? + # TODO consider case of None key ? 
for ( term, @@ -2488,19 +2500,19 @@ def _equivalent_coordinate_references( identifier1 = ref1.coordinate_conversion.domain_ancillaries()[term] -# key0 = domain_ancillaries.filter_by_key(identifier0).key() -# key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() + # key0 = domain_ancillaries.filter_by_key(identifier0).key() + # key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() if not self._equivalent_construct_data( - field1, - key0=identifier0, #key0, - key1=identifier1, #key1, - rtol=rtol, - atol=atol, - s=s, - t=t, - verbose=verbose, - axis_map=axis_map, + field1, + key0=identifier0, # key0, + key1=identifier1, # key1, + rtol=rtol, + atol=atol, + s=s, + t=t, + verbose=verbose, + axis_map=axis_map, ): # add traceback TODO return False @@ -2562,8 +2574,8 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) key, _ = domain_axes.popitem() axes.append(key) -# domain_axes.filter_by_size(n, view=True).key() -# ) + # domain_axes.filter_by_size(n, view=True).key() + # ) else: raise ValueError( f"Can't insert {item!r}: Ambiguous shape: " @@ -2828,7 +2840,9 @@ def _conform_for_assignment(self, other, check_coordinates=False): ] refs1 = [ key - for key, ref in other.coordinate_references(todict=True).items() + for key, ref in other.coordinate_references( + todict=True + ).items() if key1 in ref.coordinates() ] @@ -3199,12 +3213,12 @@ def _regrid_get_latlong(self, name, axes=None): lon_found = False lat_found = False - # auxiliary_coordinates = self.auxiliary_coordinates( -# view=True, cache=auxiliary_coordinates -# ) + # auxiliary_coordinates = self.auxiliary_coordinates( + # view=True, cache=auxiliary_coordinates + # ) for key, aux in self.auxiliary_coordinates( - filter_by_naxes=(2,), todict=True + filter_by_naxes=(2,), todict=True ).items(): if aux.Units.islongitude: if lon_found: @@ -3538,27 +3552,27 @@ def _regrid_get_reordered_sections( # possibibly reduce the number of trasnistions between different masks # - each change is slow. 
-# dimensions_coordinates = self.dimension_coordinates(view=True) + # dimensions_coordinates = self.dimension_coordinates(view=True) axis_indices = [] if axis_order is not None: for axis in axis_order: # axis_key = self.dim(axis, key=True) -# dims = self.dimension_coordinates( -# filter_by_axis=(axis,), axis_mode="exact", todict=True -# ) -# if len(dims)!= 1: -# axis_key = None -# else: -# axis_key, _ = dims.popitem() - + # dims = self.dimension_coordinates( + # filter_by_axis=(axis,), axis_mode="exact", todict=True + # ) + # if len(dims)!= 1: + # axis_key = None + # else: + # axis_key, _ = dims.popitem() + axis_key = self.dimension_coordinate( filter_by_axis=(axis,), axis_mode="exact", default=None, key=True, - todict=True - ) + todict=True, + ) if axis_key is not None: if axis_key in regrid_axes: raise ValueError("Cannot loop over regridding axes.") @@ -3828,11 +3842,11 @@ def _regrid_update_coordinate_references( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): -# domain_ancillaries = self.domain_ancillaries( -# view=True, cache=domain_ancillaries -# ) -# -# key = domain_ancillaries(value, view=True).key(default=None) + # domain_ancillaries = self.domain_ancillaries( + # view=True, cache=domain_ancillaries + # ) + # + # key = domain_ancillaries(value, view=True).key(default=None) if value in domain_ancillaries: key = value @@ -3847,15 +3861,19 @@ def _regrid_update_coordinate_references( # if domain_ancillaries.filter_by_key(key).filter_by_axis( # x, y, mode="exact", view=True # ): -# if len( -# self.domain_ancillaries( -# filter_by_axis=(x, y), axis_mode="exact", -# todict=True -# ) -# ) == 1: - if self.domain_ancillary(filter_by_axis=(x, y), - axis_mode="exact", key=True, - todict=True, default=False): + # if len( + # self.domain_ancillaries( + # filter_by_axis=(x, y), axis_mode="exact", + # todict=True + # ) + # ) == 1: + if self.domain_ancillary( + filter_by_axis=(x, y), + axis_mode="exact", + key=True, + todict=True, + default=False, + ): # Convert the domain ancillary into an independent # field value = self.convert(key) @@ -4028,12 +4046,14 @@ def _regrid_update_coordinates( domain_axes[k_s].set_size(d.size) self.set_construct(d, axes=[k_s]) -# dst_auxiliary_coordinates = dst.auxiliary_coordinates( -# view=True, cache=dst_auxiliary_coordinates -# ) + # dst_auxiliary_coordinates = dst.auxiliary_coordinates( + # view=True, cache=dst_auxiliary_coordinates + # ) for aux_key, aux in dst.auxiliary_coordinates( - filter_by_axis=dst_axis_keys, axis_mode="subset", todict=True + filter_by_axis=dst_axis_keys, + axis_mode="subset", + todict=True, ).items(): aux_axes = [ axis_map[k_d] for k_d in dst.get_data_axes(aux_key) @@ -4059,9 +4079,9 @@ def _regrid_update_coordinates( for coord, axis_key in zip(dst_coords, src_axis_keys): self.set_construct(coord, axes=[axis_key]) else: -# dst_auxiliary_coordinates = dst.auxiliary_coordinates( -# view=True, cache=dst_auxiliary_coordinates -# )# + # dst_auxiliary_coordinates = dst.auxiliary_coordinates( + # view=True, cache=dst_auxiliary_coordinates + # )# for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys @@ -4080,14 +4100,16 @@ def _regrid_update_coordinates( self.set_construct(dim_coord, axes=[src_axis_key]) for aux in dst.auxiliary_coordinates( - filter_by_axis=(dst_axis_key,), - axis_mode="exact", todict=True + filter_by_axis=(dst_axis_key,), + axis_mode="exact", + todict=True, ).values(): self.set_construct(aux, axes=[src_axis_key]) for aux_key, aux in dst.auxiliary_coordinates( - 
filter_by_axis=dst_axis_keys, - axis_mode="exact", todict=True + filter_by_axis=dst_axis_keys, + axis_mode="exact", + todict=True, ).items(): aux_axes = dst.get_data_axes(aux_key) if aux_axes == tuple(dst_axis_keys): @@ -4142,7 +4164,7 @@ def _weights_area_XY( `bool` or `None` """ -# dimension_coordinates = self.dimension_coordinates(view=True) + # dimension_coordinates = self.dimension_coordinates(view=True) xdims = self.dimension_coordinates("X", todict=True) ydims = self.dimension_coordinates("Y", todict=True) @@ -4350,18 +4372,20 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): """Creates a weights field.""" s = self.analyse_items() -# domain_axes = self.domain_axes(todict=True) + # domain_axes = self.domain_axes(todict=True) domain_axes_size_1 = self.domain_axes(filter_by_size=(1,), todict=True) for w in fields: t = w.analyse_items() -# TODO CHECK this with org - domain_axes_size_1 = w.domain_axes(filter_by_size=(1,), todict=True) + # TODO CHECK this with org + domain_axes_size_1 = w.domain_axes( + filter_by_size=(1,), todict=True + ) if t["undefined_axes"]: -# if set( -# t.domain_axes.filter_by_size(gt(1), view=True) -# ).intersection(t["undefined_axes"]): + # if set( + # t.domain_axes.filter_by_size(gt(1), view=True) + # ).intersection(t["undefined_axes"]): if set(domain_axes_size_1).intersection(t["undefined_axes"]): raise ValueError("345jn456jn TODO") @@ -6388,7 +6412,9 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): """ if kwargs: - _DEPRECATION_ERROR_KWARGS(self, "cyclic", kwargs) # pragma: no cover + _DEPRECATION_ERROR_KWARGS( + self, "cyclic", kwargs + ) # pragma: no cover data = self.get_data(None, _fill_value=False) if data is None: @@ -7140,7 +7166,7 @@ def weights( field.del_coordinate_reference(key) for key in field.constructs.filter_by_axis( - "or", *not_needed_axes, todict=True + "or", *not_needed_axes, todict=True ): field.del_construct(key) @@ -10471,8 +10497,8 @@ def collapse( # # ------------------------------------------------------------ domain_axes = f.domain_axes(todict=False, cache=domain_axes) -# auxiliary_coordinates = f.auxiliary_coordinates(view=True) -# dimension_coordinates = f.dimension_coordinates(view=True) + # auxiliary_coordinates = f.auxiliary_coordinates(view=True) + # dimension_coordinates = f.dimension_coordinates(view=True) for method, axes, within, over, axes_in in zip( all_methods, all_axes, all_within, all_over, input_axes @@ -10486,7 +10512,7 @@ def collapse( method = method2 -# collapse_axes_all_sizes = domain_axes.filter_by_key(*axes) + # collapse_axes_all_sizes = domain_axes.filter_by_key(*axes) collapse_axes_all_sizes = f.domain_axes( filter_by_key=axes, todict=False ) @@ -10520,7 +10546,7 @@ def collapse( "root_mean_square", "sum_of_squares", ): - collapse_axes = collapse_axes_all_sizes.todict() #copy() + collapse_axes = collapse_axes_all_sizes.todict() # copy() else: collapse_axes = collapse_axes_all_sizes.filter_by_size( gt(1), todict=True @@ -10535,12 +10561,13 @@ def collapse( if _create_zero_size_cell_bounds: # Create null bounds if requested for axis in axes: -# dc = f.dimension_coordinates( -# filter_by_axis=(axis,), axis_mode="and", todict=Tru#e -# ).value(None) + # dc = f.dimension_coordinates( + # filter_by_axis=(axis,), axis_mode="and", todict=Tru#e + # ).value(None) dc = f.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", - default=None + filter_by_axis=(axis,), + axis_mode="exact", + default=None, ) if dc is not None: _, dc = dc.popitem() @@ -10803,8 +10830,9 
@@ def collapse( # which span this axis c = f.constructs.filter( filter_by_type=("cell_measure", "domain_ancillary"), - filter_by_axis=(axis,), axis_mode="or", - todict=True + filter_by_axis=(axis,), + axis_mode="or", + todict=True, ) for key, value in c.items(): logger.info( @@ -10815,12 +10843,17 @@ def collapse( # REMOVE all 2+ dimensional auxiliary coordinates # which span this axis -# c = auxiliary_coordinates.filter_by_naxes(gt(1), view=True) + # c = auxiliary_coordinates.filter_by_naxes(gt(1), view=True) c = f.auxiliary_coordinates( - filter_by_naxes=(gt(1,),), - filter_by_axis=(axis,), axis_mode="or", - todict=True - ) + filter_by_naxes=( + gt( + 1, + ), + ), + filter_by_axis=(axis,), + axis_mode="or", + todict=True, + ) for key, value in c.items(): logger.info( f" Removing {value.construct_type} {key!r}" @@ -10837,8 +10870,7 @@ def collapse( # this axis and have the same values in their data # array and bounds. c = f.auxiliary_coordinates( - filter_by_axis=(axis,), axis_mode="exact", - todict=True + filter_by_axis=(axis,), axis_mode="exact", todict=True ) for key, aux in c.items(): logger.info(f"key = {key}") # pragma: no cover @@ -10869,11 +10901,12 @@ def collapse( f"Changing axis size to 1: {axis}" ) # pragma: no cover -# dim = dimension_coordinates.filter_by_axis( -# axis, mode="exact", view=True -# ).value(None) - dim = f.dimension_coordinate(filter_by_axis=(axis,), - axis_mode="exact", default=None) + # dim = dimension_coordinates.filter_by_axis( + # axis, mode="exact", view=True + # ).value(None) + dim = f.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None + ) if dim is None: continue @@ -12342,7 +12375,7 @@ def _update_cell_methods( method: `str` - domain_axes: `Constructs` + domain_axes: `Constructs` or `dict` {{verbose: `int` or `str` or `None`, optional}} @@ -12351,7 +12384,7 @@ def _update_cell_methods( `None` """ - original_cell_methods = self.cell_methods(view=True).ordered() + original_cell_methods = self.cell_methods().ordered() logger.info(" Update cell methods:") # pragma: no cover logger.info( " Original cell methods = {}".format(original_cell_methods) @@ -12517,7 +12550,7 @@ def directions(self): {'dim1': True, 'dim0': False} """ - out = {key: True for key in self.domain_axes(todict=True).keys()} + out = {key: True for key in self.domain_axes(todict=True)} for key, dc in self.dimension_coordinates(todict=True).items(): direction = dc.direction() @@ -12819,7 +12852,7 @@ def indices(self, *mode, **kwargs): indices = [slice(None)] * self.ndim domain_axes = self.domain_axes(todict=True) - constructs = self.constructs.filter_by_data(view=True) +# constructs = self.constructs.filter_by_data(view=True) parsed = {} unique_axes = set() @@ -12830,14 +12863,19 @@ def indices(self, *mode, **kwargs): key = None construct = None else: - c = constructs.filter_by_identity(identity, view=True) +# c = constructs.filter_by_identity(identity, view=True) + c = self.constructs.filter( + filter_by_data=True, + filter_by_identity=(identity,), + todict=True + ) if len(c) != 1: raise ValueError( "Can't find indices: Ambiguous axis or axes: " f"{identity!r}" ) - key, construct = dict(c).popitem() + key, construct = c.popitem() axes = self.get_data_axes(key) @@ -13186,7 +13224,7 @@ def indices(self, *mode, **kwargs): # Create a full index for this axis start = 0 # stop = self.axis_size(axis) - stop = self.domain_axes(view=True)[axis].get_size() + stop = domain_axes[axis].get_size() size = stop - start index = slice(start, stop) else: @@ -13416,11 +13454,11 @@ 
def set_data( if not domain_axes: raise ValueError("Can't set data: No domain axes exist") -# domain_axes = f.domain_axes(view=True) + # domain_axes = f.domain_axes(view=True) axes = [] for n in data_shape: -# da = domain_axes.filter_by_size(n, todict=True) + # da = domain_axes.filter_by_size(n, todict=True) da = f.domain_axes(filter_by_size=(n,), todict=True) if len(da) != 1: raise ValueError( @@ -13827,7 +13865,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): n = 0 - self_cell_methods = self.cell_methods(view=True) + self_cell_methods = self.cell_methods() # TODO for identity in identities: cms = False @@ -13854,9 +13892,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): "cell_method" }: key = tuple(self_cell_methods.ordered())[-1] - filtered = self_cell_methods(key, view=True)( - identity, view=True - ) + filtered = self_cell_methods(key)(identity) # TODO if not filtered: if not OR: return False @@ -13991,9 +14027,9 @@ def moving_window( Moving mean, sum, and integral calculations are possible. - By default moving means are unweighted, but weights based on the - axis cell sizes (or custom weights) may applied to the calculation - via the *weights* parameter. + By default moving means are unweighted, but weights based on + the axis cell sizes (or custom weights) may applied to the + calculation via the *weights* parameter. By default moving integrals must be weighted. @@ -14003,8 +14039,8 @@ def moving_window( .. note:: The `moving_window` method can not, in general, be emulated by the `convolution_filter` method, as the latter i) can not change the window weights as the - filter passes through the axis; and ii) does not update - the cell method constructs. + filter passes through the axis; and ii) does not + update the cell method constructs. .. versionadded:: 3.3.0 @@ -14014,8 +14050,8 @@ def moving_window( :Parameters: method: `str` - Define the moving window method. The method is given by - one of the following strings (see + Define the moving window method. The method is given + by one of the following strings (see https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods for precise definitions): @@ -14030,48 +14066,50 @@ def moving_window( ``'integral'`` The integral of values. Always ================== ============================ ======== - * Methods that are "Never" weighted ignore the *weights* - parameter, even if it is set. + * Methods that are "Never" weighted ignore the + *weights* parameter, even if it is set. - * Methods that "May be" weighted will only be weighted if - the *weights* parameter is set. + * Methods that "May be" weighted will only be weighted + if the *weights* parameter is set. - * Methods that are "Always" weighted require the *weights* - parameter to be set. + * Methods that are "Always" weighted require the + *weights* parameter to be set. window_size: `int` Specify the size of the window used to calculate the moving window. *Parameter example:* - A 5-point moving window is set with ``window_size=5``. + A 5-point moving window is set with + ``window_size=5``. axis: `str` or `int` Select the domain axis over which the filter is to be applied, defined by that which would be selected by - passing the given axis description to a call of the field - construct's `domain_axis` method. For example, for a value - of ``'X'``, the domain axis construct returned by - ``f.domain_axis('X')`` is selected. + passing the given axis description to a call of the + field construct's `domain_axis` method. 
For example, + for a value of ``'X'``, the domain axis construct + returned by ``f.domain_axis('X')`` is selected. weights: optional Specify the weights for the moving window. The weights are, those that would be returned by this call of the - field construct's `weights` method: ``f.weights(weights, - axes=axis, radius=radius, great_circle=great_circle, - data=True)``. See the *axis*, *radius* and *great_circle* - parameters and `cf.Field.weights` for details. + field construct's `weights` method: + ``f.weights(weights, axes=axis, radius=radius, + great_circle=great_circle, data=True)``. See the + *axis*, *radius* and *great_circle* parameters and + `cf.Field.weights` for details. .. note:: By default *weights* is `None`, resulting in **unweighted calculations**. - .. note:: Setting *weights* to `True` is generally a good - way to ensure that the moving window + .. note:: Setting *weights* to `True` is generally a + good way to ensure that the moving window calculations are appropriately weighted - according to the field construct's metadata. In - this case, if it is not possible to create - weights for the selected *axis* then an - exception will be raised. + according to the field construct's + metadata. In this case, if it is not + possible to create weights for the selected + *axis* then an exception will be raised. *Parameter example:* To specify weights on the cell sizes of the selected @@ -14080,10 +14118,10 @@ def moving_window( mode: `str`, optional The *mode* parameter determines how the input array is extended when the filter overlaps an array border. The - default value is ``'constant'`` or, if the dimension being - convolved is cyclic (as ascertained by the `iscyclic` - method), ``'wrap'``. The valid values and their behaviours - are as follows: + default value is ``'constant'`` or, if the dimension + being convolved is cyclic (as ascertained by the + `iscyclic` method), ``'wrap'``. The valid values and + their behaviours are as follows: ============== ========================== =========================== *mode* Description Behaviour @@ -14110,75 +14148,78 @@ def moving_window( opposite edge. ============== ========================== =========================== - The position of the window relative to each value can be - changed by using the *origin* parameter. + The position of the window relative to each value can + be changed by using the *origin* parameter. cval: scalar, optional Value to fill past the edges of the array if *mode* is ``'constant'``. Ignored for other modes. Defaults to `None`, in which case the edges of the array will be - filled with missing data. The only other valid value is - ``0``. + filled with missing data. The only other valid value + is ``0``. *Parameter example:* - To extend the input by filling all values beyond the - edge with zero: ``cval=0`` + To extend the input by filling all values beyond + the edge with zero: ``cval=0`` origin: `int`, optional - Controls the placement of the filter. Defaults to 0, which - is the centre of the window. If the window size, defined - by the *window_size* parameter, is even then then a value - of 0 defines the index defined by ``window_size/2 -1``. + Controls the placement of the filter. Defaults to 0, + which is the centre of the window. If the window size, + defined by the *window_size* parameter, is even then + then a value of 0 defines the index defined by + ``window_size/2 -1``. *Parameter example:* - For a window size of 5, if ``origin=0`` then the window - is centred on each point. 
If ``origin=-2`` then the - window is shifted to include the previous four - points. If ``origin=1`` then the window is shifted to - include the previous point and the and the next three - points. + For a window size of 5, if ``origin=0`` then the + window is centred on each point. If ``origin=-2`` + then the window is shifted to include the previous + four points. If ``origin=1`` then the window is + shifted to include the previous point and the and + the next three points. radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: ``f.radius(radius)``. See the `cf.Field.radius` for details. - By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constucts, then the + default radius taken as 6371229 metres. great_circle: `bool`, optional - If True then allow, if required, the derivation of i) area - weights from polygon geometry cells by assuming that each - cell part is a spherical polygon composed of great circle - segments; and ii) and the derivation of line-length - weights from line geometry cells by assuming that each - line part is composed of great circle segments. + If True then allow, if required, the derivation of i) + area weights from polygon geometry cells by assuming + that each cell part is a spherical polygon composed of + great circle segments; and ii) and the derivation of + line-length weights from line geometry cells by + assuming that each line part is composed of great + circle segments. scale: number, optional - If set to a positive number then scale the weights so that - they are less than or equal to that number. By default the - weights are scaled to lie between 0 and 1 (i.e. *scale* - is 1). + If set to a positive number then scale the weights so + that they are less than or equal to that number. By + default the weights are scaled to lie between 0 and 1 + (i.e. *scale* is 1). - Ignored if the moving window method is not weighted. The - *scale* parameter can not be set for moving integrals. + Ignored if the moving window method is not + weighted. The *scale* parameter can not be set for + moving integrals. *Parameter example:* - To scale all weights so that they lie between 0 and 0.5: - ``scale=0.5``. + To scale all weights so that they lie between 0 and + 0.5: ``scale=0.5``. {{inplace: `bool`, optional}} :Returns: `Field` or `None` - The field construct of moving window values, or `None` if - the operation was in-place. + The field construct of moving window values, or `None` + if the operation was in-place. **Examples:** @@ -14211,7 +14252,8 @@ def moving_window( >>> f.iscyclic('Y') False - Create a weighted 3-point running mean for the cyclic 'X' axis: + Create a weighted 3-point running mean for the cyclic 'X' + axis: >>> g = f.moving_window('mean', 3, axis='X', weights=True) >>> print(g) @@ -14238,11 +14280,13 @@ def moving_window( [225. 360.] [270. 
405.]] - Create an unweighted 3-point running mean for the cyclic 'X' axis: + Create an unweighted 3-point running mean for the cyclic 'X' + axis: >>> g = f.moving_window('mean', 3, axis='X') - Create an weighted 4-point running integral for the non-cyclic 'Y' axis: + Create an weighted 4-point running integral for the non-cyclic + 'Y' axis: >>> g = f.moving_window('integral', 4, axis='Y', weights=True) >>> g.Units @@ -14375,7 +14419,7 @@ def moving_window( # Add a cell method if f.domain_axis(axis).get_size() > 1 or method == "integral": f._update_cell_methods( - method=method, domain_axes=f.domain_axes(view=True)(axis) + method=method, domain_axes=f.domain_axes(axis, todict=True) ) return f @@ -14976,7 +15020,7 @@ def cumsum( # Add a cell method f._update_cell_methods( - method="sum", domain_axes=f.domain_axes(view=True)(axis_key) + method="sum", domain_axes=f.domain_axes(axis_key, todict=True) ) return f @@ -15184,16 +15228,12 @@ def anchor( else: f = _inplace_enabled_define_and_cleanup(self) - dim = ( - f.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="and", view=True) - .value(default=None) - ) + dim = f.dimension_coordinate(filter_by_axis=(axis,), + axis_mode="exact", default=None) if dim is None: raise ValueError( - "Can't shift non-cyclic {!r} axis".format( - f.constructs.domain_axis_identity(axis) - ) + "Can't shift non-cyclic " + f"{f.constructs.domain_axis_identity(axis)!r} axis" ) period = dim.period() @@ -15230,11 +15270,9 @@ def anchor( if not dry_run: f.roll(axis, shift, inplace=True) - dim = ( - f.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="and", view=True) - .value() - ) + dim = f.dimension_coordinate(filter_by_axis=(axis,), + axis_mode="exact") + n = ((value - dim.data[0]) / period).ceil() else: # Adjust value so it's in the range (c[0]-period, c[0]] @@ -15246,7 +15284,9 @@ def anchor( if not dry_run: f.roll(axis, shift, inplace=True) + # TODO should this call be like the one above? dim = f.dimension_coordinate(axis) + n = ((value - dim.data[0]) / period).floor() if dry_run: @@ -15385,7 +15425,7 @@ def autocyclic(self, verbose=None): >>> f.autocyclic() """ -# dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() + # dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() dims = self.dimension_coordinates("X", todict=True) @@ -15821,15 +15861,19 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): f = _inplace_enabled_define_and_cleanup(self) - size_1_axes = self.domain_axes(view=True).filter_by_size(1, view=True) + size_1_axes = self.domain_axes(filter_by_size=(1,), todict=True) for axis in set(size_1_axes).difference(self.get_data_axes()): f.insert_dimension(axis, position=0, inplace=True) return f - def auxiliary_coordinate( self, identity=None, - default=ValueError(), key=False, axis_mode=None, - property_mode=None, **filters ): + def auxiliary_coordinate( + self, + identity=None, + default=ValueError(), + key=False, + **filter_kwargs, + ): """Return an auxiliary coordinate construct, or its key. .. 
versionadded:: 3.0.0 @@ -15933,14 +15977,18 @@ def auxiliary_coordinate( self, identity=None, TODO """ - return self._construct(("auxiliary_coordinate",), - "auxiliary_coordinate", - identity=identity, key=key, - default=default, axis_mode=axis_mode, - property_mode=property_mode, **filters) - - def construct(self, identity=None, default=ValueError(), - key=False, **filter_kwargs): + return self._construct( + ("auxiliary_coordinate",), + "auxiliary_coordinate", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) + + def construct( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Select a metadata construct by its identity. .. seealso:: `del_construct`, `get_construct`, `has_construct`, @@ -16055,12 +16103,18 @@ def construct(self, identity=None, default=ValueError(), TypeError: No height coordinates """ - return self._construct((), "construct", identity=identity, - key=key, default=default, - **filter_kwargs) + return self._construct( + (), + "construct", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) - def domain_ancillary(self, identity=None, default=ValueError(), - key=False, **filter_kwargs): + def domain_ancillary( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Return a domain ancillary construct, or its key. .. versionadded:: 3.0.0 @@ -16162,13 +16216,18 @@ def domain_ancillary(self, identity=None, default=ValueError(), TODO """ - return self._construct(("domain_ancillary",), - "domain_ancillary", identity=identity, - key=key, default=default, - **filter_kwargs) + return self._construct( + ("domain_ancillary",), + "domain_ancillary", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) - def cell_measure(self, identity=None, default=ValueError(), - key=False, **filter_kwargs): + def cell_measure( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Select a cell measure construct by its identity. .. versionadded:: 3.0.0 @@ -16266,9 +16325,14 @@ def cell_measure(self, identity=None, default=ValueError(), TODO """ - return self._construct(("cell_measure",), "cell_meausure", - identity=identity, key=key, - default=default, **filter_kwargs) + return self._construct( + ("cell_measure",), + "cell_meausure", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) def cell_method(self, identity=None, default=ValueError(), key=False): """Select a cell method construct by its identity. @@ -16376,8 +16440,9 @@ def cell_method(self, identity=None, default=ValueError(), key=False): return c.value(default=default) - def coordinate(self, identity=None, default=ValueError(), - key=False, **filter_kwargs): + def coordinate( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Return a dimension coordinate construct, or its key. .. 
versionadded:: 3.0.0 @@ -16475,13 +16540,28 @@ def coordinate(self, identity=None, default=ValueError(), TODO """ - return self._construct(("dimension_coordinate", - "auxiliary_coordinate"), "coordinate", - identity=identity, key=key, - default=default, **filter_kwargs) + c = self._construct( + ("dimension_coordinate", "auxiliary_coordinate"), + "coordinate", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) + + if identity is None: + return c - def coordinate_reference( self, identity=None, - default=ValueError(), key=False, **filter_kwargs ): + if not c: + da_key = self.domain_axis(identity, key=True, default=None) + if da_key is not None: + c = self.coordinates.filter_by_axis("exact", da_key) + + if len TODO + + def coordinate_reference( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Return a coordinate reference construct, or its key. .. versionadded:: 3.0.0 @@ -16573,13 +16653,18 @@ def coordinate_reference( self, identity=None, TODO """ - return self._construct(("coordinate_reference",), - "coordinate_reference", - identity=identity, key=key, - default=default, **filter_kwargs) + return self._construct( + ("coordinate_reference",), + "coordinate_reference", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) - def field_ancillary(self, identity=None, default=ValueError(), - key=False, **filter_kwargs): + def field_ancillary( + self, identity=None, default=ValueError(), key=False, **filter_kwargs + ): """Return a field ancillary construct, or its key. .. versionadded:: 3.0.0 @@ -16682,13 +16767,18 @@ def field_ancillary(self, identity=None, default=ValueError(), TODO """ - return self._construct(("field_ancillary",), - "field_ancillary", identity=identity, - key=key, default=default, - **filter_kwargs) + return self._construct( + ("field_ancillary",), + "field_ancillary", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) - def dimension_coordinate( self, identity=None, key=False, - default=ValueError(), **filter_kwargs ): + def dimension_coordinate( + self, identity=None, key=False, default=ValueError(), **filter_kwargs + ): """Return a dimension coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -16788,16 +16878,25 @@ def dimension_coordinate( self, identity=None, key=False, TODO """ - return self._construct(("dimension_coordinate",), - "dimension_coordinate", - identity=identity, key=key, - default=default, **filter_kwargs) - - def _construct(self, _ctypes, _method, identity=None, key=None, - default=None, **filter_kwargs): - """ TODO. + return self._construct( + ("dimension_coordinate",), + "dimension_coordinate", + identity=identity, + key=key, + default=default, + **filter_kwargs, + ) - """ + def _construct( + self, + _ctypes, + _method, + identity=None, + key=None, + default=None, + **filter_kwargs, + ): + """TODO.""" if not _ctypes: kwargs = filter_kwargs else: @@ -16813,15 +16912,15 @@ def _construct(self, _ctypes, _method, identity=None, key=None, ) kwargs.update(filter_kwargs) - + if key: - return super().construct_key(identity, default=default, - **kwargs) + return super().construct_key(identity, default=default, **kwargs) return super().construct(identity, default=default, **kwargs) - - def domain_axis(self, identity=None, key=False, - default=ValueError(), **filter_kwargs): + + def domain_axis( + self, identity=None, key=False, default=ValueError(), **filter_kwargs + ): """Return a domain axis construct, or its key. .. 
versionadded:: 3.0.0 @@ -16923,9 +17022,14 @@ def domain_axis(self, identity=None, key=False, # TODO consider using filter_by_key identity = da_key - c = self._construct(("domain_axis",), "domain_axis", - identity=identity, key=key, default=None, - **filter_kwargs) + c = self._construct( + ("domain_axis",), + "domain_axis", + identity=identity, + key=key, + default=None, + **filter_kwargs, + ) if c is not None: return c @@ -16935,12 +17039,11 @@ def domain_axis(self, identity=None, key=False, return da_key return self.constructs[da_key] - + return self._default( - default, - "No unique domain axis construct is identifable" + default, "No unique domain axis construct is identifable" ) - + def domain_axis_position(self, identity): """Return the position in the data of a domain axis construct. @@ -17294,10 +17397,10 @@ def set_construct( if construct_type == "dimension_coordinate": data_axes = self.constructs.data_axes() -# dimension_coordinates = self.dimension_coordinates(todict=True) -# for dim, dim_axes in tuple( -# dimension_coordinates.data_axes().items() -# ): + # dimension_coordinates = self.dimension_coordinates(todict=True) + # for dim, dim_axes in tuple( + # dimension_coordinates.data_axes().items() + # ): for dim in self.dimension_coordinates(todict=True): if dim == key: continue @@ -17988,9 +18091,7 @@ def percentile( # ------------------------------------------------------------ if axes: for key, c in ( - self.dimension_coordinates(view=True) - .filter_by_axis(*axes, mode="subset", view=True) - .items() + self.dimension_coordinates(filter_by_axis=axes, axis_mode="subset", todict=True).items() ): c_axes = self.get_data_axes(key) @@ -18013,7 +18114,7 @@ def percentile( out.set_construct(c, axes=c_axes, key=key, copy=False) - # TODO + # TODO optimise constructs access? other_axes = set( [ axis @@ -18027,7 +18128,7 @@ def percentile( # ------------------------------------------------------------ if other_axes: for key, c in self.constructs.filter_by_axis( - *other_axes, mode="subset", view=True + "subset", *other_axes, todict=True ).items(): c_axes = self.get_data_axes(key) out.set_construct(c, axes=c_axes, key=key) @@ -18620,7 +18721,9 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): return f - dim = self.dimension_coordinate(filter_by_axis=(axis,), axis_mode="exact", default=None) + dim = self.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None + ) if dim is not None and dim.period() is None: raise ValueError( f"Can't roll: {dim.identity()!r} axis has non-periodic " diff --git a/cf/functions.py b/cf/functions.py index 0280684fed..bade2232f9 100644 --- a/cf/functions.py +++ b/cf/functions.py @@ -1619,7 +1619,7 @@ def close_files(file_format=None): _file_to_fh[file_format].clear() - + def close_one_file(file_format=None): """Close an arbitrary open file containing a sub-array of a data array. 
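The hunks above replace chained ``.filter_by_*(..., view=True)`` selections with single calls that take the filter keywords directly, returning either a plain dictionary (``todict=True``) or a single construct from the ``*_coordinate``/``domain_axis`` methods. A minimal sketch of the new call pattern, using only keywords that appear in the updated call sites (``cf.example_field`` and the axis size are illustrative only):

    >>> import cf
    >>> f = cf.example_field(0)
    >>> axes = f.domain_axes(todict=True)  # a plain dict, not a lazy Constructs view
    >>> da_key = f.domain_axis(filter_by_size=(8,), key=True, default=None)
    >>> dim = f.dimension_coordinate(
    ...     filter_by_axis=(da_key,), axis_mode="exact", default=None
    ... )
    >>> cm = f.cell_measures(
    ...     filter_by_axis=(da_key,), axis_mode="or", todict=True
    ... )

Passing ``default=None`` (rather than the default ``ValueError()``) makes the single-construct methods return ``None`` when nothing, or more than one construct, matches.
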
diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 1f766e393b..41299b888a 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -76,7 +76,7 @@ def T(self): """ if self.Units.isreftime: return True - + axis = self.get_property("axis", None) if axis is not None: return axis == "T" @@ -130,16 +130,13 @@ def X(self): """ standard_name = self.get_property("standard_name", None) - if ( - standard_name is not None - and standard_name in ( - "longitude", - "projection_x_coordinate", - "grid_longitude" - ) + if standard_name is not None and standard_name in ( + "longitude", + "projection_x_coordinate", + "grid_longitude", ): return True - + if self.Units.islongitude: return True @@ -182,23 +179,20 @@ def Y(self): """ standard_name = self.get_property("standard_name", None) - if ( - standard_name is not None - and standard_name in ( - "latitude", - "projection_y_coordinate", - "grid_latitude", - ) + if standard_name is not None and standard_name in ( + "latitude", + "projection_y_coordinate", + "grid_latitude", ): return True - + if self.Units.islatitude: return True axis = self.get_property("axis", None) if axis is not None: return axis == "Y" - + # Still here? Then check the bounds. if self.has_bounds(): bounds = self.get_bounds(None) @@ -257,23 +251,20 @@ def Z(self): """ standard_name = self.get_property("standard_name", None) - if ( - standard_name is not None - and standard_name in ( - "atmosphere_ln_pressure_coordinate", - "atmosphere_sigma_coordinate", - "atmosphere_hybrid_sigma_pressure_coordinate", - "atmosphere_hybrid_height_coordinate", - "atmosphere_sleve_coordinate", - "ocean_sigma_coordinate", - "ocean_s_coordinate", - "ocean_s_coordinate_g1", - "ocean_s_coordinate_g2", - "ocean_sigma_z_coordinate", - "ocean_double_sigma_coordinate", - ) + if standard_name is not None and standard_name in ( + "atmosphere_ln_pressure_coordinate", + "atmosphere_sigma_coordinate", + "atmosphere_hybrid_sigma_pressure_coordinate", + "atmosphere_hybrid_height_coordinate", + "atmosphere_sleve_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", + "ocean_sigma_z_coordinate", + "ocean_double_sigma_coordinate", ): - return True + return True units = self.Units if units.ispressure: @@ -282,11 +273,11 @@ def Z(self): positive = self.get_property("positive", None) if positive is not None: return str(positive).lower() in ("up", "down") - + axis = self.get_property("axis", None) if axis is not None: return axis == "Z" - + if units and units.units in ("level", "layer" "sigma_level"): return True @@ -529,7 +520,7 @@ def identities(self, generator=False, ctypes="XTYZ", **kwargs): .. versionadded:: 3.0.0 .. 
seealso:: `id`, `identity` - + :Parameters: {{generator: `bool`, optional}} @@ -553,7 +544,7 @@ def identities(self, generator=False, ctypes="XTYZ", **kwargs): *Parameter example:* ``ctype=('T', 'X')`` - + :Returns: `list` @@ -584,7 +575,7 @@ def _ctypes_iter(self, ctypes): break identities = super().identities(generator=True) - + g = chain(identities, _ctypes_iter(self, ctypes)) if generator: return g diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index d7388d8b3d..3089c10158 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -955,9 +955,7 @@ def test_Field__add__(self): b = g + f axis = a.domain_axis("grid_longitude", key=1) - for key in a.field_ancillaries( - filter_by_axis=(axis,), axis_mode="or" - ): + for key in a.field_ancillaries(filter_by_axis=(axis,), axis_mode="or"): a.del_construct(key) for key in a.cell_measures(filter_by_axis=(axis,), axis_mode="or"): @@ -2003,9 +2001,7 @@ def test_Field_moving_window(self): "mean", window_size=3, axis="X", weights=weights ) - self.assertEqual( - len(g.cell_methods()), len(f.cell_methods()) + 1 - ) + self.assertEqual(len(g.cell_methods()), len(f.cell_methods()) + 1) def test_Field_derivative(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -2154,25 +2150,27 @@ def test_Field_coordinate(self): "dimensioncoordinate1", ): if identity == "domainaxis2": - key, c = f.dimension_coordinates( - filter_by_axis=(identity,), axis_mode="exact", - todict=True + key, c = f.dimension_coordinates( + filter_by_axis=(identity,), axis_mode="exact", todict=True ).popitem() - -# key = ( -# f.dimension_coordinates() -# .filter_by_axis(identity, mode="and") -# .key() -# ) -# c = ( -# f.dimension_coordinates(view=True) -# .filter_by_axis(identity, mode="and") -# .value() -# ) + + # key = ( + # f.dimension_coordinates() + # .filter_by_axis(identity, mode="and") + # .key() + # ) + # c = ( + # f.dimension_coordinates(view=True) + # .filter_by_axis(identity, mode="and") + # .value() + # ) else: key = f.construct_key(identity) c = f.construct(identity) + print(identity, key, repr(c)) + print(f.coordinate(identity)) + self.assertTrue(f.coordinate(identity).equals(c, verbose=2)) self.assertTrue(f.coordinate(identity, key=True) == key) @@ -2287,19 +2285,18 @@ def test_Field_dimension_coordinate(self): ): if identity == "domainaxis2": key, c = f.dimension_coordinates( - filter_by_axis=(identity,), axis_mode="exact", - todict=True + filter_by_axis=(identity,), axis_mode="exact", todict=True ).popitem() -# key = ( -# f.dimension_coordinates(view=True) -# .filter_by_axis(identity, mode="and") -# .key() -# ) -# c = ( -# f.dimension_coordinates(view=True) -# .filter_by_axis(identity, mode="and") -# .value() -# ) + # key = ( + # f.dimension_coordinates(view=True) + # .filter_by_axis(identity, mode="and") + # .key() + # ) + # c = ( + # f.dimension_coordinates(view=True) + # .filter_by_axis(identity, mode="and") + # .value() + # ) elif identity == "X": key = f.construct_key("grid_longitude") c = f.construct("grid_longitude") From 88d95680c22c56d756c63828457a1c8588c4f706 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 1 Apr 2021 17:49:02 +0100 Subject: [PATCH 14/53] devs --- cf/field.py | 261 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 156 insertions(+), 105 deletions(-) diff --git a/cf/field.py b/cf/field.py index a2d8ed6805..ecb01be0f0 100644 --- a/cf/field.py +++ b/cf/field.py @@ -771,22 +771,20 @@ def analyse_items(self, relaxed_identities=None): continue else: - auxs = self.constructs.chain( - 
"filter_by_type", - ("auxiliary_coordinate",), - "filter_by_axis", - (axis,), - mode="and", - todict=True, + key, aux = self.auxiliary_coordinate( + filter_by_axis=(axis,), + axis_mode="and", # TODO check this "and" + item=True, + default=(None, None) ) # auxs = self.auxiliary_coordinates.filter_by_axis( # axis, mode="exact", todict=True # ) - if len(auxs) == 1: + if aux is not None: #len(auxs) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly # one 1-d auxiliary coordinate, so that will do. - key, aux = auxs.popitem() +# key, aux = auxs.popitem() identity = aux.identity(strict=True, default=None) @@ -2569,10 +2567,11 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) if axes_sizes.count(n) == 1: - domain_axes = self.domain_axes( - filter_by_size=(n,), todict=True - ) - key, _ = domain_axes.popitem() +# domain_axes = self.domain_axes( +# filter_by_size=(n,), todict=True +# ) +# key, _ = domain_axes.popitem() + key = self.domain_axis(filter_by_size=(n,), key=True) axes.append(key) # domain_axes.filter_by_size(n, view=True).key() # ) @@ -4166,29 +4165,35 @@ def _weights_area_XY( """ # dimension_coordinates = self.dimension_coordinates(view=True) - xdims = self.dimension_coordinates("X", todict=True) - ydims = self.dimension_coordinates("Y", todict=True) +# xdims = self.dimension_coordinates("X", todict=True) +# ydims = self.dimension_coordinates("Y", todict=True) + + xkey, xcoord = self.dimension_coordinate("X", item=True, + default=(None, None)) + ykey, ycoord = self.dimension_coordinate("Y", item=True, + default=(None, None)) - if not (xdims and ydims): +# if not (xdims and ydims): + if xcoord is None and ycoord is None: if auto: return raise ValueError( - "Insufficient coordinate constructs for calculating " + "No unique coordinate constructs for calculating " "area weights" ) - xkey, xcoord = xdims.popitem() - ykey, ycoord = ydims.popitem() - - if xdims or ydims: - if auto: - return - - raise ValueError( - "Ambiguous coordinate constructs for calculating area " - "weights" - ) +# xkey, xcoord = xdims.popitem() +# ykey, ycoord = ydims.popitem() +# +# if xdims or ydims: +# if auto: +# return +# +# raise ValueError( +# "Ambiguous coordinate constructs for calculating area " +# "weights" +# ) if xcoord.Units.equivalent( Units("radians") @@ -10569,10 +10574,8 @@ def collapse( axis_mode="exact", default=None, ) - if dc is not None: - _, dc = dc.popitem() - if not dc.has_bounds(): - dc.set_bounds(dc.create_bounds(cellsize=0)) + if dc is not None and not dc.has_bounds(): + dc.set_bounds(dc.create_bounds(cellsize=0)) continue @@ -11614,11 +11617,9 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # E.g. 
group=cf.M() # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) - ) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", + default=None ) if coord is None: raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") @@ -11649,13 +11650,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Chunks of # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None) ) if coord is None: - raise ValueError("TODO dddddd siduhfsuildfhsuil dhfdui ") + raise ValueError("TODO asdas 4444444 dhfdui ") if coord.Units.isreftime: raise ValueError( @@ -11704,16 +11703,11 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # E.g. group=[cf.month(4), cf.month(cf.wi(9, 11))] # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) - ) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None if coord is None: - coord = ( - self.auxiliary_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) + coord = self.auxiliary_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None ) if coord is None: raise ValueError("asdad8777787 TODO") @@ -12863,19 +12857,29 @@ def indices(self, *mode, **kwargs): key = None construct = None else: -# c = constructs.filter_by_identity(identity, view=True) - c = self.constructs.filter( +## c = constructs.filter_by_identity(identity, view=True) +# c = self.constructs.filter( +# filter_by_data=True, +# filter_by_identity=(identity,), +# todict=True +# ) +# if len(c) != 1: +# raise ValueError( +# "Can't find indices: Ambiguous axis or axes: " +# f"{identity!r}" + key, construct = self.construct( + identity, filter_by_data=True, - filter_by_identity=(identity,), - todict=True + item=True, + default=(None, None) ) - if len(c) != 1: + if construct is None: raise ValueError( "Can't find indices: Ambiguous axis or axes: " f"{identity!r}" ) - key, construct = c.popitem() +# key, construct = c.popitem() axes = self.get_data_axes(key) @@ -13459,15 +13463,24 @@ def set_data( axes = [] for n in data_shape: # da = domain_axes.filter_by_size(n, todict=True) - da = f.domain_axes(filter_by_size=(n,), todict=True) - if len(da) != 1: +# da = f.domain_axes(filter_by_size=(n,), todict=True) +# if len(da) != 1: +# raise ValueError( +# "Can't insert data: Ambiguous data shape: " +# f"{data_shape}. " +# "Consider setting the axes parameter." +# ) +# +# da_key, _ = da.popitem() + da_key = f.domain_axis(filter_by_size=(n,), key=True, + default=None) + if da_key is None: #len(da) != 1: raise ValueError( "Can't insert data: Ambiguous data shape: " - f"{data_shape}. " - "Consider setting the axes parameter." + f"{data_shape}. Consider setting the axes parameter." 
) - da_key, _ = da.popitem() +# da_key, _ = da.popitem() axes.append(da_key) else: @@ -15427,12 +15440,13 @@ def autocyclic(self, verbose=None): """ # dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() - dims = self.dimension_coordinates("X", todict=True) + key, dim = self.dimension_coordinate("X", item=True, + default=(None, None)) - if len(dims) != 1: + if dim is None: return False - key, dim = dims.popitem() +# key, dim = dims.popitem() if not dim.Units.islongitude: if dim.get_property("standard_name", None) not in ( @@ -15986,9 +16000,8 @@ def auxiliary_coordinate( **filter_kwargs, ) - def construct( - self, identity=None, default=ValueError(), key=False, **filter_kwargs - ): + def construct( self, identity=None, default=ValueError(), + key=False, item=False, **filter_kwargs ): """Select a metadata construct by its identity. .. seealso:: `del_construct`, `get_construct`, `has_construct`, @@ -16055,6 +16068,8 @@ def construct( key: `bool`, optional If True then return the selected construct key. By default the construct itself is returned. + + item: TODO :Returns: @@ -16108,6 +16123,7 @@ def construct( "construct", identity=identity, key=key, + item=item, default=default, **filter_kwargs, ) @@ -16441,7 +16457,7 @@ def cell_method(self, identity=None, default=ValueError(), key=False): return c.value(default=default) def coordinate( - self, identity=None, default=ValueError(), key=False, **filter_kwargs + self, identity=None, default=ValueError(), key=False, item=False, **filter_kwargs ): """Return a dimension coordinate construct, or its key. @@ -16545,6 +16561,7 @@ def coordinate( "coordinate", identity=identity, key=key, + item=item, default=default, **filter_kwargs, ) @@ -16555,9 +16572,18 @@ def coordinate( if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.coordinates.filter_by_axis("exact", da_key) + c = self.coordinate( + filter_by_axis=(da_key,), axis_mode="exact", + key=key, item=item, + default=None + ) + + if c is not None: + return c - if len TODO + return self._default( + default, f"Can't find exactly one item to return" + ) def coordinate_reference( self, identity=None, default=ValueError(), key=False, **filter_kwargs @@ -16776,9 +16802,8 @@ def field_ancillary( **filter_kwargs, ) - def dimension_coordinate( - self, identity=None, key=False, default=ValueError(), **filter_kwargs - ): + def dimension_coordinate( self, identity=None, key=False, + default=ValueError(), item=False, **filter_kwargs ): """Return a dimension coordinate construct, or its key. .. 
versionadded:: 3.0.0 @@ -16884,6 +16909,7 @@ def dimension_coordinate( identity=identity, key=key, default=default, + item=item, **filter_kwargs, ) @@ -16891,36 +16917,47 @@ def _construct( self, _ctypes, _method, - identity=None, - key=None, - default=None, + identity=None, + key=False, + default=None, + item=False, **filter_kwargs, ): """TODO.""" - if not _ctypes: - kwargs = filter_kwargs + if identity is None: + identities = () else: - # Ensure that filter_by_types is the first filter - # applied, as it's the cheapest - kwargs = {"filter_by_type": _ctypes} + identities = (identity,) - if filter_kwargs: - if "filter_by_type" in filter_kwargs: - raise TypeError( - f"{_method}() got an unexpected keyword argument " - "'filter_by_type'" - ) + filter_kwargs["todict"] = True - kwargs.update(filter_kwargs) + c = self._filter_interface( + _ctypes, + _method, + identities, + **filter_kwargs, + ) - if key: - return super().construct_key(identity, default=default, **kwargs) + n = len(c) + if n == 1: + key, construct = c.popitem() + if key: + return key - return super().construct(identity, default=default, **kwargs) + if item: + return key, construct - def domain_axis( - self, identity=None, key=False, default=ValueError(), **filter_kwargs - ): + return construct + + if not n: + return self._default(default, "Can't return zero items") + + return self._default( + default, f"Can't return more than one ({n}) item" + ) + + def domain_axis( self, identity=None, key=False, + default=ValueError(), item=False, **filter_kwargs ): """Return a domain axis construct, or its key. .. versionadded:: 3.0.0 @@ -17019,7 +17056,8 @@ def domain_axis( "Index does not exist for field construct data dimenions", ) else: - # TODO consider using filter_by_key + # TODO consider using filter_by_key ... 
but I think + # filter_by_identity is faster, though identity = da_key c = self._construct( @@ -17028,6 +17066,7 @@ def domain_axis( identity=identity, key=key, default=None, + item=item **filter_kwargs, ) if c is not None: @@ -17038,7 +17077,12 @@ def domain_axis( if key: return da_key - return self.constructs[da_key] + construct = self.constructs[da_key] + + if item: + return da_key, construct + + return construct return self._default( default, "No unique domain axis construct is identifable" @@ -20815,16 +20859,23 @@ def derivative( if axis is None: raise ValueError("Invalid axis specifier") - dims = self.dimension_coordinates(view=True).filter_by_axis( - axis, mode="exact", view=True + coord = self.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", + default=None ) - len_dims = len(dims) - if not len_dims: - raise ValueError("Invalid axis specifier") - elif len_dims != 1: + if coord is None: raise ValueError("Axis specified is not unique.") - - dckey, coord = dims._dictionary().popitem() + +# dims = self.dimension_coordinates(view=True).filter_by_axis( +# axis, mode="exact", view=True +# ) +# len_dims = len(dims) +# if not len_dims: +# raise ValueError("Invalid axis specifier") +# elif len_dims != 1: +# raise ValueError("Axis specified is not unique.") +# +# dckey, coord = dims._dictionary().popitem() # Get the axis index axis_index = self.get_data_axes().index(axis) From 2101b8d808f7a964d618bc12d68fcf47a78f4e27 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 2 Apr 2021 11:04:37 +0100 Subject: [PATCH 15/53] devs --- cf/field.py | 272 +++++++++++++++++++++++++--------------------------- 1 file changed, 133 insertions(+), 139 deletions(-) diff --git a/cf/field.py b/cf/field.py index ecb01be0f0..7778fc3150 100644 --- a/cf/field.py +++ b/cf/field.py @@ -773,18 +773,18 @@ def analyse_items(self, relaxed_identities=None): else: key, aux = self.auxiliary_coordinate( filter_by_axis=(axis,), - axis_mode="and", # TODO check this "and" + axis_mode="and", # TODO check this "and" item=True, - default=(None, None) + default=(None, None), ) # auxs = self.auxiliary_coordinates.filter_by_axis( # axis, mode="exact", todict=True # ) - if aux is not None: #len(auxs) == 1: + if aux is not None: # len(auxs) == 1: # This axis of the domain does not have a # dimension coordinate but it does have exactly # one 1-d auxiliary coordinate, so that will do. 
-# key, aux = auxs.popitem() + # key, aux = auxs.popitem() identity = aux.identity(strict=True, default=None) @@ -2567,11 +2567,13 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) if axes_sizes.count(n) == 1: -# domain_axes = self.domain_axes( -# filter_by_size=(n,), todict=True -# ) -# key, _ = domain_axes.popitem() - key = self.domain_axis(filter_by_size=(n,), key=True) + # domain_axes = self.domain_axes( + # filter_by_size=(n,), todict=True + # ) + # key, _ = domain_axes.popitem() + key = self.domain_axis( + filter_by_size=(n,), key=True + ) axes.append(key) # domain_axes.filter_by_size(n, view=True).key() # ) @@ -4165,15 +4167,17 @@ def _weights_area_XY( """ # dimension_coordinates = self.dimension_coordinates(view=True) -# xdims = self.dimension_coordinates("X", todict=True) -# ydims = self.dimension_coordinates("Y", todict=True) + # xdims = self.dimension_coordinates("X", todict=True) + # ydims = self.dimension_coordinates("Y", todict=True) - xkey, xcoord = self.dimension_coordinate("X", item=True, - default=(None, None)) - ykey, ycoord = self.dimension_coordinate("Y", item=True, - default=(None, None)) + xkey, xcoord = self.dimension_coordinate( + "X", item=True, default=(None, None) + ) + ykey, ycoord = self.dimension_coordinate( + "Y", item=True, default=(None, None) + ) -# if not (xdims and ydims): + # if not (xdims and ydims): if xcoord is None and ycoord is None: if auto: return @@ -4183,17 +4187,17 @@ def _weights_area_XY( "area weights" ) -# xkey, xcoord = xdims.popitem() -# ykey, ycoord = ydims.popitem() -# -# if xdims or ydims: -# if auto: -# return -# -# raise ValueError( -# "Ambiguous coordinate constructs for calculating area " -# "weights" -# ) + # xkey, xcoord = xdims.popitem() + # ykey, ycoord = ydims.popitem() + # + # if xdims or ydims: + # if auto: + # return + # + # raise ValueError( + # "Ambiguous coordinate constructs for calculating area " + # "weights" + # ) if xcoord.Units.equivalent( Units("radians") @@ -11618,8 +11622,8 @@ def _group_weights(weights, iaxis, index): # E.g. 
group=cf.M() # ---------------------------------------------------- coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", - default=None ) + filter_by_axis=(axis,), axis_mode="exact", default=None + ) if coord is None: raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") @@ -11651,7 +11655,7 @@ def _group_weights(weights, iaxis, index): # Chunks of # ---------------------------------------------------- coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None) + filter_by_axis=(axis,), axis_mode="exact", default=None ) if coord is None: raise ValueError("TODO asdas 4444444 dhfdui ") @@ -11705,6 +11709,7 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- coord = self.dimension_coordinate( filter_by_axis=(axis,), axis_mode="exact", default=None + ) if coord is None: coord = self.auxiliary_coordinate( filter_by_axis=(axis,), axis_mode="exact", default=None @@ -12846,7 +12851,7 @@ def indices(self, *mode, **kwargs): indices = [slice(None)] * self.ndim domain_axes = self.domain_axes(todict=True) -# constructs = self.constructs.filter_by_data(view=True) + # constructs = self.constructs.filter_by_data(view=True) parsed = {} unique_axes = set() @@ -12857,21 +12862,21 @@ def indices(self, *mode, **kwargs): key = None construct = None else: -## c = constructs.filter_by_identity(identity, view=True) -# c = self.constructs.filter( -# filter_by_data=True, -# filter_by_identity=(identity,), -# todict=True -# ) -# if len(c) != 1: -# raise ValueError( -# "Can't find indices: Ambiguous axis or axes: " -# f"{identity!r}" + ## c = constructs.filter_by_identity(identity, view=True) + # c = self.constructs.filter( + # filter_by_data=True, + # filter_by_identity=(identity,), + # todict=True + # ) + # if len(c) != 1: + # raise ValueError( + # "Can't find indices: Ambiguous axis or axes: " + # f"{identity!r}" key, construct = self.construct( identity, filter_by_data=True, item=True, - default=(None, None) + default=(None, None), ) if construct is None: raise ValueError( @@ -12879,7 +12884,7 @@ def indices(self, *mode, **kwargs): f"{identity!r}" ) -# key, construct = c.popitem() + # key, construct = c.popitem() axes = self.get_data_axes(key) @@ -13463,24 +13468,25 @@ def set_data( axes = [] for n in data_shape: # da = domain_axes.filter_by_size(n, todict=True) -# da = f.domain_axes(filter_by_size=(n,), todict=True) -# if len(da) != 1: -# raise ValueError( -# "Can't insert data: Ambiguous data shape: " -# f"{data_shape}. " -# "Consider setting the axes parameter." -# ) -# -# da_key, _ = da.popitem() - da_key = f.domain_axis(filter_by_size=(n,), key=True, - default=None) - if da_key is None: #len(da) != 1: + # da = f.domain_axes(filter_by_size=(n,), todict=True) + # if len(da) != 1: + # raise ValueError( + # "Can't insert data: Ambiguous data shape: " + # f"{data_shape}. " + # "Consider setting the axes parameter." + # ) + # + # da_key, _ = da.popitem() + da_key = f.domain_axis( + filter_by_size=(n,), key=True, default=None + ) + if da_key is None: # len(da) != 1: raise ValueError( "Can't insert data: Ambiguous data shape: " f"{data_shape}. Consider setting the axes parameter." 
) -# da_key, _ = da.popitem() + # da_key, _ = da.popitem() axes.append(da_key) else: @@ -13878,7 +13884,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): n = 0 - self_cell_methods = self.cell_methods() # TODO + self_cell_methods = self.cell_methods() # TODO for identity in identities: cms = False @@ -13905,7 +13911,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): "cell_method" }: key = tuple(self_cell_methods.ordered())[-1] - filtered = self_cell_methods(key)(identity) # TODO + filtered = self_cell_methods(key)(identity) # TODO if not filtered: if not OR: return False @@ -15241,8 +15247,9 @@ def anchor( else: f = _inplace_enabled_define_and_cleanup(self) - dim = f.dimension_coordinate(filter_by_axis=(axis,), - axis_mode="exact", default=None) + dim = f.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None + ) if dim is None: raise ValueError( "Can't shift non-cyclic " @@ -15283,8 +15290,9 @@ def anchor( if not dry_run: f.roll(axis, shift, inplace=True) - dim = f.dimension_coordinate(filter_by_axis=(axis,), - axis_mode="exact") + dim = f.dimension_coordinate( + filter_by_axis=(axis,), axis_mode="exact" + ) n = ((value - dim.data[0]) / period).ceil() else: @@ -15440,13 +15448,14 @@ def autocyclic(self, verbose=None): """ # dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() - key, dim = self.dimension_coordinate("X", item=True, - default=(None, None)) + key, dim = self.dimension_coordinate( + "X", item=True, default=(None, None) + ) if dim is None: return False -# key, dim = dims.popitem() + # key, dim = dims.popitem() if not dim.Units.islongitude: if dim.get_property("standard_name", None) not in ( @@ -16000,8 +16009,14 @@ def auxiliary_coordinate( **filter_kwargs, ) - def construct( self, identity=None, default=ValueError(), - key=False, item=False, **filter_kwargs ): + def construct( + self, + identity=None, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): """Select a metadata construct by its identity. .. seealso:: `del_construct`, `get_construct`, `has_construct`, @@ -16062,13 +16077,17 @@ def construct( self, identity=None, default=ValueError(), default: optional Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + can not be found. + + {{default Exception}} + + If the *default* is `None`, or if *item* is True and + *default* is a 2-tuple of `Ǹone`s, then TODO key: `bool`, optional If True then return the selected construct key. By default the construct itself is returned. - + item: TODO :Returns: @@ -16457,7 +16476,12 @@ def cell_method(self, identity=None, default=ValueError(), key=False): return c.value(default=default) def coordinate( - self, identity=None, default=ValueError(), key=False, item=False, **filter_kwargs + self, + identity=None, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, ): """Return a dimension coordinate construct, or its key. 
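The ``item`` keyword being threaded through the selection methods above returns the construct key and the construct together as a 2-tuple, replacing the earlier ``popitem()``-on-a-filtered-dictionary idiom. A short sketch of the intended usage, based on the call sites in this patch series (the field ``f`` and the bounds handling are illustrative):

    >>> key, dim = f.dimension_coordinate(
    ...     "X", item=True, default=(None, None)
    ... )
    >>> if dim is not None and not dim.has_bounds():
    ...     dim.set_bounds(dim.create_bounds(cellsize=0))

Giving ``default=(None, None)`` keeps the tuple unpacking safe when no unique construct can be found.
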
@@ -16573,9 +16597,11 @@ def coordinate( da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: c = self.coordinate( - filter_by_axis=(da_key,), axis_mode="exact", - key=key, item=item, - default=None + filter_by_axis=(da_key,), + axis_mode="exact", + key=key, + item=item, + default=None, ) if c is not None: @@ -16802,8 +16828,14 @@ def field_ancillary( **filter_kwargs, ) - def dimension_coordinate( self, identity=None, key=False, - default=ValueError(), item=False, **filter_kwargs ): + def dimension_coordinate( + self, + identity=None, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): """Return a dimension coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -16913,51 +16945,14 @@ def dimension_coordinate( self, identity=None, key=False, **filter_kwargs, ) - def _construct( + def domain_axis( self, - _ctypes, - _method, - identity=None, - key=False, - default=None, - item=False, + identity=None, + key=False, + default=ValueError(), + item=False, **filter_kwargs, ): - """TODO.""" - if identity is None: - identities = () - else: - identities = (identity,) - - filter_kwargs["todict"] = True - - c = self._filter_interface( - _ctypes, - _method, - identities, - **filter_kwargs, - ) - - n = len(c) - if n == 1: - key, construct = c.popitem() - if key: - return key - - if item: - return key, construct - - return construct - - if not n: - return self._default(default, "Can't return zero items") - - return self._default( - default, f"Can't return more than one ({n}) item" - ) - - def domain_axis( self, identity=None, key=False, - default=ValueError(), item=False, **filter_kwargs ): """Return a domain axis construct, or its key. .. versionadded:: 3.0.0 @@ -17066,7 +17061,7 @@ def domain_axis( self, identity=None, key=False, identity=identity, key=key, default=None, - item=item + item=item, **filter_kwargs, ) if c is not None: @@ -17078,11 +17073,11 @@ def domain_axis( self, identity=None, key=False, return da_key construct = self.constructs[da_key] - + if item: - return da_key, construct + return da_key, construct - return construct + return construct return self._default( default, "No unique domain axis construct is identifable" @@ -18134,9 +18129,9 @@ def percentile( # axes # ------------------------------------------------------------ if axes: - for key, c in ( - self.dimension_coordinates(filter_by_axis=axes, axis_mode="subset", todict=True).items() - ): + for key, c in self.dimension_coordinates( + filter_by_axis=axes, axis_mode="subset", todict=True + ).items(): c_axes = self.get_data_axes(key) c = c.copy() @@ -18172,7 +18167,7 @@ def percentile( # ------------------------------------------------------------ if other_axes: for key, c in self.constructs.filter_by_axis( - "subset", *other_axes, todict=True + "subset", *other_axes, todict=True ).items(): c_axes = self.get_data_axes(key) out.set_construct(c, axes=c_axes, key=key) @@ -20860,22 +20855,21 @@ def derivative( raise ValueError("Invalid axis specifier") coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", - default=None + filter_by_axis=(axis,), axis_mode="exact", default=None ) if coord is None: raise ValueError("Axis specified is not unique.") - -# dims = self.dimension_coordinates(view=True).filter_by_axis( -# axis, mode="exact", view=True -# ) -# len_dims = len(dims) -# if not len_dims: -# raise ValueError("Invalid axis specifier") -# elif len_dims != 1: -# raise ValueError("Axis specified is not unique.") -# -# dckey, coord = 
dims._dictionary().popitem() + + # dims = self.dimension_coordinates(view=True).filter_by_axis( + # axis, mode="exact", view=True + # ) + # len_dims = len(dims) + # if not len_dims: + # raise ValueError("Invalid axis specifier") + # elif len_dims != 1: + # raise ValueError("Axis specified is not unique.") + # + # dckey, coord = dims._dictionary().popitem() # Get the axis index axis_index = self.get_data_axes().index(axis) From 36d88a13cd81bbd8fd823848c05cd03507c740b9 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Sat, 3 Apr 2021 20:33:24 +0100 Subject: [PATCH 16/53] devs --- cf/field.py | 61 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/cf/field.py b/cf/field.py index 7778fc3150..9fd6088fa2 100644 --- a/cf/field.py +++ b/cf/field.py @@ -16477,13 +16477,13 @@ def cell_method(self, identity=None, default=ValueError(), key=False): def coordinate( self, - identity=None, + *identity, default=ValueError(), key=False, item=False, **filter_kwargs, ): - """Return a dimension coordinate construct, or its key. + """Return a dimension or auxiliary coordinate construct, or its key. .. versionadded:: 3.0.0 @@ -16583,7 +16583,7 @@ def coordinate( c = self._construct( ("dimension_coordinate", "auxiliary_coordinate"), "coordinate", - identity=identity, + identity, key=key, item=item, default=default, @@ -16596,24 +16596,34 @@ def coordinate( if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: - c = self.coordinate( - filter_by_axis=(da_key,), - axis_mode="exact", - key=key, - item=item, - default=None, + return self._default( + default, f"Can't find exactly one item to return" ) - if c is not None: - return c + c = self._construct( + ("dimension_coordinate", "auxiliary_coordinate"), + "coordinate", + identity, + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact" + ) - return self._default( - default, f"Can't find exactly one item to return" + c = self.coordinate( + filter_by_axis=(da_key,), + axis_mode="exact", + key=key, + item=item, + default=None, ) + + if c is not None: + return c - def coordinate_reference( - self, identity=None, default=ValueError(), key=False, **filter_kwargs - ): + def coordinate_reference( self, *identity, default=ValueError(), + key=False, item=False, **filter_kwargs ): """Return a coordinate reference construct, or its key. .. versionadded:: 3.0.0 @@ -16708,15 +16718,15 @@ def coordinate_reference( return self._construct( ("coordinate_reference",), "coordinate_reference", - identity=identity, + identity, key=key, default=default, + item=item, **filter_kwargs, ) - def field_ancillary( - self, identity=None, default=ValueError(), key=False, **filter_kwargs - ): + def field_ancillary( self, *identity, default=ValueError(), + key=False, item=False, **filter_kwargs ): """Return a field ancillary construct, or its key. .. 
versionadded:: 3.0.0 @@ -16822,15 +16832,16 @@ def field_ancillary( return self._construct( ("field_ancillary",), "field_ancillary", - identity=identity, + identity, key=key, default=default, + item=item, **filter_kwargs, ) def dimension_coordinate( self, - identity=None, + *identity, key=False, default=ValueError(), item=False, @@ -16938,7 +16949,7 @@ def dimension_coordinate( return self._construct( ("dimension_coordinate",), "dimension_coordinate", - identity=identity, + identity, key=key, default=default, item=item, @@ -16947,7 +16958,7 @@ def dimension_coordinate( def domain_axis( self, - identity=None, + *identity, key=False, default=ValueError(), item=False, @@ -17058,7 +17069,7 @@ def domain_axis( c = self._construct( ("domain_axis",), "domain_axis", - identity=identity, + identity, key=key, default=None, item=item, From fa54b01814ef7ce70b9c98d27ee6a4ec4a4bde26 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 5 Apr 2021 11:15:45 +0100 Subject: [PATCH 17/53] devs --- cf/field.py | 148 ++++++++++++++++++++++++++-------------------------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/cf/field.py b/cf/field.py index 9fd6088fa2..8f4da9fbc6 100644 --- a/cf/field.py +++ b/cf/field.py @@ -16477,7 +16477,7 @@ def cell_method(self, identity=None, default=ValueError(), key=False): def coordinate( self, - *identity, + *identity, default=ValueError(), key=False, item=False, @@ -16586,18 +16586,18 @@ def coordinate( identity, key=key, item=item, - default=default, + default=None, **filter_kwargs, ) - if identity is None: + if c is not None: return c if not c: da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: return self._default( - default, f"Can't find exactly one item to return" + default, f"Can't find exactly one construct" ) c = self._construct( @@ -16607,23 +16607,21 @@ def coordinate( key=key, item=item, default=default, - filter_by_axis=(da_key,), - axis_mode="exact" - ) - - c = self.coordinate( filter_by_axis=(da_key,), axis_mode="exact", - key=key, - item=item, - default=None, ) - + if c is not None: return c - def coordinate_reference( self, *identity, default=ValueError(), - key=False, item=False, **filter_kwargs ): + def coordinate_reference( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): """Return a coordinate reference construct, or its key. .. versionadded:: 3.0.0 @@ -16725,8 +16723,14 @@ def coordinate_reference( self, *identity, default=ValueError(), **filter_kwargs, ) - def field_ancillary( self, *identity, default=ValueError(), - key=False, item=False, **filter_kwargs ): + def field_ancillary( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): """Return a field ancillary construct, or its key. .. versionadded:: 3.0.0 @@ -16841,105 +16845,101 @@ def field_ancillary( self, *identity, default=ValueError(), def dimension_coordinate( self, - *identity, + *identity, key=False, default=ValueError(), item=False, **filter_kwargs, ): - """Return a dimension coordinate construct, or its key. + """Select a dimension coordinate construct. .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate_reference`, - `dimension_coordinates`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + .. 
seealso:: `construct`, `dimension_coordinates` :Parameters: identity: optional - Select the dimension coordinate construct by one of: + Select dimension coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. In addition to a + construct identities, the values are matched against: - * `None`. This is the default, which selects the - dimension coordinate construct when there is only one - of them. + * The construct identifier, with or without the + ``'key%'`` prefix, of a dimension coordinate + construct. - * The identity or key of a dimension coordinate - construct. + *Parameter example:* + ``'dimensioncoordinate1'`` - * The identity or key of a domain axis construct that is - spanned by a dimension coordinate construct's data. + *Parameter example:* + ``'key%dimensioncoordinate0'`` - * The position, in the field construct's data, of a domain - axis construct that is spanned by a dimension coordinate - construct's data. + * The identity or construct identifier, with or + without the ``'key%'`` prefix, of a domain axis + construct that is spanned by a dimension coordinate + construct's data. - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. + *Parameter example:* + ``'domainaxis2'`` - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + *Parameter example:* + ``'ncdim%latitude'`` - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + * The integer position, in the field construct's data, + of the domain axis construct that is spanned by a + dimension coordinate construct's data. - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + *Parameter example:* + ``0'`` - A position of a domain axis construct in the field - construct's data is specified by an integer index. + *Parameter example:* + ``cf.gt(2)`` - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + If no values are provided then all constructs are + selected. - *Parameter example:* - ``identity='Y'`` + {{value match}} + + {{displayed identity}} *Parameter example:* - ``identity='latitude'`` + ``'Y'`` *Parameter example:* - ``identity='long_name=Latitude'`` + ``latitude'`` *Parameter example:* - ``identity='dimensioncoordinate1'`` + ``re.compile('^lat')`` *Parameter example:* - ``identity='domainaxis2'`` + ``'long_name=Latitude'`` *Parameter example:* - ``identity='ncdim%y'`` + ``'Z', 'altutude'`` key: `bool`, optional - If True then return the selected construct key. By default - the construct itself is returned. + If True then return the selected construct + identifier. By default the construct itself is + returned. default: optional Return the value of the *default* parameter if a construct can not be found. If set to an `Exception` instance then it will be raised instead. 
+ item: `bool`, optional + If True then return the selected construct and its + construct identifier in a 2-tuple. By default the only + construct is returned. + + .. versionadded:: 3.9.0 + :Returns: - `DimensionCoordinate` or `str` - The selected dimension coordinate construct, or its key. + `DimensionCoordinate` or `str` or `tuple` + The selected dimension coordinate construct, or its + construct identifier, or both. **Examples:** @@ -16958,7 +16958,7 @@ def dimension_coordinate( def domain_axis( self, - *identity, + *identity, key=False, default=ValueError(), item=False, From f6d4c28f2a34ea08b99fc955ea730af12bbaf056 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 6 Apr 2021 13:33:05 +0100 Subject: [PATCH 18/53] devs --- cf/constructs.py | 110 ------------ cf/field.py | 439 ++++++++++++++++++++++------------------------- cf/functions.py | 37 ++-- 3 files changed, 229 insertions(+), 357 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index 4cea7471fd..0d8264a5bf 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -157,116 +157,6 @@ def _matching_values(cls, value0, construct, value1, basic=False): # # return self.constructs[da_key] - # def filter_by_identity(self, *identities, view=False, - # todict=False, cache=None, **identities_kwargs): - # """Select metadata constructs by identity. - # - # .. versionadded:: 3.0.0 - # - # .. seealso:: `filter_by_axis`, `filter_by_data`, `filter_by_key`, - # `filter_by_measure`, `filter_by_method`, - # `filter_by_naxes`, `filter_by_ncdim`, - # `filter_by_ncvar`, `filter_by_property`, - # `filter_by_size`, `filter_by_type`, - # `filters_applied`, `inverse_filter`, `unfilter` - # - # :Parameters: - # - # identities: optional - # Select constructs that have any of the given identities or - # construct keys. - # - # An identity is specified by a string (e.g. ``'latitude'``, - # ``'long_name=time'``, etc.); or a compiled regular - # expression (e.g. ``re.compile('^atmosphere')``), for which - # all constructs whose identities match (via `re.search`) - # are selected. - # - # If no identities are provided then all constructs are selected. - # - # Each construct has a number of identities, and is selected - # if any of them match any of those provided. A construct's - # identities are those returned by its `!identities` - # method. In the following example, the construct ``x`` has - # five identities: - # - # >>> x.identities() - # ['time', 'long_name=Time', 'foo=bar', 'T', 'ncvar%t'] - # - # A construct key may optionally have the ``'key%'`` - # prefix. For example ``'dimensioncoordinate2'`` and - # ``'key%dimensioncoordinate2'`` are both acceptable keys. - # - # Note that the identifiers of a metadata construct in the - # output of a `print` or `!dump` call are always one of its - # identities, and so may always be used as an *identities* - # argument. - # - # Domain axis constructs may also be identified by their - # position in the field construct's data array. Positions - # are specified by either integers. - # - # .. note:: This is an extension to the functionality of - # `cfdm.Constucts.filter_by_identity`. - # - # {{view: `bool`, optional}} - # - # {{todict: `bool`, optional}} - # - # {{cache: optional}} - # - # identities_kwargs: optional - # Additional parameters for configuring each construct's - # `identities` method. By default ``generator=True`` is - # passed by default, and ``ctype`` is inferred from the - # *identities* parameter. - # - # .. 
versionadded:: 3.9.0 - # - # :Returns: - # - # `Constructs` - # The selected constructs and their construct keys. - # - # **Examples:** - # - # Select constructs that have a "standard_name" property of - # 'latitude': - # - # >>> d = c.filter_by_identity('latitude') - # - # Select constructs that have a "long_name" property of 'Height': - # - # >>> d = c.filter_by_identity('long_name=Height') - # - # Select constructs that have a "standard_name" property of - # 'latitude' or a "foo" property of 'bar': - # - # >>> d = c.filter_by_identity('latitude', 'foo=bar') - # - # Select constructs that have a netCDF variable name of 'time': - # - # >>> d = c.filter_by_identity('ncvar%time') - # - # """ - # if cache is not None: - # return cache - # - # # Allow keys without the 'key%' prefix - # for n, identity in enumerate(identities): - # if identity in self: - # identities = list(identities) - # identities[n] = "key%" + identity - # break - # - # ctype = [i for i in "XTYZ" if i in identities] - # - # return super().filter_by_identity( - # identities, todict=todict, - # _config={"identities_kwargs": {"ctype": ctype}, - # "bypass": lambda x: x in ctype} - # ) - def _filter_by_identity(self, arg, todict, _config, identities): """Worker function for `filter_by_identity` and `filter`. diff --git a/cf/field.py b/cf/field.py index 8f4da9fbc6..0901ea84e2 100644 --- a/cf/field.py +++ b/cf/field.py @@ -84,6 +84,7 @@ _DEPRECATION_ERROR_DICT, _DEPRECATION_ERROR_SEQUENCE, _DEPRECATION_ERROR_KWARG_VALUE, + DeprecationError, ) from .formula_terms import FormulaTerms @@ -736,16 +737,12 @@ def analyse_items(self, relaxed_identities=None): # ("dimension_coordinate",), "filter_by_axis", (axis,) # mode="and", todict=True # ) - dims = self.dimension_coordinates( - filter_by_axis=(axis,), axis_mode="and", todict=True + key, dim = self.dimension_coordinate( + item=True, default=(None, None), filter_by_axis=(axis,) ) - if len(dims) == 1: + if dim is not None: # This axis of the domain has a dimension coordinate - key, dim = dims.popitem() - # key = dims.key() - # dim = dims.value() - identity = dim.identity(strict=True, default=None) if identity is None: # Dimension coordinate has no identity, but it may @@ -777,15 +774,10 @@ def analyse_items(self, relaxed_identities=None): item=True, default=(None, None), ) - # auxs = self.auxiliary_coordinates.filter_by_axis( - # axis, mode="exact", todict=True - # ) - if aux is not None: # len(auxs) == 1: + if aux is not None: # This axis of the domain does not have a # dimension coordinate but it does have exactly # one 1-d auxiliary coordinate, so that will do. 
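# The analyse_items() hunk above replaces "filter the constructs, then
# popitem()" with a single call to the singular selector using item=True and
# default=(None, None), so the caller unpacks a (key, construct) pair
# directly. A minimal, self-contained sketch of that idiom; the dict-based
# "constructs" and the select_one() helper are illustrative stand-ins, not
# the real cf machinery.
def select_one(constructs, match, item=False, default=None):
    """Return the unique construct satisfying ``match``, else ``default``."""
    found = [(key, value) for key, value in constructs.items() if match(value)]
    if len(found) != 1:
        return default

    key, construct = found[0]
    return (key, construct) if item else construct


constructs = {
    "auxiliarycoordinate0": "latitude",
    "dimensioncoordinate0": "time",
}

# Unpack key and construct in one step, instead of filtering to a mapping
# and calling popitem() on the result.
key, aux = select_one(
    constructs, lambda value: value == "latitude", item=True, default=(None, None)
)
if aux is not None:
    print(key, aux)  # auxiliarycoordinate0 latitude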
- # key, aux = auxs.popitem() - identity = aux.identity(strict=True, default=None) if identity is None and relaxed_identities: @@ -1563,7 +1555,9 @@ def _binary_operation_old(self, other, method): "auxiliary_coordinate", "domain_ancillary", ), - filter_by_axis=(axis0,), + filter_by_axis=( + axis0, + ), # TODO check if we need an axis_mode="or" or "subset" here todict=True, ) ) @@ -1926,28 +1920,24 @@ def _binary_operation(self, other, method): out1 = {} for i, (f, out) in enumerate(zip((field0, field1), (out0, out1))): data_axes = f.get_data_axes() - - # f_dimension_coordinates = f.dimension_coordinates(todict=True) - # f_auxiliary_coordinates = f.auxiliary_coordinates(todict=True) for axis in f.domain_axes(todict=True): identity = None key = None coord = None coord_type = None - coords = f.dimension_coordinates( - filter_by_axis=(axis,), axis_mode="exact", todict=True + key, coord = f.dimension_coordinate( + item=True, + default=(None, None), + filter_by_axis=(axis,), ) - if len(coords) == 1: - # This axis of the domain has a dimension coordinate - key, coord = coords.popitem() - # key = coords.key() - # coord = coords.value() - + if coord is not None: + # This axis of the domain has a dimension + # coordinate identity = coord.identity(strict=True, default=None) if identity is None: - # Dimension coordinate has no identity, but it may - # have a recognised axis. + # Dimension coordinate has no identity, but it + # may have a recognised axis. for ctype in ("T", "X", "Y", "Z"): if getattr(coord, ctype, False): identity = ctype @@ -1956,17 +1946,17 @@ def _binary_operation(self, other, method): if identity is None and relaxed_identities: identity = coord.identity(relaxed=True, default=None) else: - coords = f.auxiliary_coordinates( - filter_by_axis=(axis,), axis_mode="exact", todict=True + key, coord = f.auxiliary_coordinate( + item=True, + default=(None, None), + filter_by_axis=(axis,), + axis_mode="exact", ) - if len(coords) == 1: + if coord is not None: # This axis of the domain does not have a - # dimension coordinate but it does have exactly - # one 1-d auxiliary coordinate, so that will do. - key, coord = coords.popitem() - # key = coords.key() - # coord = coords.value() - + # dimension coordinate but it does have + # exactly one 1-d auxiliary coordinate, so + # that will do. identity = coord.identity(strict=True, default=None) if identity is None and relaxed_identities: @@ -2032,7 +2022,7 @@ def _binary_operation(self, other, method): # List of field1 coordinate reference constucts which will # be added to field0. E.g. - # [] + # [] refs_to_add_from_field1 = [] # Check that the two fields are combinable @@ -2484,11 +2474,7 @@ def _equivalent_coordinate_references( return False # Compare the domain ancillaries - # domain_ancillaries = self.domain_ancillaries(todict=True) - # field1_domain_ancillaries = field1.domain_ancillaries(todict=True) - # TODO consider case of None key ? 
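# The _binary_operation() hunks above settle on one identity per domain axis
# by cascading through fall-backs: a strict identity first, then a recognised
# T/X/Y/Z axis flag, then (optionally) a relaxed identity, before trying a
# 1-d auxiliary coordinate instead. A rough, partial sketch of that cascade;
# the Coord class and its attributes are hypothetical stand-ins for the real
# coordinate constructs.
from dataclasses import dataclass


@dataclass
class Coord:
    standard_name: str = None  # strict identity
    axis: str = None           # simplification of the construct's T/X/Y/Z flags
    long_name: str = None      # contributes to the relaxed identity


def resolve_identity(coord, relaxed=False):
    """Return the best available identity for ``coord``, or None."""
    if coord.standard_name is not None:
        return coord.standard_name

    if coord.axis in ("T", "X", "Y", "Z"):
        return coord.axis

    if relaxed and coord.long_name is not None:
        return f"long_name={coord.long_name}"

    return None


print(resolve_identity(Coord(standard_name="time")))                   # time
print(resolve_identity(Coord(axis="X")))                               # X
print(resolve_identity(Coord(long_name="Model level"), relaxed=True))  # long_name=Model level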
- for ( term, identifier0, @@ -2567,16 +2553,10 @@ def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): ) if axes_sizes.count(n) == 1: - # domain_axes = self.domain_axes( - # filter_by_size=(n,), todict=True - # ) - # key, _ = domain_axes.popitem() - key = self.domain_axis( + da_key = self.domain_axis( filter_by_size=(n,), key=True ) - axes.append(key) - # domain_axes.filter_by_size(n, view=True).key() - # ) + axes.append(da_key) else: raise ValueError( f"Can't insert {item!r}: Ambiguous shape: " @@ -3144,19 +3124,16 @@ def _regrid_get_latlong(self, name, axes=None): if axes["X"] == axes["Y"]: raise ValueError("TODO") - x = self.auxiliary_coordinates( - "X", filter_by_naxes=(2,), todict=True + lon_key, lon = self.auxiliary_coordinate( + "X", item=True, filter_by_naxes=(2,), default=(None, None) ) - y = self.auxiliary_coordinates( - "Y", filter_by_naxes=(2,), todict=True + lat_key, lat = self.auxiliary_coordinate( + "Y", item=True, filter_by_naxes=(2,), default=(None, None) ) - if len(x) != 1: - raise ValueError("TODO") - if len(y) != 1: - raise ValueError("TODO") - - lon_key, lon = tuple(x.items())[0] - lat_key, lat = tuple(y.items())[0] + if lon is None: + raise ValueError("TODO x") + if lat is None: + raise ValueError("TODO y") if lat.shape != lon.shape: raise ValueError("TODO") @@ -3214,10 +3191,6 @@ def _regrid_get_latlong(self, name, axes=None): lon_found = False lat_found = False - # auxiliary_coordinates = self.auxiliary_coordinates( - # view=True, cache=auxiliary_coordinates - # ) - for key, aux in self.auxiliary_coordinates( filter_by_naxes=(2,), todict=True ).items(): @@ -3558,21 +3531,10 @@ def _regrid_get_reordered_sections( axis_indices = [] if axis_order is not None: for axis in axis_order: - # axis_key = self.dim(axis, key=True) - # dims = self.dimension_coordinates( - # filter_by_axis=(axis,), axis_mode="exact", todict=True - # ) - # if len(dims)!= 1: - # axis_key = None - # else: - # axis_key, _ = dims.popitem() - axis_key = self.dimension_coordinate( filter_by_axis=(axis,), - axis_mode="exact", default=None, key=True, - todict=True, ) if axis_key is not None: if axis_key in regrid_axes: @@ -3826,7 +3788,7 @@ def _regrid_update_coordinate_references( """ domain_axes = None - domain_ancillaries = None + domain_ancillaries = self.domain_ancillaries(todict=True) for key, ref in self.coordinate_references(todict=True).items(): ref_axes = [] @@ -3837,42 +3799,23 @@ def _regrid_update_coordinate_references( self.del_construct(key) continue - domain_ancillaries = self.domain_ancillaries(todict=True) - for ( term, value, ) in ref.coordinate_conversion.domain_ancillaries().items(): - # domain_ancillaries = self.domain_ancillaries( - # view=True, cache=domain_ancillaries - # ) - # - # key = domain_ancillaries(value, view=True).key(default=None) - - if value in domain_ancillaries: - key = value - else: + if value not in domain_ancillaries: continue + key = value + # If this domain ancillary spans both X and Y axes # then regrid it, otherwise remove it - # if f.domain_anc(key, axes_all=('X', 'Y')):# v2 x = self.domain_axis("X", key=True) y = self.domain_axis("Y", key=True) - # if domain_ancillaries.filter_by_key(key).filter_by_axis( - # x, y, mode="exact", view=True - # ): - # if len( - # self.domain_ancillaries( - # filter_by_axis=(x, y), axis_mode="exact", - # todict=True - # ) - # ) == 1: if self.domain_ancillary( filter_by_axis=(x, y), axis_mode="exact", key=True, - todict=True, default=False, ): # Convert the domain ancillary into an independent @@ -3908,7 
+3851,7 @@ def _regrid_update_coordinate_references( d_axes = self.get_data_axes(key) domain_axes = self.domain_axes( - todict=True, cache=domain_axes + todict=True, cached=domain_axes ) for k_s, new_size in zip( @@ -4048,7 +3991,7 @@ def _regrid_update_coordinates( self.set_construct(d, axes=[k_s]) # dst_auxiliary_coordinates = dst.auxiliary_coordinates( - # view=True, cache=dst_auxiliary_coordinates + # view=True, cached=dst_auxiliary_coordinates # ) for aux_key, aux in dst.auxiliary_coordinates( @@ -4081,7 +4024,7 @@ def _regrid_update_coordinates( self.set_construct(coord, axes=[axis_key]) else: # dst_auxiliary_coordinates = dst.auxiliary_coordinates( - # view=True, cache=dst_auxiliary_coordinates + # view=True, cached=dst_auxiliary_coordinates # )# for src_axis_key, dst_axis_key in zip( @@ -4165,11 +4108,6 @@ def _weights_area_XY( `bool` or `None` """ - # dimension_coordinates = self.dimension_coordinates(view=True) - - # xdims = self.dimension_coordinates("X", todict=True) - # ydims = self.dimension_coordinates("Y", todict=True) - xkey, xcoord = self.dimension_coordinate( "X", item=True, default=(None, None) ) @@ -4177,28 +4115,15 @@ def _weights_area_XY( "Y", item=True, default=(None, None) ) - # if not (xdims and ydims): - if xcoord is None and ycoord is None: + if xcoord is None or ycoord is None: if auto: return raise ValueError( - "No unique coordinate constructs for calculating " - "area weights" + "No unique 'X' and 'Y' dimension coordinate constructs for " + "calculating area weights" ) - # xkey, xcoord = xdims.popitem() - # ykey, ycoord = ydims.popitem() - # - # if xdims or ydims: - # if auto: - # return - # - # raise ValueError( - # "Ambiguous coordinate constructs for calculating area " - # "weights" - # ) - if xcoord.Units.equivalent( Units("radians") ) and ycoord.Units.equivalent(Units("radians")): @@ -4307,8 +4232,8 @@ def _weights_data( methods: `bool`, optional If True then add a description of the method used to - create the weights to the *comp* dictionary, as opposed to - the actual weights. + create the weights to the *comp* dictionary, as + opposed to the actual weights. 
""" # -------------------------------------------------------- @@ -5228,7 +5153,7 @@ def _weights_measure( f"Can't find weights: Multiple {measure!r} cell measures" ) - key, clm = dict(m).popitem() + key, clm = m.popitem() clm_axes0 = self.get_data_axes(key) @@ -5318,11 +5243,11 @@ def _weights_yyy( y_axis = None z_axis = None - auxiliary_coordinates = self.auxiliary_coordinates( + auxiliary_coordinates_1d = self.auxiliary_coordinates( filter_by_naxes=(1,), todict=True ) - for key, aux in auxiliary_coordinates.items(): + for key, aux in auxiliary_coordinates_1d.items(): if aux.get_geometry(None) != geometry_type: continue @@ -5482,6 +5407,12 @@ def Flags(self): @property def ncdimensions(self): """""" + _DEPRECATION_WARNING_ATTRIBUTE( + self, + "ncdimensions", + version="3.0.0", + ) # pragma: no cover + out = {} for dim, domain_axis in self.domain_axes(todict=True).items(): ncdim = domain_axis.nc_get_dimension(None) @@ -10464,7 +10395,9 @@ def collapse( all_axes = [] for axes in input_axes: if axes is None: - domain_axes = self.domain_axes(todict=False, cache=domain_axes) + domain_axes = self.domain_axes( + todict=False, cached=domain_axes + ) all_axes.append(list(domain_axes)) continue @@ -10505,7 +10438,7 @@ def collapse( # ------------------------------------------------------------ # # ------------------------------------------------------------ - domain_axes = f.domain_axes(todict=False, cache=domain_axes) + domain_axes = f.domain_axes(todict=False, cached=domain_axes) # auxiliary_coordinates = f.auxiliary_coordinates(view=True) # dimension_coordinates = f.dimension_coordinates(view=True) @@ -10575,7 +10508,6 @@ def collapse( # ).value(None) dc = f.dimension_coordinate( filter_by_axis=(axis,), - axis_mode="exact", default=None, ) if dc is not None and not dc.has_bounds(): @@ -10912,7 +10844,7 @@ def collapse( # axis, mode="exact", view=True # ).value(None) dim = f.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None + filter_by_axis=(axis,), default=None ) if dim is None: continue @@ -11622,7 +11554,7 @@ def _group_weights(weights, iaxis, index): # E.g. group=cf.M() # ---------------------------------------------------- coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None + filter_by_axis=(axis,), default=None ) if coord is None: raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") @@ -11655,7 +11587,7 @@ def _group_weights(weights, iaxis, index): # Chunks of # ---------------------------------------------------- coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None + filter_by_axis=(axis,), default=None ) if coord is None: raise ValueError("TODO asdas 4444444 dhfdui ") @@ -11708,7 +11640,7 @@ def _group_weights(weights, iaxis, index): # E.g. group=[cf.month(4), cf.month(cf.wi(9, 11))] # ---------------------------------------------------- coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None + filter_by_axis=(axis,), default=None ) if coord is None: coord = self.auxiliary_coordinate( @@ -13463,30 +13395,17 @@ def set_data( if not domain_axes: raise ValueError("Can't set data: No domain axes exist") - # domain_axes = f.domain_axes(view=True) - axes = [] for n in data_shape: - # da = domain_axes.filter_by_size(n, todict=True) - # da = f.domain_axes(filter_by_size=(n,), todict=True) - # if len(da) != 1: - # raise ValueError( - # "Can't insert data: Ambiguous data shape: " - # f"{data_shape}. " - # "Consider setting the axes parameter." 
- # ) - # - # da_key, _ = da.popitem() da_key = f.domain_axis( filter_by_size=(n,), key=True, default=None ) - if da_key is None: # len(da) != 1: + if da_key is None: raise ValueError( "Can't insert data: Ambiguous data shape: " f"{data_shape}. Consider setting the axes parameter." ) - # da_key, _ = da.popitem() axes.append(da_key) else: @@ -15247,9 +15166,7 @@ def anchor( else: f = _inplace_enabled_define_and_cleanup(self) - dim = f.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None - ) + dim = f.dimension_coordinate(filter_by_axis=(axis,), default=None) if dim is None: raise ValueError( "Can't shift non-cyclic " @@ -15290,9 +15207,7 @@ def anchor( if not dry_run: f.roll(axis, shift, inplace=True) - dim = f.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact" - ) + dim = f.dimension_coordinate(filter_by_axis=(axis,)) n = ((value - dim.data[0]) / period).ceil() else: @@ -15446,8 +15361,6 @@ def autocyclic(self, verbose=None): >>> f.autocyclic() """ - # dims = self.dimension_coordinates(view=True).filter_by_identity("X", view=True)._dictionary() - key, dim = self.dimension_coordinate( "X", item=True, default=(None, None) ) @@ -15455,8 +15368,6 @@ def autocyclic(self, verbose=None): if dim is None: return False - # key, dim = dims.popitem() - if not dim.Units.islongitude: if dim.get_property("standard_name", None) not in ( "longitude", @@ -16000,18 +15911,41 @@ def auxiliary_coordinate( TODO """ - return self._construct( - ("auxiliary_coordinate",), + c = self._select_construct( + ("auxiliary_coordinate"), "auxiliary_coordinate", - identity=identity, + identity, key=key, - default=default, + item=item, + default=None, **filter_kwargs, ) + if c is not None: + return c + + if not c: + da_key = self.domain_axis(identity, key=True, default=None) + if da_key is not None: + return self._default( + default, + f"{self.__class__.__name__}.auxiliary_coordinate() can't " + "return zero constructs", + ) + + return self._select_construct( + ("auxiliary_coordinate"), + "auxiliary_coordinate", + identity, + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact", + ) def construct( self, - identity=None, + *identity, default=ValueError(), key=False, item=False, @@ -16137,10 +16071,10 @@ def construct( TypeError: No height coordinates """ - return self._construct( + return self._select_construct( (), "construct", - identity=identity, + identity, key=key, item=item, default=default, @@ -16148,7 +16082,12 @@ def construct( ) def domain_ancillary( - self, identity=None, default=ValueError(), key=False, **filter_kwargs + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, ): """Return a domain ancillary construct, or its key. @@ -16251,17 +16190,23 @@ def domain_ancillary( TODO """ - return self._construct( + return self._select_construct( ("domain_ancillary",), "domain_ancillary", - identity=identity, + identity, key=key, default=default, + item=item, **filter_kwargs, ) def cell_measure( - self, identity=None, default=ValueError(), key=False, **filter_kwargs + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, ): """Select a cell measure construct by its identity. 
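# auxiliary_coordinate(), coordinate() and dimension_coordinate() in this
# patch all share a two-step selection: try the given identity directly and,
# only if that yields no unique construct, re-interpret the identity as a
# domain axis and filter by that axis before giving up. A compact,
# dictionary-based sketch of the control flow; select_coordinate() and its
# arguments are illustrative, not the cf internals.
def select_coordinate(constructs, axis_of, identity, default=None):
    """Return the unique construct matching ``identity``, else ``default``.

    Falls back to matching constructs whose data spans the domain axis
    named by ``identity``.
    """
    direct = {
        key: c for key, c in constructs.items() if identity in c["identities"]
    }
    if len(direct) == 1:
        return next(iter(direct.values()))

    # Fall back: treat the identity as a domain axis identifier
    by_axis = {
        key: c for key, c in constructs.items() if axis_of[key] == identity
    }
    if len(by_axis) == 1:
        return next(iter(by_axis.values()))

    return default


constructs = {
    "dimensioncoordinate0": {"identities": ["time", "T"]},
    "dimensioncoordinate1": {"identities": ["latitude", "Y"]},
}
axis_of = {
    "dimensioncoordinate0": "domainaxis0",
    "dimensioncoordinate1": "domainaxis1",
}

print(select_coordinate(constructs, axis_of, "latitude"))     # direct match
print(select_coordinate(constructs, axis_of, "domainaxis0"))  # matched via its axis
print(select_coordinate(constructs, axis_of, "height"))       # None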
@@ -16360,16 +16305,24 @@ def cell_measure( TODO """ - return self._construct( + return self._select_construct( ("cell_measure",), "cell_meausure", - identity=identity, + identity, key=key, default=default, + item=item, **filter_kwargs, ) - def cell_method(self, identity=None, default=ValueError(), key=False): + def cell_method( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): """Select a cell method construct by its identity. .. versionadded:: 3.0.0 @@ -16473,6 +16426,9 @@ def cell_method(self, identity=None, default=ValueError(), key=False): if key: return c.key(default=default) + if item: + return c.key(default=default), c.value(default=default) + return c.value(default=default) def coordinate( @@ -16580,7 +16536,7 @@ def coordinate( TODO """ - c = self._construct( + c = self._select_construct( ("dimension_coordinate", "auxiliary_coordinate"), "coordinate", identity, @@ -16589,7 +16545,6 @@ def coordinate( default=None, **filter_kwargs, ) - if c is not None: return c @@ -16597,10 +16552,12 @@ def coordinate( da_key = self.domain_axis(identity, key=True, default=None) if da_key is not None: return self._default( - default, f"Can't find exactly one construct" + default, + f"{self.__class__.__name__}.coordinate() can't " + "return zero constructs", ) - c = self._construct( + return self._select_construct( ("dimension_coordinate", "auxiliary_coordinate"), "coordinate", identity, @@ -16611,9 +16568,6 @@ def coordinate( axis_mode="exact", ) - if c is not None: - return c - def coordinate_reference( self, *identity, @@ -16713,7 +16667,7 @@ def coordinate_reference( TODO """ - return self._construct( + return self._select_construct( ("coordinate_reference",), "coordinate_reference", identity, @@ -16833,7 +16787,7 @@ def field_ancillary( TODO """ - return self._construct( + return self._select_construct( ("field_ancillary",), "field_ancillary", identity, @@ -16946,15 +16900,37 @@ def dimension_coordinate( TODO """ - return self._construct( + c = self._select_construct( ("dimension_coordinate",), "dimension_coordinate", identity, key=key, - default=default, item=item, + default=None, **filter_kwargs, ) + if c is not None: + return c + + if not c: + da_key = self.domain_axis(identity, key=True, default=None) + if da_key is not None: + return self._default( + default, + f"{self.__class__.__name__}.dimension_coordinate can't " + "return zero constructs", + ) + + return self._select_construct( + ("dimension_coordinate",), + "dimension_coordinate", + identity, + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact", + ) def domain_axis( self, @@ -17052,21 +17028,27 @@ def domain_axis( """ # Try for integer index - try: - da_key = self.get_data_axes(default=None)[identity] - except TypeError: - pass - except IndexError: - return self._default( - default, - "Index does not exist for field construct data dimenions", - ) - else: - # TODO consider using filter_by_key ... 
but I think - # filter_by_identity is faster, though - identity = da_key + if identity: + identity2 = [] + + data_axes = self.get_data_axes(default=None) + for i in identity: + try: + identity2.append(data_axes[i]) + except TypeError: + identity2.append(i) + except IndexError: + pass + + if not identity2: + return self._default( + default, + "Indices do not exist for field construct data dimenions", + ) + + identity = identity2 - c = self._construct( + c = self._select_construct( ("domain_axis",), "domain_axis", identity, @@ -17078,7 +17060,8 @@ def domain_axis( if c is not None: return c - da_key = self.domain_axis_key(identity, default=None) + da_key = self.domain_axis_key(*identity, default=None) + if da_key is not None: if key: return da_key @@ -17091,7 +17074,9 @@ def domain_axis( return construct return self._default( - default, "No unique domain axis construct is identifable" + default, + f"{self.__class__.__name__}.domain_axis() can't return zero " + "constructs", ) def domain_axis_position(self, identity): @@ -18771,9 +18756,7 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): return f - dim = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None - ) + dim = self.dimension_coordinate(filter_by_axis=(axis,), default=None) if dim is not None and dim.period() is None: raise ValueError( f"Can't roll: {dim.identity()!r} axis has non-periodic " @@ -20865,23 +20848,10 @@ def derivative( if axis is None: raise ValueError("Invalid axis specifier") - coord = self.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None - ) + coord = self.dimension_coordinate(filter_by_axis=(axis,), default=None) if coord is None: raise ValueError("Axis specified is not unique.") - # dims = self.dimension_coordinates(view=True).filter_by_axis( - # axis, mode="exact", view=True - # ) - # len_dims = len(dims) - # if not len_dims: - # raise ValueError("Invalid axis specifier") - # elif len_dims != 1: - # raise ValueError("Axis specified is not unique.") - # - # dckey, coord = dims._dictionary().popitem() - # Get the axis index axis_index = self.get_data_axes().index(axis) @@ -21309,31 +21279,36 @@ def refs(self, *identities, **kwargs): @property def _Axes(self): """""" - _DEPRECATION_ERROR_ATTRIBUTE( - self, "_Axes", "Use attribute 'domain_axes' instead." - ) # pragma: no cover + raise DeprecationError( + f"{self.__class__.__name__} attribute '_Axes' has been deprecated " + "at version 3.0.0 and is no longer available" + "Use 'domain_axes' instead." + ) @property def CellMethods(self): """""" - _DEPRECATION_ERROR_ATTRIBUTE( - self, "CellMethods", "Use method 'cell_methods.ordered' instead." - ) # pragma: no cover + raise DeprecationError( + f"{self.__class__.__name__} attribute 'CellMethods' has been deprecated " + "at version 3.0.0 and is no longer available" + "Use 'cell_methods' instead." + ) @property def Items(self): - """Deprecated at version 3.0.0. - - Use attribute `constructs` instead. - - """ - _DEPRECATION_ERROR_ATTRIBUTE( - self, "Items", "Use 'constructs' attribute instead." - ) # pragma: no cover + """""" + raise DeprecationError( + f"{self.__class__.__name__} attribute 'Items' has been deprecated " + "at version 3.0.0 and is no longer available" + "Use 'constructs' instead." 
+ ) def CM(self, xxx): - """Deprecated at version 3.0.0.""" - _DEPRECATION_ERROR_METHOD(self, "CM") # pragma: no cover + """""" + raise DeprecationError( + f"{self.__class__.__name__} method 'CM' has been deprecated " + "at version 3.0.0 and is no longer available" + ) def axis_name(self, *args, **kwargs): """Return the canonical name for an axis. diff --git a/cf/functions.py b/cf/functions.py index bade2232f9..3a20e53dcf 100644 --- a/cf/functions.py +++ b/cf/functions.py @@ -3364,7 +3364,7 @@ def _DEPRECATION_ERROR(message="", version="3.0.0"): def _DEPRECATION_ERROR_ARG(instance, method, arg, message="", version="3.0.0"): raise DeprecationError( "Argument {2!r} of method '{0}.{1}' has been deprecated at version " - "{4} and is no longer available. {3}".format( + "{4} and is no longer available and will be removed at version 4.0.0. {3}".format( instance.__class__.__name__, method, arg, message, version ) ) @@ -3392,7 +3392,7 @@ def _DEPRECATION_ERROR_FUNCTION_KWARGS( for key in kwargs.keys(): raise DeprecationError( "Keyword {1!r} of function '{0}' has been deprecated at version " - "{3} and is no longer available. {2}".format( + "{3} and is no longer available and will be removed at version 4.0.0. {2}".format( func, key, message, version ) ) @@ -3423,7 +3423,7 @@ def _DEPRECATION_ERROR_KWARGS( for key in kwargs.keys(): raise DeprecationError( "Keyword {2!r} of method '{0}.{1}' has been deprecated at " - "version {4} and is no longer available. {3}".format( + "version {4} and is no longer available and will be removed at version 4.0.0. {3}".format( instance.__class__.__name__, method, key, message, version ) ) @@ -3434,7 +3434,7 @@ def _DEPRECATION_ERROR_KWARG_VALUE( ): raise DeprecationError( "Value {!r} of keyword {!r} of method '{}.{}' has been deprecated at " - "version {} and is no longer available. {}".format( + "version {} and is no longer available and will be removed at version 4.0.0. {}".format( value, kwarg, method, instance.__class__.__name__, version, message ) ) @@ -3443,7 +3443,7 @@ def _DEPRECATION_ERROR_KWARG_VALUE( def _DEPRECATION_ERROR_METHOD(instance, method, message="", version="3.0.0"): raise DeprecationError( "{} method {!r} has been deprecated at version {} and is no longer " - "available. {}".format( + "available and will be removed at version 4.0.0. {}".format( instance.__class__.__name__, method, version, message ) ) @@ -3452,25 +3452,30 @@ def _DEPRECATION_ERROR_METHOD(instance, method, message="", version="3.0.0"): def _DEPRECATION_ERROR_ATTRIBUTE( instance, attribute, message="", version="3.0.0" ): - raise DeprecationError( - "{} attribute {!r} has been deprecate at version {} and is no longer " - "available. {}".format( - instance.__class__.__name__, attribute, version, message - ) + warnings.warn( + "{} attribute {!r} has been deprecated at version {} and will be " + "removed at version 4.0.0. {}".format( + instance.__class__.__name__, method, version, message + ), + DeprecationWarning, ) def _DEPRECATION_ERROR_FUNCTION(func, message="", version="3.0.0"): raise DeprecationError( "Function {!r} has been deprecated at version {} and is no longer " - "available. {}".format(func, version, message) + "available and will be removed at version 4.0.0. {}".format( + func, version, message + ) ) def _DEPRECATION_ERROR_CLASS(cls, message="", version="3.0.0"): raise DeprecationError( "Class {!r} has been deprecated at version {} and is no longer " - "available. {}".format(cls, version, message) + "available and will be removed at version 4.0.0. 
{}".format( + cls, version, message + ) ) @@ -3479,7 +3484,7 @@ def _DEPRECATION_WARNING_METHOD( ): warnings.warn( "{} method {!r} has been deprecated at version {} and will be " - "removed in a future version. {}".format( + "removed at version 4.0.0. {}".format( instance.__class__.__name__, method, version, message ), DeprecationWarning, @@ -3489,14 +3494,16 @@ def _DEPRECATION_WARNING_METHOD( def _DEPRECATION_ERROR_DICT(message="", version="3.0.0"): raise DeprecationError( "Use of a 'dict' to identify constructs has been deprecated at " - "version {} and is no longer available. {}".format(version, message) + "version {} and is no longer available and will be removed at version 4.0.0. {}".format( + version, message + ) ) def _DEPRECATION_ERROR_SEQUENCE(instance, version="3.0.0"): raise DeprecationError( "Use of a {!r} to identify constructs has been deprecated at version " - "{} and is no longer available. Use the * operator to unpack the " + "{} and is no longer available and will be removed at version 4.0.0. Use the * operator to unpack the " "arguments instead.".format(instance.__class__.__name__, version) ) From 74e22202881c4a02a14eb1b36adebf46a41011be Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 7 Apr 2021 18:17:15 +0100 Subject: [PATCH 19/53] devs --- cf/aggregate.py | 17 +- cf/field.py | 943 ++++++++++------------------ cf/test/create_test_files.py | 2 +- cf/test/setup_create_field.py | 3 - cf/test/test_AuxiliaryCoordinate.py | 57 +- cf/test/test_CellMeasure.py | 21 +- cf/test/test_CellMethod.py | 21 +- cf/test/test_CoordinateReference.py | 25 +- cf/test/test_Count.py | 4 +- cf/test/test_Datetime.py | 2 +- cf/test/test_DimensionCoordinate.py | 55 +- cf/test/test_Field.py | 503 ++++----------- cf/test/test_FieldAncillary.py | 29 +- cf/test/test_FieldList.py | 72 +-- cf/test/test_Index.py | 4 +- cf/test/test_List.py | 12 +- cf/test/test_Maths.py | 12 - cf/test/test_Partition.py | 30 - cf/test/test_Query.py | 79 ++- cf/test/test_TimeDuration.py | 291 +++++---- cf/test/test_cfa.py | 10 +- cf/test/test_collapse.py | 4 +- cf/test/test_decorators.py | 9 +- cf/test/test_docstring.py | 3 - cf/test/test_dsg.py | 17 +- cf/test/test_external.py | 29 +- cf/test/test_functions.py | 31 +- cf/test/test_gathering.py | 119 +--- cf/test/test_general.py | 6 +- cf/test/test_geometry.py | 49 +- cf/test/test_groups.py | 2 +- cf/test/test_read_write.py | 10 +- 32 files changed, 772 insertions(+), 1699 deletions(-) delete mode 100644 cf/test/test_Partition.py diff --git a/cf/aggregate.py b/cf/aggregate.py index e6076ed672..c71f25c01b 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -802,12 +802,11 @@ def __str__(self): return "\n".join(strings) def coordinate_values(self): - """Create a report listing all coordinate cell values and - bounds.""" + """Create a report listing coordinate cell values and bounds.""" string = ["First cell: " + str(self.first_values)] string.append("Last cell: " + str(self.last_values)) - string.append("First bounds: " + str(self.first_bounds)) - string.append("Last bounds: " + str(self.last_bounds)) + string.append("First cell bounds: " + str(self.first_bounds)) + string.append("Last cell bounds: " + str(self.last_bounds)) return "\n".join(string) @@ -1950,11 +1949,7 @@ def aggregate( aggregating_axes = [] axis_items = meta[0].axis.items() for axis in axes: - # TODO IMPORTANT: should this be filter_by_axis ???? Yes, surely ... 
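# The cf/functions.py hunks above standardise the deprecation wording ("will
# be removed at version 4.0.0") and turn the attribute helper into a warning
# rather than a raised error. A minimal stdlib sketch of that warn-don't-raise
# pattern; deprecated_attribute() and the Demo class are hypothetical
# stand-ins, not the cf helpers themselves.
import warnings


def deprecated_attribute(instance, attribute, version="3.0.0", message=""):
    """Warn that ``attribute`` is deprecated and scheduled for removal."""
    warnings.warn(
        f"{instance.__class__.__name__} attribute {attribute!r} has been "
        f"deprecated at version {version} and will be removed at version "
        f"4.0.0. {message}",
        DeprecationWarning,
        stacklevel=2,
    )


class Demo:
    @property
    def ncdimensions(self):
        deprecated_attribute(self, "ncdimensions")
        return {}


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Demo().ncdimensions

print(caught[0].category.__name__)  # DeprecationWarning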
- coord = meta[0].field.coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None - ) - # coord = coords.value(default=None) + coord = meta[0].field.coordinate(axis, default=None) if coord is None: continue @@ -2365,7 +2360,7 @@ def _create_hash_and_first_values( key = aux["key"] canonical_units = aux["units"] - coord = field.item(key) + coord = field.constructs[key] axes = [m_id_to_axis[identity] for identity in aux["axes"]] domain_axes = item_axes[key] @@ -2998,7 +2993,7 @@ def _ok_coordinate_arrays(meta, axis, overlap, contiguous, verbose=None): number_of_1d_aux_coord_values = 0 for m in meta: aux = m.axis[axis]["keys"][i] - array = m.field.item(aux).array + array = m.field.constructs[aux].array set_of_1d_aux_coord_values.update(array) number_of_1d_aux_coord_values += array.size if ( diff --git a/cf/field.py b/cf/field.py index 0901ea84e2..fe5b188c33 100644 --- a/cf/field.py +++ b/cf/field.py @@ -80,9 +80,6 @@ _DEPRECATION_ERROR_ARG, _DEPRECATION_ERROR_KWARGS, _DEPRECATION_ERROR_METHOD, - _DEPRECATION_ERROR_ATTRIBUTE, - _DEPRECATION_ERROR_DICT, - _DEPRECATION_ERROR_SEQUENCE, _DEPRECATION_ERROR_KWARG_VALUE, DeprecationError, ) @@ -1510,10 +1507,11 @@ def _binary_operation_old(self, other, method): field1_coordinate_references = field1.coordinate_references( todict=True ) - field1_domain_ancillaries = field1_domain_ancillaries(todict=True) + + field1_domain_ancillaries = field1.domain_ancillaries(todict=True) field1_domain_axes = field1.domain_axes(todict=True) - # field0_auxiliary_coordinates = field0.auxiliary_coordinates(view=True) + # field0_auxiliary_coordinates = field0.auxiliary_coordinates(todict=True) # field0_domain_ancillaries = field0_domain_ancillaries(todict=True) # c = field0.constructs.filter_by_type( @@ -1602,7 +1600,7 @@ def _binary_operation_old(self, other, method): "5: remove_items = {}".format(remove_items) ) # pragma: no cover - for key0, aux0 in field0_auxiliary_coordinates.items(): + for key0, aux0 in field0.auxiliary_coordinates(todict=True).items(): if key0 in remove_items: # Field0 auxiliary coordinate has already marked for # removal @@ -1665,7 +1663,7 @@ def _binary_operation_old(self, other, method): # Copy field1 auxiliary coordinates which do not span any # matching axes to field0 # ------------------------------------------------------------ - filed1_data_axes = field1.constructs.data_axes() + field1_data_axes = field1.constructs.data_axes() for key1 in field1_auxiliary_coordinates: if key1 in insert_aux: continue @@ -1738,6 +1736,8 @@ def _binary_operation_old(self, other, method): "{}, {}, {!r}".format(axes0, key1, field1.constructs[key1]) ) # pragma: no cover + # field1_domain_ancillaries = field1.domain_ancillaries(todict=True) + for key1, axes0 in insert_domain_anc.items(): try: key0 = field0.set_construct( @@ -3060,50 +3060,30 @@ def _regrid_get_latlong(self, name, axes=None): and Y coordinates are returned, which are not long/lat. """ - auxiliary_coordinates = None - if axes is None: - # Retrieve the field construct's X and Y dimension coordinates - xdims = self.dimension_coordinates("X", todict=True) - len_x = len(xdims) - if not len_x: - raise ValueError( - f"No X dimension coordinate found for the {name} " + # Retrieve the field construct's X and Y dimension + # coordinates + x_key, x = self.dimension_coordinate( + "X", + item=True, + default=ValueError( + f"No unique X dimension coordinate found for the {name} " "field. 
If none is present you " - "may need to specify the axes keyword, " - "otherwise you may need to set the X " - "attribute of the X dimension coordinate " - "to True." - ) - - if len_x > 1: - raise ValueError( - f"{name.captalize()} field has multiple 'X' dimension " - "coordinates" - ) - - ydims = self.dimension_coordinates("Y", todict=True) - len_y = len(ydims) - - if not len_y: - raise ValueError( - f"No Y dimension coordinate found for the {name} " + "may need to specify the axes keyword." + ), + ) + y_key, y = self.dimension_coordinate( + "Y", + item=True, + default=ValueError( + f"No unique Y dimension coordinate found for the {name} " "field. If none is present you " - "may need to specify the axes keyword, " - "otherwise you may need to set the Y " - "attribute of the Y dimension coordinate " - "to True." - ) - - if len_y > 1: - raise ValueError( - f"{name} field has multiple 'Y' dimension coordinates" - ) + "may need to specify the axes keyword." + ), + ) - x_key, x = xdims.popitem() - y_key, y = xdims.popitem() - x_axis = self.domain_axis(x_key, key=True) - y_axis = self.domain_axis(y_key, key=True) + x_axis = self.get_data_axes(x_key)[0] + y_axis = self.get_data_axes(y_key)[0] x_size = x.size y_size = y.size @@ -3120,7 +3100,7 @@ def _regrid_get_latlong(self, name, axes=None): if axes["X"] in (1, 0) and axes["Y"] in (0, 1): # Axes specified by integer position in dimensions of - # lat and lon 2-d coordinates + # lat and lon 2-d auxiliary coordinates if axes["X"] == axes["Y"]: raise ValueError("TODO") @@ -3136,27 +3116,15 @@ def _regrid_get_latlong(self, name, axes=None): raise ValueError("TODO y") if lat.shape != lon.shape: - raise ValueError("TODO") + raise ValueError("TODO 222222") lon_axes = self.get_data_axes(lon_key) lat_axes = self.get_data_axes(lat_key) if lat_axes != lon_axes: - raise ValueError("TODO") + raise ValueError("TODO 3333333") - x_axis = self.domain_axis( - lon_axes[axes["X"]], - key=True, - default=ValueError( - f"'X' axis specified for {name} field not found." - ), - ) - y_axis = self.domain_axis( - lat_axes[axes["Y"]], - key=True, - default=ValueError( - f"'Y' axis specified for {name} field not found." - ), - ) + x_axis = lon_axes[axes["X"]] + y_axis = lat_axes[axes["Y"]] else: x_axis = self.domain_axis( axes["X"], @@ -3283,7 +3251,7 @@ def _regrid_get_cartesian_coords(self, name, axes): if d is None: raise ValueError( f"No unique {name} dimension coordinate " - f"matches key {key}." + f"matches key {key!r}." ) coords.append(d.copy()) @@ -3667,7 +3635,7 @@ def _regrid_compute_field_mass( The destination field. """ - if not type(_compute_field_mass) == dict: + if not isinstance(_compute_field_mass, dict): raise ValueError( "Expected _compute_field_mass to be a dictionary." 
) @@ -3969,7 +3937,7 @@ def _regrid_update_coordinates( self.del_construct(key) domain_axes = self.domain_axes(todict=True) - dst_auxiliary_coordinates = None + # dst_auxiliary_coordinates = None if cartesian: # Make axes map @@ -4306,21 +4274,21 @@ def _weights_field(self, fields, comp, weights_axes, methods=False): """Creates a weights field.""" s = self.analyse_items() - # domain_axes = self.domain_axes(todict=True) - domain_axes_size_1 = self.domain_axes(filter_by_size=(1,), todict=True) + domain_axes = self.domain_axes(todict=True) + # domain_axes_size_1 = self.domain_axes(filter_by_size=(1,), todict=True) for w in fields: t = w.analyse_items() # TODO CHECK this with org - domain_axes_size_1 = w.domain_axes( - filter_by_size=(1,), todict=True - ) if t["undefined_axes"]: # if set( # t.domain_axes.filter_by_size(gt(1), view=True) # ).intersection(t["undefined_axes"]): - if set(domain_axes_size_1).intersection(t["undefined_axes"]): + w_domain_axes_1 = w.domain_axes( + filter_by_size=(1,), todict=True + ) + if set(w_domain_axes_1).intersection(t["undefined_axes"]): raise ValueError("345jn456jn TODO") w = w.squeeze() @@ -5318,7 +5286,7 @@ def _weights_yyy( aux_X.bounds.varray if aux_Z is None: - for key, aux in auxiliary_coordinates.items(): + for key, aux in auxiliary_coordinates_1d.items(): if aux.Z: aux_Z = aux.copy() z_axis = self.get_data_axes(key)[0] @@ -5404,23 +5372,6 @@ def Flags(self): ) ) - @property - def ncdimensions(self): - """""" - _DEPRECATION_WARNING_ATTRIBUTE( - self, - "ncdimensions", - version="3.0.0", - ) # pragma: no cover - - out = {} - for dim, domain_axis in self.domain_axes(todict=True).items(): - ncdim = domain_axis.nc_get_dimension(None) - if ncdim is not None: - out[dim] = ncdim - - return out - @property def rank(self): """The number of axes in the domain. 
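# The _regrid_get_latlong() hunks above pass an Exception instance as the
# selector's ``default``, so a missing or ambiguous coordinate raises a
# targeted error instead of being checked by hand afterwards. A small sketch
# of that "default is either a fall-back value or an exception to raise"
# convention; unique_or_default() is an illustrative stand-in, not the cf
# implementation.
def unique_or_default(items, default=None):
    """Return the only item, or fall back to ``default``.

    If ``default`` is an Exception instance it is raised rather than
    returned.
    """
    if len(items) == 1:
        return items[0]

    if isinstance(default, Exception):
        raise default

    return default


print(unique_or_default(["latitude"]))              # latitude
print(unique_or_default([], default=(None, None)))  # (None, None)

try:
    unique_or_default(
        [], default=ValueError("No unique X dimension coordinate found")
    )
except ValueError as error:
    print(error)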
@@ -10595,7 +10546,7 @@ def collapse( g_weights = f.weights( weights, components=True, - axes=list(collapse_axes.keys()), + axes=list(collapse_axes), # .keys()), scale=scale, measure=measure, radius=radius, @@ -10605,7 +10556,8 @@ def collapse( if not g_weights: g_weights = None - axis = collapse_axes.key() + # axis = collapse_axes.key() + axis = [a for a in collapse_axes][0] f = f._collapse_grouped( method, @@ -10647,6 +10599,7 @@ def collapse( verbose=verbose, ) continue + elif regroup: raise ValueError( "Can't return an array of groups for a non-grouped " @@ -10682,8 +10635,7 @@ def collapse( if method == "integral": if not measure: raise ValueError( - "Must set measure=True for {!r} " - "collapses".format(method) + f"Must set measure=True for {method!r} collapses" ) if scale is not None: @@ -10709,12 +10661,13 @@ def collapse( d_kwargs["weights"] = d_weights logger.info( - " Output weights = {!r}".format(d_weights) + f" Output weights = {d_weights!r}" ) # pragma: no cover + elif method == "integral": raise ValueError( - "Must set the 'weights' parameter " - "for {!r} collapses".format(method) + f"Must set the 'weights' parameter for {method!r} " + "collapses" ) if method in _collapse_ddof_methods: @@ -10723,12 +10676,12 @@ def collapse( # ======================================================== # Collapse the data array # ======================================================== - logger.info(" Before collapse of data:") # pragma: no cover logger.info( - " iaxes, d_kwargs = {} {}".format(iaxes, d_kwargs) + " Before collapse of data:\n" + f" iaxes, d_kwargs = {iaxes} {d_kwargs}\n" + f" f.shape = {f.shape}\n" + f" f.dtype = {f.dtype}\n" ) # pragma: no cover - logger.info(" f.shape = {}".format(f.shape)) # pragma: no cover - logger.info(" f.dtype = {}".format(f.dtype)) # pragma: no cover getattr(f.data, method)( axes=iaxes, @@ -10747,18 +10700,17 @@ def collapse( [axis for axis in data_axes if axis not in collapse_axes] ) - logger.info(" After collapse of data:") # pragma: no cover - logger.info(" f.shape = {}".format(f.shape)) # pragma: no cover - logger.info(" f.dtype = {}".format(f.dtype)) # pragma: no cover + logger.info( + " After collapse of data:\n" + f" f.shape = {f.shape}\n" + f" f.dtype = {f.dtype}\n", + f"collapse_axes = {collapse_axes}", + ) # pragma: no cover # --------------------------------------------------------- # Update dimension coordinates, auxiliary coordinates, # cell measures and domain ancillaries # --------------------------------------------------------- - logger.info( - " collapse_axes = {}".format(collapse_axes) - ) # pragma: no cover - for axis, domain_axis in collapse_axes.items(): # Ignore axes which are already size 1 size = domain_axis.get_size() @@ -11675,9 +11627,9 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Over days # ---------------------------------------------------- - coord = self.dimension_coordinates.filter_by_axis( - axis, mode="exact", view=True - ).value(None) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) if coord is None or not coord.Units.isreftime: raise ValueError( "Reference-time dimension coordinates are required " @@ -11690,7 +11642,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over days' collapse" ) - cell_methods = self.cell_methods(view=True).ordered() + cell_methods = self.cell_methods().ordered() w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -11793,10 +11745,8 @@ def _group_weights(weights, 
iaxis, index): # ---------------------------------------------------- # Over years # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) if coord is None or not coord.Units.isreftime: raise ValueError( @@ -11811,7 +11761,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over years' collapse" ) - cell_methods = self.cell_methods(view=True).ordered() + cell_methods = self.cell_methods().ordered() w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -11932,10 +11882,8 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Within days # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value(None) + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) if coord is None or not coord.Units.isreftime: raise ValueError( @@ -12022,10 +11970,8 @@ def _group_weights(weights, iaxis, index): # ---------------------------------------------------- # Within years # ---------------------------------------------------- - coord = ( - self.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value() + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) if coord is None or not coord.Units.isreftime: raise ValueError( @@ -12245,10 +12191,8 @@ def _group_weights(weights, iaxis, index): # Hack to fix missing bounds! for g in fl: try: - c = ( - g.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value() + c = g.dimension_coordinate( + filter_by_axis=(axis,), default=None ) if not c.has_bounds(): c.set_bounds(c.create_bounds()) @@ -12263,10 +12207,9 @@ def _group_weights(weights, iaxis, index): and coord.construct_type == "dimension_coordinate" ): fl.sort( - key=lambda g: g.dimension_coordinates(view=True) - .filter_by_axis(axis, mode="exact", view=True) - .value() - .datum(0), + key=lambda g: g.dimension_coordinate( + filter_by_axis=(axis,) + ).datum(0), reverse=coord.decreasing, ) @@ -12794,7 +12737,7 @@ def indices(self, *mode, **kwargs): key = None construct = None else: - ## c = constructs.filter_by_identity(identity, view=True) + # c = constructs.filter_by_identity(identity, view=True) # c = self.constructs.filter( # filter_by_data=True, # filter_by_identity=(identity,), @@ -15400,60 +15343,6 @@ def autocyclic(self, verbose=None): return True - def axes(self, axes=None, **kwargs): - """Return domain axis constructs. - - .. seealso:: `constructs`, `domain_axis`, `domain_axes` - - :Parameters: - - axes: - - kwargs: deprecated at version 3.0.0 - - :Returns: - - `Constructs` - The domain axis constructs and their construct keys. 
- - **Examples:** - - >>> f.axes() - Constructs: - {} - - >>> f.axes() - Constructs: - {'domainaxis0': , - 'domainaxis1': , - 'domainaxis2': , - 'domainaxis3': } - - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "axes", - kwargs, - "Use methods of the 'domain_axes' attribute instead.", - ) # pragma: no cover - - if axes is None: - return self.domain_axes() - - if isinstance(axes, (str, int)): - axes = (axes,) - - out = [ - self.domain_axis(identity, key=True, default=None) - for identity in axes - ] - - out = set(out) - out.discard(None) - - return self.domain_axes().filter_by_key(*out, view=True) - @_deprecated_kwarg_check("i") def squeeze(self, axes=None, inplace=False, i=False, **kwargs): """Remove size 1 axes from the data. @@ -15803,9 +15692,10 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): def auxiliary_coordinate( self, - identity=None, + *identity, default=ValueError(), key=False, + item=False, **filter_kwargs, ): """Return an auxiliary coordinate construct, or its key. @@ -15912,7 +15802,7 @@ def auxiliary_coordinate( """ c = self._select_construct( - ("auxiliary_coordinate"), + ("auxiliary_coordinate",), "auxiliary_coordinate", identity, key=key, @@ -15923,19 +15813,12 @@ def auxiliary_coordinate( if c is not None: return c - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - return self._default( - default, - f"{self.__class__.__name__}.auxiliary_coordinate() can't " - "return zero constructs", - ) - + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: return self._select_construct( - ("auxiliary_coordinate"), + ("auxiliary_coordinate",), "auxiliary_coordinate", - identity, + (), key=key, item=item, default=default, @@ -15943,6 +15826,15 @@ def auxiliary_coordinate( axis_mode="exact", ) + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.auxiliary_coordinate() can only " + "return a unique construct", + ) + def construct( self, *identity, @@ -16406,30 +16298,45 @@ def cell_method( TODO """ - cell_methods = self.cell_methods(view=True) - c = cell_methods - - if identity is not None: - c = c(identity, view=True) - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - cm_keys = [ - key - for key, cm in cell_methods.items() - if cm.get_axes(None) == (da_key,) - ] - if cm_keys: - c = cell_methods(*cm_keys, view=True) - else: - c = cell_methods(None, view=True) + c = self._select_construct( + ("cell_method",), + "cell_method", + identity, + key=key, + item=item, + default=None, + **filter_kwargs, + ) + if c is not None: + return c + + domain_axes = self.domain_axes(*identity, todict=True) + if domain_axes: + cell_methods = self.cell_methods(todict=True) + cm_keys = [ + k + for k, cm in cell_methods.items() + for da_key in domain_axes + if cm.get_axes(None) == (da_key,) + ] + if len(cm_keys) == 1: + k = cm_keys[0] + if key: + return k - if key: - return c.key(default=default) + if item: + return k, cell_methods[k] - if item: - return c.key(default=default), c.value(default=default) + return cell_methods[k] - return c.value(default=default) + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.cell_method() can only " + "return a unique construct", + ) def coordinate( self, @@ -16439,7 +16346,8 @@ def coordinate( item=False, **filter_kwargs, ): - """Return a dimension or auxiliary coordinate construct, or its key. 
+ """Return a dimension or auxiliary coordinate construct, or its + key. .. versionadded:: 3.0.0 @@ -16548,19 +16456,12 @@ def coordinate( if c is not None: return c - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - return self._default( - default, - f"{self.__class__.__name__}.coordinate() can't " - "return zero constructs", - ) - + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: return self._select_construct( ("dimension_coordinate", "auxiliary_coordinate"), "coordinate", - identity, + (), key=key, item=item, default=default, @@ -16568,6 +16469,15 @@ def coordinate( axis_mode="exact", ) + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.coordinate() can only " + "return a unique construct", + ) + def coordinate_reference( self, *identity, @@ -16912,19 +16822,12 @@ def dimension_coordinate( if c is not None: return c - if not c: - da_key = self.domain_axis(identity, key=True, default=None) - if da_key is not None: - return self._default( - default, - f"{self.__class__.__name__}.dimension_coordinate can't " - "return zero constructs", - ) - + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: return self._select_construct( ("dimension_coordinate",), "dimension_coordinate", - identity, + (), key=key, item=item, default=default, @@ -16932,6 +16835,15 @@ def dimension_coordinate( axis_mode="exact", ) + if default is None: + return None + + return self._default( + default, + f"{self.__class__.__name__}.dimension_coordinate() can only " + "return a unique construct", + ) + def domain_axis( self, *identity, @@ -17041,6 +16953,9 @@ def domain_axis( pass if not identity2: + if default is None: + return default + return self._default( default, "Indices do not exist for field construct data dimenions", @@ -17073,13 +16988,16 @@ def domain_axis( return construct + if default is None: + return default + return self._default( default, f"{self.__class__.__name__}.domain_axis() can't return zero " "constructs", ) - def domain_axis_position(self, identity): + def domain_axis_position(self, *identity): """Return the position in the data of a domain axis construct. .. versionadded:: 3.0.0 @@ -17169,7 +17087,7 @@ def domain_axis_position(self, identity): 1 """ - key = self.domain_axis(identity, key=True) + key = self.domain_axis(*identity, key=True) return self.get_data_axes().index(key) def axes_names(self, *identities, **kwargs): @@ -17208,7 +17126,7 @@ def axes_names(self, *identities, **kwargs): return out - def axis_size(self, identity, default=ValueError(), axes=None, **kwargs): + def axis_size(self, *identity, default=ValueError(), axes=None, **kwargs): """Return the size of a domain axis construct. 
:Parameters: @@ -17304,7 +17222,7 @@ def axis_size(self, identity, default=ValueError(), axes=None, **kwargs): self, "axis_size", kwargs, "See f.domain_axes" ) # pragma: no cover - axis = self.domain_axis(identity, key=True) + axis = self.domain_axis(*identity, key=True) domain_axes = self.domain_axes(todict=True) @@ -18613,7 +18531,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): f.set_data_axes(new_data_axes) # Modify or remove cell methods that span the flatten axes - for key, cm in tuple(f.cell_methods(todict=True).items()): + for key, cm in f.cell_methods(todict=True).items(): cm_axes = set(cm.get_axes(())) if not cm_axes or cm_axes.isdisjoint(axes): continue @@ -18627,22 +18545,29 @@ def flatten(self, axes=None, return_axis=False, inplace=False): set_axes = True for i, a in enumerate(cm_axes): sn = None - for ctype in ( - "dimension_coordinate", - "auxiliary_coordinate", - ): - for c in ( - f.constructs.filter_by_type(ctype, view=True) - .filter_by_axis(a, mode="exact", view=True) - .values() - ): - sn = c.get_property("standard_name", None) - if sn is not None: - break - + for c in f.coordinates( + filter_by_axis=(a,), axis_mode="exact", todict=True + ).values(): + sn = c.get_property("standard_name", None) if sn is not None: break + # for ctype in ( + # "dimension_coordinate", + # "auxiliary_coordinate", + # ): + # for c in ( + # f.constructs.filter_by_type(ctype, view=True) + # .filter_by_axis(a, mode="exact", view=True) + # .values() + # ): + # sn = c.get_property("standard_name", None) + # if sn is not None: + # break + # + # if sn is not None: + # break + if sn is None: f.del_construct(key) set_axes = False @@ -18665,7 +18590,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Flatten the constructs that span all of the flattened axes, # and no others. 
for key, c in f.constructs.filter_by_axis( - *axes, mode="and", view=True + "and", *axes, todict=True ).items(): c_axes = f.get_data_axes(key) c_iaxes = sorted( @@ -18680,9 +18605,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Remove constructs that span some, but not all, of the # flattened axes - for key in tuple( - f.constructs.filter_by_axis(*axes, mode="or", view=True).keys() - ): + for key in f.constructs.filter_by_axis("or", *axes, todict=True): f.del_construct(key) # Remove the domain axis constructs for the flattened axes @@ -20900,287 +20823,107 @@ def derivative( # ---------------------------------------------------------------- # Aliases # ---------------------------------------------------------------- - def aux(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.auxiliary_coordinate`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "aux", - kwargs, - "Use methods of the 'auxiliary_coordinates' attribute instead.", - ) # pragma: no cover - - return self.auxiliary_coordinate(identity, key=key, default=default) - - def auxs(self, *identities, **kwargs): - """Alias for `cf.Field.auxiliary_coordinates()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "auxs", - kwargs, - "Use methods of the 'auxiliary_coordinates' attribute " - "instead.", - ) # pragma: no cover - - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.auxiliary_coordinates()(*identities) - - def axis(self, identity, key=False, default=ValueError(), **kwargs): - """Alias of `cf.Field.domain_axis`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "axis", - kwargs, - "Use methods of the 'domain_axes' attribute instead.", - ) # pragma: no cover - - return self.domain_axis(identity, key=key, default=default) - - def coord(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.coordinate`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "coord", - kwargs, - "Use methods of the 'coordinates' attribute instead.", - ) # pragma: no cover - - if identity in self.domain_axes(todict=True): - # Allow an identity to be the domain axis construct key - # spanned by a dimension coordinate construct - return self.dimension_coordinate( - identity, key=key, default=default - ) - - return self.coordinate(identity, key=key, default=default) - - def coords(self, *identities, **kwargs): - """Alias for `cf.Field.coordinates()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "coords", - kwargs, - "Use methods of the 'coordinates' attribute instead.", - ) # pragma: no cover - - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. 
Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.coordinates(view=True).filter_by_identity( - *identities, **kwargs + def aux( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `auxiliary_coordinate`.""" + return self.auxiliary_coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs ) - def dim(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.dimension_coordinate`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "dim", - kwargs, - "Use methods of the 'dimension_coordinates' attribute " - "instead.", - ) # pragma: no cover - - return self.dimension_coordinate(identity, key=key, default=default) + def auxs(self, *identities, **filter_kwargs): + """Alias for `coordinates`.""" + return self.auxiliary_coordinates(*identities, **filter_kwargs) - def dims(self, *identities, **kwargs): - """Alias for `cf.Field.dimension_coordinates()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "dims", - kwargs, - "Use methods of the 'dimension_coordinates' attribute " - "instead.", - ) # pragma: no cover + def axes(self, *identities, **filter_kwargs): + """Alias for `domain_axes`.""" + return self.domain_axes(*identities, **filter_kwargs) - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.dimension_coordinates(view=True).filter_by_identity( - *identities, **kwargs + def axis( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `domain_axis`.""" + return self.domain_axis( + *identity, key=key, default=default, item=item, **filter_kwargs ) - def domain_anc(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.domain_ancillary`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "domain_anc", - kwargs, - "Use methods of the 'domain_ancillaries' attribute " - "instead.", - ) # pragma: no cover - - return self.domain_ancillary(identity, key=key, default=default) - - def domain_ancs(self, *identities, **kwargs): - """Alias for `cf.Field.domain_ancillaries()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "domain_ancs", - kwargs, - "Use methods of the 'domain_ancillaries' attribute " - "instead.", - ) # pragma: no cover - - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. 
Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.domain_ancillaries(view=True).filter_by_identity( - *identities, **kwargs + def coord( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `coordinate`.""" + return self.coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs ) - def field_anc(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.field_ancillary`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "field_anc", - kwargs, - "Use methods of the 'field_ancillaries' attribute " "instead.", - ) # pragma: no cover - - return self.field_ancillary(identity, key=key, default=default) - - def field_ancs(self, *identities, **kwargs): - """Alias for `cf.Field.field_ancillaries()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "field_ancs", - kwargs, - "Use methods of the 'field_ancillaries' attribute " "instead.", - ) # pragma: no cover + def coords(self, *identities, **filter_kwargs): + """Alias for `coordinates`.""" + return self.coordinates(*identities, **filter_kwargs) - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.field_ancillaries(view=True).filter_by_identity( - *identities, **kwargs + def dim( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `dimension_coordinate`.""" + return self.dimension_coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs ) - def item(self, identity, key=False, default=ValueError(), **kwargs): - """Alias for `cf.Field.construct`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "item", - kwargs, - "Use methods of the 'constructs' attribute instead.", - ) # pragma: no cover + def dims(self, *identities, **filter_kwargs): + """Alias for `dimension_coordinates`.""" + return self.dimension_coordinates(*identities, **filter_kwargs) - return self.construct(identity, key=key, default=default) + def domain_anc( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `domain_ancillary`.""" + return self.domain_ancillary( + *identity, key=key, default=default, item=item, **filter_kwargs + ) - def items(self, *identities, **kwargs): - """Alias for `c.Field.constructs.filter_by_data`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "items", - kwargs, - "Use methods of the 'constructs' attribute instead.", - ) # pragma: no cover + def domain_ancs(self, *identities, **filter_kwargs): + """Alias for `domain_ancillaries`.""" + return self.domain_ancillaries(*identities, **filter_kwargs) - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover + def field_anc( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `cf.Field.field_ancillary`.""" + return self.field_ancillary( + 
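# Illustrative sketch, not part of the patch: the rewritten aliases above just
# forward their arguments, so the short and long spellings take the same
# *identity, key, default, item and filter keywords.  Identities assume
# cf.example_field(1).
import cf

f = cf.example_field(1)

assert f.aux("latitude").equals(f.auxiliary_coordinate("latitude"))
assert f.dim("grid_latitude", key=True) == f.dimension_coordinate(
    "grid_latitude", key=True
)
assert f.axis("X", key=True) == f.domain_axis("X", key=True)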
*identity, key=key, default=default, item=item, **filter_kwargs + ) - return self.constructs.filter_by_data().filter_by_identity(*identities) + def field_ancs(self, *identities, **filter_kwargs): + """Alias for `field_ancillaries`.""" + return self.field_ancillaries(*identities, **filter_kwargs) def key(self, identity, default=ValueError(), **kwargs): """Alias for `cf.Field.construct_key`.""" @@ -21194,84 +20937,47 @@ def key(self, identity, default=ValueError(), **kwargs): return self.construct_key(identity, default=default) - def measure(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.cell_measure`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "measure", - kwargs, - "Use methods of the 'cell_measures' attribute instead", - ) # pragma: no cover - - return self.cell_measure(identity, key=key, default=default) - - def measures(self, *identities, **kwargs): - """Alias for `cf.Field.cell_measures()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "measures", - kwargs, - "Use methods of the 'cell_measures' attribute instead", - ) # pragma: no cover - - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover - elif isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error and i.startswith("measure:"): - error = False - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.cell_measures()(*identities) - - def ref(self, identity, default=ValueError(), key=False, **kwargs): - """Alias for `cf.Field.coordinate_reference`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "ref", - kwargs, - "Use methods of the 'coordinate_references' attribute " - "instead.", - ) # pragma: no cover - - return self.coordinate_reference(identity, key=key, default=default) + def measure( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `cell_measure`.""" + return self.cell_measure( + *identity, + key=key, + default=default, + item=item, + **filter_kwargs, + ) - def refs(self, *identities, **kwargs): - """Alias for `cf.Field.coordinate_references()`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "refs", - kwargs, - "Use methods of the 'coordinate_references' attribute " - "instead.", - ) # pragma: no cover + def measures(self, *identities, **filter_kwargs): + """Alias for `cell_measures`.""" + return self.cell_measures(*identities, **filter_kwargs) - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT() # pragma: no cover - elif isinstance(i, (list, tuple, set)): - _DEPRECATION_ERROR_SEQUENCE(i) # pragma: no cover + def ref( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Alias for `coordinate_reference`.""" + return self.coordinate_reference( + *identity, + key=key, + default=default, + item=item, + **filter_kwargs, + ) - return self.coordinate_references()(*identities) + def refs(self, *identities, **filter_kwargs): + """Alias for `coordinate_references`.""" + return self.coordinate_references(*identities, **filter_kwargs) # ---------------------------------------------------------------- # Deprecated attributes and methods @@ -21289,8 +20995,8 @@ def 
_Axes(self): def CellMethods(self): """""" raise DeprecationError( - f"{self.__class__.__name__} attribute 'CellMethods' has been deprecated " - "at version 3.0.0 and is no longer available" + f"{self.__class__.__name__} attribute 'CellMethods' has been " + "deprecated at version 3.0.0 and is no longer available" "Use 'cell_methods' instead." ) @@ -21519,6 +21225,25 @@ def insert_ref(self, item, key=None, axes=None, copy=True, replace=True): "instead.", ) # pragma: no cover + def item( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """""" + _DEPRECATION_ERROR_METHOD( + self, "item", "Use 'construct' method instead." + ) # pragma: no cover + + def items(self, *identities, **filter_kwargs): + """""" + _DEPRECATION_ERROR_METHOD( + self, "items", "Use 'constructs' method instead." + ) # pragma: no cover + def item_axes( self, description=None, diff --git a/cf/test/create_test_files.py b/cf/test/create_test_files.py index 6f40d454e0..a6898168e6 100644 --- a/cf/test/create_test_files.py +++ b/cf/test/create_test_files.py @@ -1528,6 +1528,6 @@ def _make_broken_bounds_cdl(filename): if __name__ == "__main__": print("Run date:", datetime.datetime.now()) - print(cf.environment()) + cf.environment() print() unittest.main(verbosity=2) diff --git a/cf/test/setup_create_field.py b/cf/test/setup_create_field.py index 687624c806..ec484646c9 100644 --- a/cf/test/setup_create_field.py +++ b/cf/test/setup_create_field.py @@ -205,9 +205,6 @@ def test_create_field(self): x = f.dump(display=False) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_AuxiliaryCoordinate.py b/cf/test/test_AuxiliaryCoordinate.py index 40f7431b32..c779a4c2da 100644 --- a/cf/test/test_AuxiliaryCoordinate.py +++ b/cf/test/test_AuxiliaryCoordinate.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import os import unittest import numpy @@ -11,10 +10,7 @@ class AuxiliaryCoordinateTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - # f = cf.read(filename)[0] + f = cf.example_field(1) aux1 = cf.AuxiliaryCoordinate() aux1.standard_name = "latitude" @@ -46,37 +42,32 @@ class AuxiliaryCoordinateTest(unittest.TestCase): def test_AuxiliaryCoordinate_mask_invalid(self): a = self.aux1.copy() - _ = a.mask_invalid() + a.mask_invalid() self.assertIsNone(a.mask_invalid(inplace=True)) a.del_bounds() - _ = a.mask_invalid() + a.mask_invalid() self.assertIsNone(a.mask_invalid(inplace=True)) def test_AuxiliaryCoordinate_chunk(self): a = self.aux1.copy() - a.chunk() def test_AuxiliaryCoordinate__repr__str__dump(self): - f = cf.read(self.filename)[0] - x = f.auxiliary_coordinate("latitude") - - _ = repr(x) - _ = str(x) - _ = x.dump(display=False) + x = self.f.auxiliary_coordinate("latitude") + repr(x) + str(x) + x.dump(display=False) def test_AuxiliaryCoordinate_bounds(self): - f = cf.read(self.filename)[0] - d = f.dimension_coordinate("X") + d = self.f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) - _ = x.upper_bounds - _ = x.lower_bounds + x.upper_bounds + x.lower_bounds def test_AuxiliaryCoordinate_properties(self): - f = cf.read(self.filename)[0] - x = f.auxiliary_coordinate("latitude") + x = self.f.auxiliary_coordinate("latitude") x.positive = "up" self.assertEqual(x.positive, "up") @@ -88,12 +79,11 @@ def test_AuxiliaryCoordinate_properties(self): del x.axis self.assertIsNone(getattr(x, "axis", None)) - d = 
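# Illustrative sketch, not part of the patch: item() and items() now raise a
# deprecation error, and construct(), construct_key() and constructs() are the
# documented replacements.  Identities assume cf.example_field(1).
import cf

f = cf.example_field(1)

x = f.construct("grid_longitude")         # replaces f.item("grid_longitude")
key = f.construct_key("grid_longitude")   # replaces f.item(..., key=True)
data_constructs = f.constructs.filter_by_data()  # replaces f.items()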
f.dimension_coordinate("X") + d = self.f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) def test_AuxiliaryCoordinate_insert_dimension(self): - f = cf.read(self.filename)[0] - d = f.dimension_coordinate("X") + d = self.f.dimension_coordinate("X") x = cf.AuxiliaryCoordinate(source=d) self.assertEqual(x.shape, (9,)) @@ -108,8 +98,7 @@ def test_AuxiliaryCoordinate_insert_dimension(self): self.assertEqual(x.bounds.shape, (9, 1, 2), x.bounds.shape) def test_AuxiliaryCoordinate_transpose(self): - f = cf.read(self.filename)[0] - x = f.auxiliary_coordinate("longitude") + x = self.f.auxiliary_coordinate("longitude").copy() bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) @@ -128,8 +117,7 @@ def test_AuxiliaryCoordinate_transpose(self): self.assertEqual(x.bounds.shape, (10, 9, 4), x.bounds.shape) def test_AuxiliaryCoordinate_squeeze(self): - f = cf.read(self.filename)[0] - x = f.auxiliary_coordinate("longitude") + x = self.f.auxiliary_coordinate("longitude").copy() bounds = cf.Bounds( data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)) @@ -234,34 +222,33 @@ def test_AuxiliaryCoordinate_rint(self): def test_AuxiliaryCoordinate_close(self): aux = self.aux1.copy() - aux.close() def test_AuxiliaryCoordinate_sin_cos_tan(self): aux = self.aux1.copy() - _ = aux.cos() + aux.cos() self.assertIsNone(aux.cos(inplace=True)) - _ = aux.sin() + aux.sin() self.assertIsNone(aux.sin(inplace=True)) - _ = aux.tan() + aux.tan() self.assertIsNone(aux.tan(inplace=True)) def test_AuxiliaryCoordinate_log_exp(self): aux = self.aux1.copy() - _ = aux.exp() + aux.exp() self.assertIsNone(aux.exp(inplace=True)) - _ = aux.log() + aux.log() self.assertIsNone(aux.log(inplace=True)) def test_AuxiliaryCoordinate_count(self): aux = self.aux1.copy() - _ = aux.count() + aux.count() aux.del_data() with self.assertRaises(Exception): @@ -277,7 +264,7 @@ def test_AuxiliaryCoordinate_cyclic(self): def test_AuxiliaryCoordinate_roll(self): aux = self.aux1.copy() - _ = aux.roll(0, 3) + aux.roll(0, 3) self.assertIsNone(aux.roll(-1, 4, inplace=True)) def test_AuxiliaryCoordinate_round(self): diff --git a/cf/test/test_CellMeasure.py b/cf/test/test_CellMeasure.py index 4fd77acd6a..95261aee42 100644 --- a/cf/test/test_CellMeasure.py +++ b/cf/test/test_CellMeasure.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import os import unittest faulthandler.enable() # to debug seg faults and timeouts @@ -9,22 +8,17 @@ class CellMeasureTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - # f = cf.read(filename)[0] + f = cf.example_field(1) def test_CellMeasure__repr__str__dump(self): - f = cf.read(self.filename)[0] - x = f.cell_measures(view=True)("measure:area").value() + x = self.f.cell_measure("measure:area") - _ = repr(x) - _ = str(x) - _ = x.dump(display=False) + repr(x) + str(x) + x.dump(display=False) def test_CellMeasure_measure(self): - f = cf.read(self.filename)[0] - x = f.cell_measures(view=True)("measure:area").value() + x = self.f.cell_measure("measure:area").copy() self.assertEqual(x.measure, "area") del x.measure @@ -33,8 +27,7 @@ def test_CellMeasure_measure(self): self.assertEqual(x.measure, "qwerty") def test_CellMeasure_identity(self): - f = cf.read(self.filename)[0] - x = f.cell_measures(view=True)("measure:area").value() + x = self.f.cell_measure("measure:area").copy() self.assertEqual(x.identity(), "measure:area") del x.measure diff --git a/cf/test/test_CellMethod.py b/cf/test/test_CellMethod.py index 
913b2c848e..87c4e37f7d 100644 --- a/cf/test/test_CellMethod.py +++ b/cf/test/test_CellMethod.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import unittest faulthandler.enable() # to debug seg faults and timeouts @@ -52,36 +51,21 @@ class CellMethodTest(unittest.TestCase): "time: sum within days time: maximum over days", ) - test_only = [] - # test_only = ['test_CellMethod___str__'] - # test_only = ['test_CellMethod_equals'] - # test_only = ['test_CellMethod_equivalent'] - # test_only = ['test_CellMethod_get_set_delete'] - def test_CellMethod__repr__str__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for s in self.strings: cms = cf.CellMethod.create(s) t = " ".join(map(str, cms)) self.assertEqual(t, s, "{!r} != {!r}".format(t, s)) for cm in cms: - _ = repr(cm) + repr(cm) def test_CellMethod_equals(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for s in self.strings: cms = cf.CellMethod.create(s) for cm in cms: self.assertTrue(cm.equals(cm.copy(), verbose=2)) def test_CellMethod_equivalent(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for s in self.strings: cms = cf.CellMethod.create(s) for cm in cms: @@ -140,9 +124,6 @@ def test_CellMethod_equivalent(self): ) def test_CellMethod_get_set_delete(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - cm0, cm1 = cf.CellMethod.create( "time: minimum within days time: sum over years" ) diff --git a/cf/test/test_CoordinateReference.py b/cf/test/test_CoordinateReference.py index e56745da84..5cfbc19626 100644 --- a/cf/test/test_CoordinateReference.py +++ b/cf/test/test_CoordinateReference.py @@ -31,9 +31,7 @@ def _remove_tmpfiles(): class CoordinateReferenceTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) + f = cf.example_field(1) datum = cf.Datum(parameters={"earth_radius": 6371007}) @@ -66,9 +64,6 @@ class CoordinateReferenceTest(unittest.TestCase): coordinates=["x", "y", "lat", "lon"], ) - def setUp(self): - self.f = cf.read(self.filename)[0] - def test_CoordinateReference__repr__str__dump(self): coordinate_conversion = cf.CoordinateConversion( parameters={ @@ -86,17 +81,17 @@ def test_CoordinateReference__repr__str__dump(self): datum=datum, ) - _ = repr(t) - _ = str(t) - _ = t.dump(display=False) + repr(t) + str(t) + t.dump(display=False) self.assertFalse(t.has_bounds()) - _ = repr(datum) - _ = str(datum) + repr(datum) + str(datum) - _ = repr(coordinate_conversion) - _ = str(coordinate_conversion) + repr(coordinate_conversion) + str(coordinate_conversion) def test_CoordinateReference_equals(self): # Create a vertical grid mapping coordinate reference @@ -242,7 +237,7 @@ def test_CoordinateReference_get__getitem__(self): self.vconversion.get_parameter("standard_name"), ) with self.assertRaises(Exception): - _ = self.vcr["orog"] + self.vcr["orog"] self.assertEqual( self.hcr["earth_radius"], self.datum.get_parameter("earth_radius") @@ -266,7 +261,7 @@ def test_CoordinateReference_get__getitem__(self): self.assertIsNone(self.hcr.get("qwerty")) self.assertEqual(self.hcr.get("qwerty", 12), 12) with self.assertRaises(Exception): - _ = self.hcr["qwerty"] + self.hcr["qwerty"] if __name__ == "__main__": diff --git a/cf/test/test_Count.py b/cf/test/test_Count.py index 78e784b2f9..1f61976962 100644 --- a/cf/test/test_Count.py +++ b/cf/test/test_Count.py @@ -8,6 +8,8 @@ class CountTest(unittest.TestCase): + 
contiguous = "DSG_timeSeries_contiguous.nc" + def setUp(self): # Disable log messages to silence expected warnings cf.log_level("DISABLE") @@ -20,8 +22,6 @@ def setUp(self): # < ... test code ... > # cf.log_level('DISABLE') - self.contiguous = "DSG_timeSeries_contiguous.nc" - def test_Count__repr__str__dump(self): f = cf.read(self.contiguous)[0] diff --git a/cf/test/test_Datetime.py b/cf/test/test_Datetime.py index 9d716131c9..37f93ada0d 100644 --- a/cf/test/test_Datetime.py +++ b/cf/test/test_Datetime.py @@ -59,7 +59,7 @@ def test_Datetime_rt2dt(self): np.ma.array([3]), ): b = cf.cfdatetime.rt2dt(a, Units("days since 1970-01-01")) - self.assertTrue(b == cf.dt(1970, 1, 4, calendar="gregorian")) + self.assertEqual(b, cf.dt(1970, 1, 4, calendar="gregorian")) for a in ( np.ma.array(3, mask=True), diff --git a/cf/test/test_DimensionCoordinate.py b/cf/test/test_DimensionCoordinate.py index e6a41d1fa0..f82cdbf897 100644 --- a/cf/test/test_DimensionCoordinate.py +++ b/cf/test/test_DimensionCoordinate.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import os import unittest import numpy @@ -11,11 +10,7 @@ class DimensionCoordinateTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - - # f = cf.read(filename)[0] + f = cf.example_field(1) dim = cf.DimensionCoordinate() dim.standard_name = "latitude" @@ -45,12 +40,11 @@ class DimensionCoordinateTest(unittest.TestCase): dim.set_bounds(bounds) def test_DimensionCoordinate__repr__str__dump(self): - f = cf.read(self.filename)[0] - x = f.dimension_coordinate("X") + x = self.f.dimension_coordinate("X") - _ = repr(x) - _ = str(x) - _ = x.dump(display=False) + repr(x) + str(x) + x.dump(display=False) def test_DimensionCoordinate_convert_reference_time(self): d = cf.DimensionCoordinate() @@ -134,33 +128,31 @@ def test_DimensionCoordinate_convert_reference_time(self): self.assertTrue((d.array == [1.0, 2, 3]).all()) def test_DimensionCoordinate_roll(self): - f = cf.read(self.filename)[0] - - x = f.dimension_coordinate("X") - y = f.dimension_coordinate("Y") + x = self.f.dimension_coordinate("X").copy() + y = self.f.dimension_coordinate("Y") - _ = x.roll(0, 3) + x.roll(0, 3) with self.assertRaises(Exception): y.roll(0, 3) - _ = x.roll(0, 3) - _ = x.roll(-1, 3) + x.roll(0, 3) + x.roll(-1, 3) with self.assertRaises(Exception): - _ = x.roll(2, 3) + x.roll(2, 3) a = x[0] - _ = a.roll(0, 3) + a.roll(0, 3) self.assertIsNone(a.roll(0, 3, inplace=True)) - _ = x.roll(0, 0) - _ = x.roll(0, 3, inplace=True) + x.roll(0, 0) + x.roll(0, 3, inplace=True) self.assertIsNone(x.roll(0, 0, inplace=True)) - _ = x._centre(360) - _ = x.flip()._centre(360) + x._centre(360) + x.flip()._centre(360) # Test roll on coordinate without bounds: - g = f.copy() + g = self.f.copy() g.dimension_coordinate("X").del_bounds() for shift_by in [1, -1, g.shape[2]]: # vary roll direction and extent @@ -247,11 +239,10 @@ def test_DimensionCoordinate_override_calendar(self): ) def test_DimensionCoordinate_bounds(self): - f = cf.read(self.filename)[0] - x = f.dimension_coordinate("X") + x = self.f.dimension_coordinate("X") - _ = x.upper_bounds - _ = x.lower_bounds + x.upper_bounds + x.lower_bounds self.assertTrue(x.increasing) @@ -268,8 +259,7 @@ def test_DimensionCoordinate_bounds(self): y.create_bounds() def test_DimensionCoordinate_properties(self): - f = cf.read(self.filename)[0] - x = f.dimension_coordinate("X") + x = self.f.dimension_coordinate("X").copy() x.positive = "up" self.assertEqual(x.positive, "up") @@ -283,8 +273,7 @@ 
def test_DimensionCoordinate_properties(self): self.assertEqual(x.ndim, 1) def test_DimensionCoordinate_insert_dimension(self): - f = cf.read(self.filename)[0] - x = f.dimension_coordinate("X") + x = self.f.dimension_coordinate("X").copy() self.assertEqual(x.shape, (9,)) self.assertEqual(x.bounds.shape, (9, 2)) diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index 3089c10158..55f06c0d14 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -1,7 +1,6 @@ import atexit import datetime import faulthandler -import inspect import itertools import os import re @@ -84,31 +83,25 @@ def setUp(self): self.f = cf.read(self.filename)[0] def test_Field_creation_commands(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for i in range(7): f = cf.example_field(i) - _ = f.creation_commands() + f.creation_commands() f = cf.example_field(1) for rd in (False, True): - _ = f.creation_commands(representative_data=rd) + f.creation_commands(representative_data=rd) for indent in (0, 4): - _ = f.creation_commands(indent=indent) + f.creation_commands(indent=indent) for s in (False, True): - _ = f.creation_commands(string=s) + f.creation_commands(string=s) for ns in ("cf", ""): - _ = f.creation_commands(namespace=ns) + f.creation_commands(namespace=ns) def test_Field_get_filenames(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(0) cf.write(f, tmpfile) @@ -138,9 +131,6 @@ def test_Field_get_filenames(self): self.assertEqual(g.get_filenames(), set(), g.get_filenames()) def test_Field_halo(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(7) g = f.copy() @@ -159,9 +149,6 @@ def test_Field_halo(self): ) def test_Field_has_construct(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(1) self.assertTrue(f.has_construct("T")) @@ -173,9 +160,6 @@ def test_Field_has_construct(self): self.assertFalse(f.has_construct("height")) def test_Field_compress_uncompress(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - methods = ("contiguous", "indexed", "indexed_contiguous") for method in methods: @@ -221,9 +205,6 @@ def test_Field_compress_uncompress(self): self.assertTrue(f.equals(c, verbose=2), message) def test_Field_apply_masking(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(0) for prop in ( @@ -276,9 +257,6 @@ def test_Field_apply_masking(self): self.assertTrue(e.equals(g.data, verbose=1)) def test_Field_flatten(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() axis = f.set_construct(cf.DomainAxis(1)) @@ -322,9 +300,6 @@ def test_Field_flatten(self): self.assertIsNone(f.flatten(inplace=True)) def test_Field_bin(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() d = f.digitize(10) @@ -357,9 +332,6 @@ def test_Field_bin(self): self.assertTrue((a == b.array).all()) def test_Field_direction(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() yaxis = f.domain_axis("Y", key=True) ydim = f.dimension_coordinate("Y", key=True) @@ -373,9 +345,6 @@ def test_Field_direction(self): f.directions() def test_Field_domain_axis_position(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f for i in range(f.ndim): @@ 
-392,9 +361,6 @@ def test_Field_domain_axis_position(self): self.assertEqual(f.domain_axis_position("grid_latitude"), 1) def test_Field_weights(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() f += 1 @@ -465,9 +431,6 @@ def test_Field_weights(self): f.weights(components=True, data=True) def test_Field_replace_construct(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() for x in ( @@ -486,9 +449,6 @@ def test_Field_replace_construct(self): f.replace_construct("grid_longitude", f.construct("grid_latitude")) def test_Field_allclose(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = f.copy() @@ -503,9 +463,6 @@ def test_Field_allclose(self): self.assertFalse(f.allclose(g.array)) def test_Field_collapse(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() f[0, 3] *= -1 f[0, 5, ::2] = cf.masked @@ -609,9 +566,6 @@ def test_Field_collapse(self): ) def test_Field_all(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertFalse(f.all()) @@ -623,9 +577,6 @@ def test_Field_all(self): self.assertFalse(f.all()) def test_Field_any(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertTrue(f.any()) @@ -634,9 +585,6 @@ def test_Field_any(self): self.assertFalse(f.any()) def test_Field_atol_rtol(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f g = f.copy() @@ -664,9 +612,6 @@ def test_Field_atol_rtol(self): cf.rtol(self.rtol) def test_Field_concatenate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = cf.Field.concatenate([f.copy()], axis=0) @@ -686,9 +631,6 @@ def test_Field_concatenate(self): g = cf.Field.concatenate([], axis=0) def test_Field_AUXILIARY_MASK(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - ac = numpy.ma.masked_all((3, 7)) ac[0, 0:5] = [1.0, 2.0, 3.0, -99, 5.0] ac[0, 3] = numpy.ma.masked @@ -848,9 +790,6 @@ def test_Field_AUXILIARY_MASK(self): cf.chunksize(self.original_chunksize) def test_Field__getitem__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy().squeeze() d = f.data f = self.f.copy().squeeze() @@ -883,15 +822,12 @@ def test_Field__getitem__(self): g = f[[7, 4, 1], slice(6, 8)] g = f.squeeze() - _ = g[0:3, 5] + g[0:3, 5] g = f[0].squeeze() - _ = g[5] + g[5] def test_Field__setitem__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy().squeeze() f[...] 
= 0 @@ -933,9 +869,6 @@ def test_Field__setitem__(self): f[..., 0:2] = g def test_Field__add__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = f * 0 @@ -965,12 +898,9 @@ def test_Field__add__(self): self.assertTrue(b.equals(a, verbose=2)) with self.assertRaises(Exception): - _ = f + ("a string",) + f + ("a string",) def test_Field__mul__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy().squeeze() f.standard_name = "qwerty" @@ -979,9 +909,6 @@ def test_Field__mul__(self): self.assertIsNone(g.get_property("standard_name", None)) def test_Field__gt__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy().squeeze() f.standard_name = "qwerty" @@ -991,18 +918,12 @@ def test_Field__gt__(self): self.assertIsNone(g.get_property("standard_name", None)) def test_Field_domain_mask(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() - _ = f.domain_mask() - _ = f.domain_mask(grid_longitude=cf.wi(25, 31)) + f.domain_mask() + f.domain_mask(grid_longitude=cf.wi(25, 31)) def test_Field_cumsum(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = f.copy() @@ -1042,9 +963,6 @@ def test_Field_cumsum(self): ) def test_Field_flip(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = f[(slice(None, None, -1),) * f.ndim] @@ -1068,23 +986,7 @@ def test_Field_flip(self): self.assertIsNone(f.flip("X", inplace=True)) self.assertTrue(f.equals(g, verbose=1)) - """ - def test_Field_close(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() - self.assertIsNone(f.close()) - - _ = repr(f.data) - for c in f.constructs.filter_by_data().values(): - _ = repr(c.data) - """ - def test_Field_anchor(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - dimarray = self.f.dimension_coordinate("grid_longitude").array f = self.f.copy() @@ -1133,15 +1035,12 @@ def test_Field_anchor(self): ) def test_Field_cell_area(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() ca = f.cell_area() self.assertEqual(ca.ndim, 2) - self.assertEqual(len(ca.dimension_coordinate()), 2) + self.assertEqual(len(ca.dimension_coordinates()), 2) self.assertEqual(len(ca.domain_ancillaries()), 0) self.assertEqual(len(ca.coordinate_references()), 1) @@ -1162,13 +1061,10 @@ def test_Field_cell_area(self): self.assertTrue(y.has_bounds()) def test_Field_radius(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() with self.assertRaises(Exception): - _ = f.radius() + f.radius() for default in ("earth", cf.field._earth_radius): r = f.radius(default=default) @@ -1188,22 +1084,21 @@ def test_Field_radius(self): self.assertEqual(r.Units, cf.Units("m")) self.assertEqual(r, a) - with self.assertRaises(Exception): - _ = f.radius() + with self.assertRaises(ValueError): + f.radius() - with self.assertRaises(Exception): - _ = f.radius(default=[12, 34]) + with self.assertRaises(ValueError): + f.radius(default=[12, 34]) - with self.assertRaises(Exception): - _ = f.radius(default=[[12, 34]]) + with self.assertRaises(ValueError): + f.radius(default=[[12, 34]]) - with self.assertRaises(Exception): - _ = f.radius(default="qwerty") + with self.assertRaises(ValueError): + 
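# Illustrative sketch, not part of the patch: counting constructs now goes
# through the plural accessors, which return a sized, filterable collection,
# while the singular forms return one construct.  Counts assume
# cf.example_field(1).
import cf

f = cf.example_field(1)

print(len(f.dimension_coordinates()))   # e.g. 4 for this example field
print(len(f.coordinate_references()))   # e.g. 2 for this example field
f.dimension_coordinate("X")             # a single construct, not a collection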
f.radius(default="qwerty") cr = f.coordinate_reference( "grid_mapping_name:rotated_latitude_longitude" ) - cr = f.coordinate_reference("rotated_latitude_longitude") cr.datum.set_parameter("earth_radius", a.copy()) r = f.radius(default=None) @@ -1213,60 +1108,62 @@ def test_Field_radius(self): cr = f.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" ) - cr = f.coordinate_reference("atmosphere_hybrid_height_coordinate") cr.datum.set_parameter("earth_radius", a.copy()) r = f.radius(default=None) self.assertEqual(r.Units, cf.Units("m")) self.assertEqual(r, a) - cr = f.coordinate_reference("atmosphere_hybrid_height_coordinate") + cr = f.coordinate_reference( + "standard_name:atmosphere_hybrid_height_coordinate" + ) cr.datum.set_parameter("earth_radius", cf.Data(5678, "km")) - with self.assertRaises(Exception): - _ = f.radius(default=None) + with self.assertRaises(ValueError): + f.radius(default=None) - cr = f.coordinate_reference("atmosphere_hybrid_height_coordinate") + cr = f.coordinate_reference( + "standard_name:atmosphere_hybrid_height_coordinate" + ) cr.datum.del_parameter("earth_radius") - cr = f.coordinate_reference("rotated_latitude_longitude") + cr = f.coordinate_reference( + "grid_mapping_name:rotated_latitude_longitude" + ) cr.datum.set_parameter("earth_radius", cf.Data([123, 456], "m")) - with self.assertRaises(Exception): - _ = f.radius(default=None) + with self.assertRaises(ValueError): + f.radius(default=None) def test_Field_set_get_del_has_data(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() - _ = f.rank - _ = f.data + f.rank + f.data del f.data f = self.f.copy() self.assertTrue(f.has_data()) data = f.get_data() - _ = f.del_data() - _ = f.get_data(default=None) - _ = f.del_data(default=None) + f.del_data() + f.get_data(default=None) + f.del_data(default=None) self.assertFalse(f.has_data()) - _ = f.set_data(data, axes=None) - _ = f.set_data(data, axes=None, copy=False) + f.set_data(data, axes=None) + f.set_data(data, axes=None, copy=False) self.assertTrue(f.has_data()) f = self.f.copy() - _ = f.del_data_axes() + f.del_data_axes() self.assertFalse(f.has_data_axes()) self.assertIsNone(f.del_data_axes(default=None)) f = self.f.copy() for key in f.constructs.filter_by_data(): self.assertTrue(f.has_data_axes(key)) - _ = f.get_data_axes(key) - _ = f.del_data_axes(key) + f.get_data_axes(key) + f.del_data_axes(key) self.assertIsNone(f.del_data_axes(key, default=None)) self.assertIsNone(f.get_data_axes(key, default=None)) self.assertFalse(f.has_data_axes(key)) @@ -1324,9 +1221,6 @@ def test_Field_set_get_del_has_data(self): g.set_data(cf.Data(numpy.arange(90).reshape(10, 9))) def test_Field_get_data_axes(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f self.assertEqual( f.get_data_axes(), @@ -1345,9 +1239,6 @@ def test_Field_get_data_axes(self): self.assertIsNone(f.get_data_axes(default=None)) def test_Field_equals(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() g = f.copy() self.assertTrue(f.equals(f, verbose=2)) @@ -1358,9 +1249,6 @@ def test_Field_equals(self): self.assertFalse(f.equals(g + 1)) def test_Field_insert_dimension(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() f.squeeze("Z", inplace=True) self.assertEqual(f.ndim, 2) @@ -1375,9 +1263,6 @@ def test_Field_insert_dimension(self): f.insert_dimension(1, "qwerty") def 
test_Field_indices(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() array = numpy.ma.array(f.array) @@ -1668,9 +1553,6 @@ def test_Field_indices(self): f.indices(grid_latitude=cf.contains(-23.2)) def test_Field_match(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() f.long_name = "qwerty" f.nc_set_variable("tas") @@ -1788,9 +1670,6 @@ def test_Field_match(self): ) def test_Field_autocyclic(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertFalse(f.autocyclic()) @@ -1798,47 +1677,9 @@ def test_Field_autocyclic(self): f.autocyclic() def test_Field_construct_key(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() - x = f.construct("grid_longitude") - i = f.item("grid_longitude") - self.assertTrue(x.equals(i, verbose=2)) - - x = f.construct_key("grid_longitude") - i = f.item("grid_longitude", key=True) - self.assertTrue(x == i) - - def test_Field_item(self): - # v2 compatibility - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() - x = f.construct("grid_longitude") - i = f.item("grid_longitude") - self.assertTrue(x.equals(i, verbose=2)) - - x = f.construct_key("grid_longitude") - i = f.item("grid_longitude", key=True) - self.assertTrue(x == i) - - x = f.construct("grid_longitude", key=True) - i = f.item("grid_longitude", key=True) - self.assertTrue(x == i) - - self.assertTrue( - f.constructs.filter_by_data().equals(f.items(), verbose=2) - ) - self.assertTrue( - f.constructs("X", "Y").equals(f.items(*["X", "Y"]), verbose=2) - ) + self.f.construct_key("grid_longitude") def test_Field_convolution_filter(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - if not SCIPY_AVAILABLE: # needed for 'convolution_filter' method raise unittest.SkipTest("SciPy must be installed for this test.") @@ -1856,9 +1697,6 @@ def test_Field_convolution_filter(self): ) def test_Field_moving_window(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - if not SCIPY_AVAILABLE: # needed for 'moving_window' method raise unittest.SkipTest("SciPy must be installed for this test.") @@ -1870,7 +1708,7 @@ def test_Field_moving_window(self): self.assertIsNone(g) with self.assertRaises(ValueError): - _ = f.moving_window("mean", window_size=3, axis="X", cval=39) + f.moving_window("mean", window_size=3, axis="X", cval=39) f = cf.example_field(0) a = f.array @@ -1997,16 +1835,11 @@ def test_Field_moving_window(self): g = f.moving_window("mean", window_size=3, axis="Y", weights=weights) with self.assertRaises(ValueError): - _ = f.moving_window( - "mean", window_size=3, axis="X", weights=weights - ) + f.moving_window("mean", window_size=3, axis="X", weights=weights) self.assertEqual(len(g.cell_methods()), len(f.cell_methods()) + 1) def test_Field_derivative(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - if not SCIPY_AVAILABLE: # needed for 'derivative' method raise unittest.SkipTest("SciPy must be installed for this test.") @@ -2038,9 +1871,6 @@ def test_Field_derivative(self): self.assertTrue((g.array == 2.0).all()) def test_Field_convert(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() c = f.convert("grid_latitude") @@ -2075,17 +1905,11 @@ def test_Field_convert(self): f.convert("qwerty") def 
test_Field_section(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.filename2)[0][0:10] g = f.section(("X", "Y")) self.assertEqual(len(g), 10, "len(g)={}".format(len(g))) def test_Field_squeeze(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertIsNone(f.squeeze(inplace=True)) @@ -2098,9 +1922,6 @@ def test_Field_squeeze(self): self.assertIsNone(f.squeeze(0, inplace=True)) def test_Field_unsqueeze(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertEqual(f.ndim, 3) @@ -2116,31 +1937,20 @@ def test_Field_unsqueeze(self): self.assertEqual(f.ndim, 2) def test_Field_auxiliary_coordinate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() + f = self.f for identity in ("auxiliarycoordinate1", "latitude"): - key = f.construct_key(identity) - c = f.construct(identity) - + key, c = f.construct_item(identity) self.assertTrue( f.auxiliary_coordinate(identity).equals(c, verbose=2) ) self.assertEqual(f.auxiliary_coordinate(identity, key=True), key) - identities = ["latitude", "longitude"] - auxiliary_coordinates = f.auxiliary_coordinates() - auxiliary_coordinates(*identities) - auxiliary_coordinates() - auxiliary_coordinates(identities[0]) + with self.assertRaises(ValueError): + f.aux("long_name:qwerty") def test_Field_coordinate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() + f = self.f for identity in ( "domainaxis2", @@ -2154,36 +1964,16 @@ def test_Field_coordinate(self): filter_by_axis=(identity,), axis_mode="exact", todict=True ).popitem() - # key = ( - # f.dimension_coordinates() - # .filter_by_axis(identity, mode="and") - # .key() - # ) - # c = ( - # f.dimension_coordinates(view=True) - # .filter_by_axis(identity, mode="and") - # .value() - # ) else: - key = f.construct_key(identity) - c = f.construct(identity) - - print(identity, key, repr(c)) - print(f.coordinate(identity)) + key, c = f.construct_item(identity) self.assertTrue(f.coordinate(identity).equals(c, verbose=2)) - self.assertTrue(f.coordinate(identity, key=True) == key) + self.assertEqual(f.coordinate(identity, key=True), key) - identities = ["auxiliarycoordinate1", "dimensioncoordinate1"] - coordinates = f.coordinates() - coordinates(*identities) - coordinates() - coordinates(identities[0]) + with self.assertRaises(ValueError): + f.coord("long_name:qweRty") def test_Field_coordinate_reference(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() for identity in ( @@ -2192,37 +1982,37 @@ def test_Field_coordinate_reference(self): "standard_name:atmosphere_hybrid_height_coordinate", "grid_mapping_name:rotated_latitude_longitude", ): - # 'atmosphere_hybrid_height_coordinate', - # 'rotated_latitude_longitude'): key = f.construct_key(identity) c = f.construct(identity) self.assertTrue( f.coordinate_reference(identity).equals(c, verbose=2) ) - self.assertTrue(f.coordinate_reference(identity, key=True) == key) + self.assertEqual(f.coordinate_reference(identity, key=True), key) key = f.construct_key( "standard_name:atmosphere_hybrid_height_coordinate" ) - self.assertTrue( + self.assertEqual( f.coordinate_reference( - "atmosphere_hybrid_height_coordinate", key=True - ) - == key + "standard_name:atmosphere_hybrid_height_coordinate", key=True + ), + key, ) key = 
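# Illustrative sketch, not part of the patch: construct_item() returns the
# (key, construct) pair in one call, which is what the updated tests compare
# the singular accessors against.  Identities assume cf.example_field(1).
import cf

f = cf.example_field(1)

key, c = f.construct_item("grid_longitude")
assert key == f.dimension_coordinate("grid_longitude", key=True)
assert c.equals(f.dimension_coordinate("grid_longitude"))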
f.construct_key("grid_mapping_name:rotated_latitude_longitude") - self.assertTrue( - f.coordinate_reference("rotated_latitude_longitude", key=True) - == key + self.assertEqual( + f.coordinate_reference( + "grid_mapping_name:rotated_latitude_longitude", key=True + ), + key, ) # Delete self.assertIsNone(f.del_coordinate_reference("qwerty", default=None)) - self.assertTrue(len(f.coordinate_references()) == 2) - self.assertTrue(len(f.domain_ancillaries()) == 3) + self.assertEqual(len(f.coordinate_references()), 2) + self.assertEqual(len(f.domain_ancillaries()), 3) c = f.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" ) @@ -2230,13 +2020,13 @@ def test_Field_coordinate_reference(self): "standard_name:atmosphere_hybrid_height_coordinate" ) self.assertTrue(cr.equals(c, verbose=2)) - self.assertTrue(len(f.coordinate_references()) == 1) - self.assertTrue(len(f.domain_ancillaries()) == 0) + self.assertEqual(len(f.coordinate_references()), 1) + self.assertEqual(len(f.domain_ancillaries()), 0) f.del_coordinate_reference( "grid_mapping_name:rotated_latitude_longitude" ) - self.assertTrue(len(f.coordinate_references()) == 0) + self.assertEqual(len(f.coordinate_references()), 0) # Set f = self.f.copy() @@ -2249,7 +2039,7 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references()) == 1) + self.assertEqual(len(f.coordinate_references()), 1) cr = g.coordinate_reference( "standard_name:atmosphere_hybrid_height_coordinate" @@ -2259,8 +2049,8 @@ def test_Field_coordinate_reference(self): "foo", "domainancillary99" ) f.set_coordinate_reference(cr, field=g) - self.assertTrue(len(f.coordinate_references()) == 2) - self.assertTrue(len(f.domain_ancillaries()) == 3) + self.assertEqual(len(f.coordinate_references()), 2) + self.assertEqual(len(f.domain_ancillaries()), 3) f.del_construct("coordinatereference0") f.del_construct("coordinatereference1") @@ -2269,13 +2059,13 @@ def test_Field_coordinate_reference(self): "grid_mapping_name:rotated_latitude_longitude" ) f.set_coordinate_reference(cr) - self.assertTrue(len(f.coordinate_references()) == 1) + self.assertEqual(len(f.coordinate_references()), 1) - def test_Field_dimension_coordinate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return + with self.assertRaises(ValueError): + f.ref("long_name:qweRty") - f = self.f.copy() + def test_Field_dimension_coordinate(self): + f = self.f for identity in ( "domainaxis2", @@ -2287,121 +2077,83 @@ def test_Field_dimension_coordinate(self): key, c = f.dimension_coordinates( filter_by_axis=(identity,), axis_mode="exact", todict=True ).popitem() - # key = ( - # f.dimension_coordinates(view=True) - # .filter_by_axis(identity, mode="and") - # .key() - # ) - # c = ( - # f.dimension_coordinates(view=True) - # .filter_by_axis(identity, mode="and") - # .value() - # ) elif identity == "X": - key = f.construct_key("grid_longitude") - c = f.construct("grid_longitude") + key, c = f.construct_item("grid_longitude") else: - key = f.construct_key(identity) - c = f.construct(identity) + key, c = f.construct_item(identity) self.assertTrue( f.dimension_coordinate(identity).equals(c, verbose=2) ) - self.assertTrue(f.dimension_coordinate(identity, key=True) == key) + self.assertEqual(f.dimension_coordinate(identity, key=True), key) - identities = ["grid_latitude", "X"] - dimension_coordinates = f.dimension_coordinates() - dimension_coordinates(*identities) - 
dimension_coordinates() - dimension_coordinates(identities[0]) + i = f.dimension_coordinate(identity, item=True) + self.assertEqual(i[0], key) + self.assertTrue(i[1].equals(c)) - self.assertIsNone(f.dim("long_name=qwerty:asd", None)) - self.assertTrue(len(f.dims("long_name=qwerty:asd")) == 0) + self.assertIsNone( + f.dimension_coordinate("long_name=qwerty:asd", default=None) + ) + self.assertEqual( + len(f.dimension_coordinates("long_name=qwerty:asd")), 0 + ) - with self.assertRaises(Exception): + with self.assertRaises(ValueError): f.dim("long_name:qwerty") - with self.assertRaises(Exception): - f.dims("long_name:qwerty") - def test_Field_cell_measure(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() + f = self.f for identity in ("measure:area", "cellmeasure0"): - key = f.construct_key(identity) - c = f.construct(identity) + key, c = f.construct_item(identity) self.assertTrue(f.cell_measure(identity).equals(c, verbose=2)) - self.assertTrue(f.cell_measure(identity, key=True) == key) + self.assertEqual(f.cell_measure(identity, key=True), key) - self.assertTrue(f.measure(identity).equals(c, verbose=2)) - self.assertTrue(f.measure(identity, key=True) == key) + self.assertTrue(f.cell_measure(identity).equals(c, verbose=2)) + self.assertEqual(f.cell_measure(identity, key=True), key) - self.assertTrue(len(f.measures()) == 1) - self.assertTrue(len(f.measures("measure:area")) == 1) - self.assertTrue(len(f.measures(*["measure:area"])) == 1) + self.assertEqual(len(f.cell_measures()), 1) + self.assertEqual(len(f.cell_measures("measure:area")), 1) + self.assertEqual(len(f.cell_measures(*["measure:area"])), 1) - self.assertIsNone(f.measure("long_name=qwerty:asd", None)) - self.assertTrue(len(f.measures("long_name=qwerty:asd")) == 0) + self.assertIsNone(f.cell_measure("long_name=qwerty:asd", default=None)) + self.assertEqual(len(f.cell_measures("long_name=qwerty:asd")), 0) - with self.assertRaises(Exception): + with self.assertRaises(ValueError): f.measure("long_name:qwerty") - with self.assertRaises(Exception): - f.measures("long_name:qwerty") - def test_Field_cell_method(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() + f = self.f for identity in ("method:mean", "cellmethod0"): - key = f.construct_key(identity) - c = f.construct(identity) - + key, c = f.construct_item(identity) self.assertTrue(f.cell_method(identity).equals(c, verbose=2)) - self.assertTrue(f.cell_method(identity, key=True) == key) + self.assertEqual(f.cell_method(identity, key=True), key) def test_Field_domain_ancillary(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() + f = self.f for identity in ("surface_altitude", "domainancillary0"): - key = f.construct_key(identity) - c = f.construct(identity) - + key, c = f.construct_item(identity) self.assertTrue(f.domain_ancillary(identity).equals(c, verbose=2)) - self.assertTrue(f.domain_ancillary(identity, key=True) == key) + self.assertEqual(f.domain_ancillary(identity, key=True), key) - def test_Field_field_ancillary(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return + with self.assertRaises(ValueError): + f.domain_anc("long_name:qweRty") - f = self.f.copy() + def test_Field_field_ancillary(self): + f = self.f for identity in ("ancillary0", "fieldancillary0"): - key = f.construct_key(identity) - c = f.construct(identity) - + key, c = f.construct_item(identity) 
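# Illustrative sketch, not part of the patch: the single-construct accessors
# now accept item=True to return the (key, construct) pair directly, alongside
# the existing key=True form.  Identities assume cf.example_field(1).
import cf

f = cf.example_field(1)

key, c = f.dimension_coordinate("grid_latitude", item=True)
assert key == f.dimension_coordinate("grid_latitude", key=True)
assert c.equals(f.dimension_coordinate("grid_latitude"))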
self.assertTrue(f.field_ancillary(identity).equals(c, verbose=2)) - self.assertTrue(f.field_ancillary(identity, key=True) == key) + self.assertEqual(f.field_ancillary(identity, key=True), key) - identities = ["ancillary1", "ancillary3"] - field_ancillaries = f.field_ancillaries() - field_ancillaries(*identities) - field_ancillaries() - field_ancillaries(identities[0]) + with self.assertRaises(ValueError): + f.field_anc("long_name:qweRty") def test_Field_transpose(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() f0 = f.copy() @@ -2437,24 +2189,16 @@ def test_Field_transpose(self): f.transpose([2, 1]) def test_Field_domain_axis(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - _ = self.f.domain_axis(1) - _ = self.f.domain_axis("domainaxis2") + self.f.domain_axis(1) + self.f.domain_axis("domainaxis2") with self.assertRaises(ValueError): self.f.domain_axis(99) with self.assertRaises(ValueError): - self.f.domain_axis("qwerty") + self.f.axis("qwerty") def test_Field_where(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = self.f.copy() - f = self.f.copy() f0 = f.copy() @@ -2497,16 +2241,10 @@ def test_Field_where(self): self.assertTrue(f[0].maximum() == 45) def test_Field_mask_invalid(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertIsNone(f.mask_invalid(inplace=True)) def test_Field_del_domain_axis(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(0) g = f[0] @@ -2537,9 +2275,6 @@ def test_Field_del_domain_axis(self): f.del_domain_axis("T") def test_Field_percentile(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.example_field(1) for chunksize in self.chunk_sizes: cf.chunksize(chunksize) diff --git a/cf/test/test_FieldAncillary.py b/cf/test/test_FieldAncillary.py index 602df3a886..c9caa4dcc1 100644 --- a/cf/test/test_FieldAncillary.py +++ b/cf/test/test_FieldAncillary.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import os import unittest faulthandler.enable() # to debug seg faults and timeouts @@ -9,27 +8,22 @@ class FieldAncillaryTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - # f = cf.read(filename)[0] + f = cf.example_field(1) def test_FieldAncillary(self): f = cf.FieldAncillary() - _ = repr(f) - _ = str(f) - _ = f.dump(display=False) + repr(f) + str(f) + f.dump(display=False) def test_FieldAncillary_source(self): - f = cf.read(self.filename)[0] - - a = f.auxiliary_coordinates(view=True)("latitude").value() + a = self.f.auxiliary_coordinate("latitude") cf.FieldAncillary(source=a) def test_FieldAncillary_properties(self): - f = cf.read(self.filename)[0] - x = f.domain_ancillaries(view=True)("ncvar%a").value() + d = self.f.domain_ancillary("ncvar%a") + x = cf.FieldAncillary(source=d) x.set_property("long_name", "qwerty") @@ -39,8 +33,7 @@ def test_FieldAncillary_properties(self): self.assertIsNone(x.del_property("long_name", None)) def test_FieldAncillary_insert_dimension(self): - f = cf.read(self.filename)[0] - d = f.dimension_coordinates(view=True)("grid_longitude").value() + d = self.f.dimension_coordinate("grid_longitude") x = cf.FieldAncillary(source=d) self.assertEqual(x.shape, (9,)) @@ -52,8 +45,7 @@ def test_FieldAncillary_insert_dimension(self): self.assertEqual(x.shape, (9, 1)) def 
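# Illustrative sketch, not part of the patch: domain_axis() accepts either a
# construct identity/key or an integer position into the field's data, and an
# unmatched selection raises ValueError unless a default is given.  The key
# below assumes cf.example_field(1).
import cf

f = cf.example_field(1)

f.domain_axis(1)                 # the domain axis at position 1 of the data
f.domain_axis("domainaxis2")     # selected by its construct key
f.domain_axis(99, default=None)  # -> None instead of a ValueError
try:
    f.domain_axis("qwerty")
except ValueError:
    pass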
test_FieldAncillary_transpose(self): - f = cf.read(self.filename)[0] - a = f.auxiliary_coordinates(view=True)("longitude").value() + a = self.f.auxiliary_coordinate("longitude") x = cf.FieldAncillary(source=a) self.assertEqual(x.shape, (9, 10)) @@ -65,8 +57,7 @@ def test_FieldAncillary_transpose(self): self.assertEqual(x.shape, (10, 9)) def test_FieldAncillary_squeeze(self): - f = cf.read(self.filename)[0] - a = f.auxiliary_coordinates(view=True)("longitude").value() + a = self.f.auxiliary_coordinate("longitude") x = cf.FieldAncillary(source=a) x.insert_dimension(1, inplace=True) diff --git a/cf/test/test_FieldList.py b/cf/test/test_FieldList.py index 8bdac3e9bb..743f743240 100644 --- a/cf/test/test_FieldList.py +++ b/cf/test/test_FieldList.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import os import re import unittest @@ -22,16 +21,10 @@ class FieldTest(unittest.TestCase): test_only = [] def test_FieldList(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - cf.FieldList(self.x) cf.FieldList([self.x]) def test_FieldList__add__iadd__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) f = f + f.copy() @@ -55,9 +48,6 @@ def test_FieldList__add__iadd__(self): self.assertEqual(len(f), 20) def test_FieldList__contains__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) f.append(self.x.copy()) @@ -67,18 +57,12 @@ def test_FieldList__contains__(self): self.assertNotIn(34.6, f) def test_FieldList_close(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) self.assertIsNone(f.close()) - _ = repr(f[0]) + repr(f[0]) def test_FieldList__len__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) self.assertEqual(len(cf.FieldList()), 0) @@ -89,9 +73,6 @@ def test_FieldList__len__(self): self.assertEqual(len(f), 4) def test_FieldList__mul__imul__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList() f = f * 4 self.assertEqual(len(f), 0) @@ -120,17 +101,11 @@ def test_FieldList__mul__imul__(self): self.assertIsInstance(f, cf.FieldList) def test_FieldList__repr__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) f += f - _ = repr(f) + repr(f) def test_FieldList_append_extend(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Append f = cf.FieldList() @@ -160,9 +135,6 @@ def test_FieldList_append_extend(self): self.assertEqual(len(f), 4) def test_FieldList_copy(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) f.append(f[0].copy()) @@ -171,22 +143,16 @@ def test_FieldList_copy(self): self.assertTrue(f.equals(g, verbose=2)) def test_FieldList__getslice__(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) f.append(f[0]) - _ = f[0:1] - _ = f[1:2] - _ = f[:1] - _ = f[1:] + f[0:1] + f[1:2] + f[:1] + f[1:] def test_FieldList_count(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) self.assertEqual(f.count(f[0]), 1) @@ -200,9 +166,6 @@ def test_FieldList_count(self): self.assertEqual(f.count(f[3]), 2) def test_FieldList_equals(self): - if self.test_only and inspect.stack()[0][3] not in 
self.test_only: - return - f = cf.FieldList(self.x) g = f.copy() @@ -241,9 +204,6 @@ def test_FieldList_equals(self): self.assertFalse(f.equals(h, unordered=True)) def test_FieldList_insert_pop_remove(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Insert f = cf.FieldList(self.x) g = f[0].copy() @@ -293,9 +253,6 @@ def test_FieldList_insert_pop_remove(self): self.assertIsInstance(f, cf.FieldList) def test_FieldList_reverse(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) g = f[0] @@ -312,9 +269,6 @@ def test_FieldList_reverse(self): self.assertIs(h, f[0]) def test_FieldList_select(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) g = f("not this one") @@ -370,9 +324,6 @@ def test_FieldList_select(self): self.assertEqual(len(f.select_by_ncvar(re.compile("^ta"))), 8) def test_FieldList_select_by_construct(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - x = self.x.copy() x.del_construct("time") @@ -474,13 +425,10 @@ def test_FieldList_select_by_construct(self): self.assertEqual(len(g), 0) def test_FieldList_select_field(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.FieldList(self.x) with self.assertRaises(Exception): - _ = f.select_field("not this one") + f.select_field("not this one") self.assertIsNone(f.select_field("not this one", None)) @@ -494,9 +442,6 @@ def test_FieldList_select_field(self): g = f.select_field(re.compile("^QWERTY")) def test_FieldList_concatenate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f2[0] g = cf.FieldList([f[0], f[1:456], f[456:]]) @@ -508,9 +453,6 @@ def test_FieldList_concatenate(self): self.assertTrue(f.equals(h, verbose=2)) def test_FieldList_index(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f2[0] a, b, c = [f[0], f[1:456], f[456:]] diff --git a/cf/test/test_Index.py b/cf/test/test_Index.py index a01db799f3..3e56725f10 100644 --- a/cf/test/test_Index.py +++ b/cf/test/test_Index.py @@ -8,6 +8,8 @@ class IndexTest(unittest.TestCase): + indexed = "DSG_timeSeries_indexed.nc" + def setUp(self): # Disable log messages to silence expected warnings cf.log_level("DISABLE") @@ -20,8 +22,6 @@ def setUp(self): # < ... test code ... > # cf.log_level('DISABLE') - self.indexed = "DSG_timeSeries_indexed.nc" - def test_Index__repr__str__dump(self): f = cf.read(self.indexed)[0] diff --git a/cf/test/test_List.py b/cf/test/test_List.py index d13f7e4790..9eded72117 100644 --- a/cf/test/test_List.py +++ b/cf/test/test_List.py @@ -8,6 +8,8 @@ class ListTest(unittest.TestCase): + gathered = "gathered.nc" + def setUp(self): # Disable log messages to silence expected warnings cf.log_level("DISABLE") @@ -20,16 +22,14 @@ def setUp(self): # < ... test code ... 
> # cf.log_level('DISABLE') - self.gathered = "gathered.nc" - def test_List__repr__str__dump(self): f = cf.read(self.gathered)[0] - list_ = f.data.get_list() + lst = f.data.get_list() - repr(list_) - str(list_) - self.assertIsInstance(list_.dump(display=False), str) + repr(lst) + str(lst) + self.assertIsInstance(lst.dump(display=False), str) if __name__ == "__main__": diff --git a/cf/test/test_Maths.py b/cf/test/test_Maths.py index 4911a87331..26e28d2284 100644 --- a/cf/test/test_Maths.py +++ b/cf/test/test_Maths.py @@ -2,7 +2,6 @@ import faulthandler import os import unittest -import inspect import numpy @@ -16,15 +15,7 @@ class MathTest(unittest.TestCase): os.path.dirname(os.path.abspath(__file__)), "regrid_file1.nc" ) - test_only = [] - # test_only = ('NOTHING!!!!!',) - # test_only = ('test_relative_vorticity_distance') - # test_only = ('test_relative_vorticity_latlong') - def test_relative_vorticity_distance(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - x_min = 0.0 x_max = 100.0 dx = 1.0 @@ -60,9 +51,6 @@ def test_relative_vorticity_distance(self): self.assertTrue((rv.array == 0.0).all()) def test_relative_vorticity_latlong(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - lat_min = -90.0 lat_max = 90.0 dlat = 1.0 diff --git a/cf/test/test_Partition.py b/cf/test/test_Partition.py deleted file mode 100644 index 705d56873d..0000000000 --- a/cf/test/test_Partition.py +++ /dev/null @@ -1,30 +0,0 @@ -import datetime -import faulthandler -import inspect -import os -import unittest - -faulthandler.enable() # to debug seg faults and timeouts - -import cf - - -class PartitionTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - chunk_sizes = (17, 34, 300, 100000)[::-1] - original_chunksize = cf.chunksize() - - test_only = [] - - def test_Partition(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - -if __name__ == "__main__": - print("Run date:", datetime.datetime.now()) - cf.environment() - print() - unittest.main(verbosity=2) diff --git a/cf/test/test_Query.py b/cf/test/test_Query.py index 4a46c2e29f..92b6671cc4 100644 --- a/cf/test/test_Query.py +++ b/cf/test/test_Query.py @@ -2,7 +2,6 @@ import datetime import faulthandler import re -import os import unittest import numpy @@ -13,18 +12,14 @@ class QueryTest(unittest.TestCase): - filename = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "test_file.nc" - ) - - # f = cf.read(filename)[0] + f = cf.example_field(1) def test_Query_contains(self): - f = cf.read(self.filename)[0] - c = f.dim("X") + + c = self.f.dim("X") self.assertTrue( ( - (cf.contains(21.1) == c).array + (cf.contains(-4.26) == c).array == numpy.array([0, 1, 0, 0, 0, 0, 0, 0, 0], bool) ).all() ) @@ -36,41 +31,39 @@ def test_Query_contains(self): ) def test_Query(self): - f = cf.read(self.filename)[0] - q = cf.Query("wi", [2, 5]) r = cf.Query("le", 67) s = q | r t = cf.Query("gt", 12, attr="bounds") u = s & t - _ = repr(q) - _ = repr(s) - _ = repr(t) - _ = repr(u) - _ = str(q) - _ = str(s) - _ = str(t) - _ = str(u) - _ = u.dump(display=False) - - _ = u.attr - _ = u.operator - _ = q.attr - _ = q.operator - _ = q.value + repr(q) + repr(s) + repr(t) + repr(u) + str(q) + str(s) + str(t) + str(u) + u.dump(display=False) + + u.attr + u.operator + q.attr + q.operator + q.value with self.assertRaises(Exception): - _ = u.value + u.value self.assertTrue(u.equals(u.copy(), verbose=2)) 
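
The compound queries constructed in test_Query are only evaluated when compared against a value, which is what the cf.contains assertions above exercise for coordinate constructs. A minimal sketch of the same behaviour on plain scalars, outside the test harness (the results in the comments are indicative rather than copied from test output):

    import cf

    q = cf.Query("wi", [2, 5])   # true where 2 <= x <= 5
    r = cf.Query("le", 67)       # true where x <= 67
    s = q | r                    # either condition may hold

    print(s == 3)    # True: 3 satisfies both sub-conditions
    print(s == 70)   # False: 70 satisfies neither sub-condition
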
self.assertFalse(u.equals(t, verbose=0)) - _ = copy.deepcopy(u) + copy.deepcopy(u) - c = f.dimension_coordinate("X") + c = self.f.dimension_coordinate("X") self.assertTrue( ( - (cf.contains(21.1) == c).array + (cf.contains(-4.26) == c).array == numpy.array([0, 1, 0, 0, 0, 0, 0, 0, 0], bool) ).all() ) @@ -81,15 +74,15 @@ def test_Query(self): ).all() ) - _ = cf.cellsize(34) - _ = cf.cellsize(q) + cf.cellsize(34) + cf.cellsize(q) - _ = cf.celllt(3) - _ = cf.cellle(3) - _ = cf.cellge(3) - _ = cf.cellgt(3) - _ = cf.cellwi(1, 2) - _ = cf.cellwo(1, 2) + cf.celllt(3) + cf.cellle(3) + cf.cellge(3) + cf.cellgt(3) + cf.cellwi(1, 2) + cf.cellwo(1, 2) def test_Query_object_units(self): """Check units are processed correctly in and from queries.""" @@ -236,7 +229,7 @@ def test_Query_datetime1(self): message, ) - _ = cf.seasons() + cf.seasons() [ cf.seasons(n, start) for n in [1, 2, 3, 4, 6, 12] @@ -247,10 +240,10 @@ def test_Query_datetime1(self): with self.assertRaises(Exception): cf.seasons(start=8.456) - _ = cf.mam() - _ = cf.djf() - _ = cf.jja() - _ = cf.son() + cf.mam() + cf.djf() + cf.jja() + cf.son() def test_Query_year_month_day_hour_minute_second(self): d = cf.Data( diff --git a/cf/test/test_TimeDuration.py b/cf/test/test_TimeDuration.py index cda948224a..600eb0c0eb 100644 --- a/cf/test/test_TimeDuration.py +++ b/cf/test/test_TimeDuration.py @@ -96,84 +96,82 @@ def test_TimeDuration(self): cf.TimeDuration(64, "calendar_years") + cf.M(24), cf.Y(66) ) - self.assertTrue( - cf.TimeDuration(36, "calendar_months") / 8 == cf.M(4.5) - ) - self.assertTrue(cf.TimeDuration(36, "calendar_months") // 8 == cf.M(4)) + self.assertEqual(cf.TimeDuration(36, "calendar_months") / 8, cf.M(4.5)) + self.assertEqual(cf.TimeDuration(36, "calendar_months") // 8, cf.M(4)) - self.assertTrue( - cf.TimeDuration(36, "calendar_months") / numpy.array(8.0) - == cf.M(36 / 8.0) + self.assertEqual( + cf.TimeDuration(36, "calendar_months") / numpy.array(8.0), + cf.M(36 / 8.0), ) - self.assertTrue( - cf.TimeDuration(12, "calendar_months") * cf.Data([[1.5]]) - == cf.Y(1.5) + self.assertEqual( + cf.TimeDuration(12, "calendar_months") * cf.Data([[1.5]]), + cf.Y(1.5), ) - self.assertTrue( - cf.TimeDuration(36, "calendar_months") // 8.25 == cf.M(4.0) + self.assertEqual( + cf.TimeDuration(36, "calendar_months") // 8.25, cf.M(4.0) ) - self.assertTrue(cf.TimeDuration(36, "calendar_months") % 10 == cf.M(6)) + self.assertEqual(cf.TimeDuration(36, "calendar_months") % 10, cf.M(6)) - self.assertTrue( - cf.TimeDuration(24, "hours") + cf.TimeDuration(0.5, "days") - == cf.h(36.0) + self.assertEqual( + cf.TimeDuration(24, "hours") + cf.TimeDuration(0.5, "days"), + cf.h(36.0), ) - self.assertTrue( - cf.TimeDuration(0.5, "days") + cf.TimeDuration(24, "hours") - == cf.D(1.5) + self.assertEqual( + cf.TimeDuration(0.5, "days") + cf.TimeDuration(24, "hours"), + cf.D(1.5), ) t = cf.TimeDuration(24, "hours") t += 2 - self.assertTrue(t == cf.h(26)) + self.assertEqual(t, cf.h(26)) t -= cf.Data(3, "hours") - self.assertTrue(t == cf.h(23)) + self.assertEqual(t, cf.h(23)) t = cf.TimeDuration(24.0, "hours") t += 2 - self.assertTrue(t == cf.h(26)) - self.assertTrue(t - cf.Data(2.5, "hours") == cf.h(23.5)) + self.assertEqual(t, cf.h(26)) + self.assertEqual(t - cf.Data(2.5, "hours"), cf.h(23.5)) t *= 2 - self.assertTrue(t == cf.h(52.0)) + self.assertEqual(t, cf.h(52.0)) t -= 1.0 - self.assertTrue(t == cf.h(51)) + self.assertEqual(t, cf.h(51)) t /= 3 - self.assertTrue(t == cf.h(17)) + self.assertEqual(t, cf.h(17)) t += 5.5 - self.assertTrue(t == cf.h(22.5)) + 
self.assertEqual(t, cf.h(22.5)) t //= numpy.array(2) - self.assertTrue(t == cf.h(11.0)) + self.assertEqual(t, cf.h(11.0)) t *= 10 - self.assertTrue(t == cf.h(110.0)) + self.assertEqual(t, cf.h(110.0)) t %= 3 - self.assertTrue(t == cf.h(2.0)) + self.assertEqual(t, cf.h(2.0)) t = cf.TimeDuration(24.5, "hours") - self.assertTrue(-t == -24.5) - self.assertTrue(int(t) == 24) - self.assertTrue(t / 0.5 == 49) - self.assertTrue(t // 2 == 12) - self.assertTrue(25 - t == 0.5) - self.assertTrue(2 * t == 49) - self.assertTrue(2.0 % t == 2, 2.0 % t) + self.assertEqual(-t, -24.5) + self.assertEqual(int(t), 24) + self.assertEqual(t / 0.5, 49) + self.assertEqual(t // 2, 12) + self.assertEqual(25 - t, 0.5) + self.assertEqual(2 * t, 49) + self.assertEqual(2.0 % t, 2, 2.0 % t) self.assertTrue(cf.TimeDuration(24, "hours").isint) self.assertTrue(cf.TimeDuration(24.0, "hours").isint) self.assertFalse(t.isint) t.Units = "days" - self.assertTrue(t.Units == cf.Units("days")) + self.assertEqual(t.Units, cf.Units("days")) t.Units = "hours" self.assertTrue(cf.TimeDuration(12, "hours").is_day_factor()) self.assertFalse(cf.TimeDuration(13, "hours").is_day_factor()) self.assertFalse(cf.TimeDuration(2, "days").is_day_factor()) - self.assertTrue(cf.TimeDuration(cf.Data(2, "days")) == 2) - self.assertTrue(cf.TimeDuration(cf.Data(48, "hours")) == 48) - self.assertTrue( - cf.TimeDuration(cf.Data(48, "hours"), units="days") == 2 + self.assertEqual(cf.TimeDuration(cf.Data(2, "days")), 2) + self.assertEqual(cf.TimeDuration(cf.Data(48, "hours")), 48) + self.assertEqual( + cf.TimeDuration(cf.Data(48, "hours"), units="days"), 2 ) - self.assertTrue(cf.TimeDuration(0.1, units="seconds") == 0.1) + self.assertEqual(cf.TimeDuration(0.1, units="seconds"), 0.1) self.assertTrue(t.equals(t, verbose=2)) self.assertTrue(t.equals(t.copy(), verbose=2)) @@ -191,95 +189,95 @@ def test_TimeDuration(self): t = t.copy() t = copy.deepcopy(t) - _ = repr(t) - _ = str(t) + repr(t) + str(t) t //= 2 t %= 2 def test_TimeDuration_interval(self): - self.assertTrue( - cf.M().interval(cf.dt(1999, 12)) - == ( + self.assertEqual( + cf.M().interval(cf.dt(1999, 12)), + ( cf.dt("1999-12-01 00:00:00", calendar=None), cf.dt("2000-01-01 00:00:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.Y(2).interval(cf.dt(2000, 2), end=True) - == ( + self.assertEqual( + cf.Y(2).interval(cf.dt(2000, 2), end=True), + ( cf.dt("1998-02-01 00:00:00", calendar=None), cf.dt("2000-02-01 00:00:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.D(30).interval(cf.dt(1983, 12, 1, 6)) - == ( + self.assertEqual( + cf.D(30).interval(cf.dt(1983, 12, 1, 6)), + ( cf.dt("1983-12-01 06:00:00", calendar=None), cf.dt("1983-12-31 06:00:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.D(30).interval(cf.dt(1983, 12, 1, 6), end=True) - == ( + self.assertEqual( + cf.D(30).interval(cf.dt(1983, 12, 1, 6), end=True), + ( cf.dt("1983-11-01 06:00:00", calendar=None), cf.dt("1983-12-01 06:00:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.D(0).interval(cf.dt(1984, 2, 3)) - == ( + self.assertEqual( + cf.D(0).interval(cf.dt(1984, 2, 3)), + ( cf.dt("1984-02-03 00:00:00", calendar=None), cf.dt("1984-02-03 00:00:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.D(5, hour=6).interval(cf.dt(2004, 3, 2), end=True) - == ( + self.assertEqual( + cf.D(5, hour=6).interval(cf.dt(2004, 3, 2), end=True), + ( cf.dt("2004-02-26 00:00:00", calendar=None), cf.dt("2004-03-02 00:00:00", calendar=None), - ) + ), ) - self.assertTrue( + self.assertEqual( cf.D(5, hour=6).interval( cf.dt(2004, 3, 2, 
calendar="noleap"), end=True - ) - == ( + ), + ( cf.dt("2004-02-25 00:00:00", calendar="noleap"), cf.dt("2004-03-02 00:00:00", calendar="noleap"), - ) + ), ) - self.assertTrue( + self.assertEqual( cf.D(5, hour=6).interval( cf.dt(2004, 3, 2, calendar="360_day"), end=True - ) - == ( + ), + ( cf.dt("2004-02-27 00:00:00", calendar="360_day"), cf.dt("2004-03-02 00:00:00", calendar="360_day"), - ) + ), ) - self.assertTrue( - cf.h(19897.5).interval(cf.dt(1984, 2, 3, 0)) - == ( + self.assertEqual( + cf.h(19897.5).interval(cf.dt(1984, 2, 3, 0)), + ( cf.dt("1984-02-03 00:00:00", calendar=None), cf.dt("1986-05-12 01:30:00", calendar=None), - ) + ), ) - self.assertTrue( - cf.h(19897.6).interval(cf.dt(1984, 2, 3, 0), end=True) - == ( + self.assertEqual( + cf.h(19897.6).interval(cf.dt(1984, 2, 3, 0), end=True), + ( cf.dt("1981-10-26 22:24:00", calendar=None), cf.dt("1984-02-03 00:00:00", calendar=None), - ) + ), ) def test_TimeDuration_iso(self): - self.assertTrue(cf.Y(19).iso == "P19Y") - self.assertTrue(cf.M(9).iso == "P9M") - self.assertTrue(cf.D(34).iso == "P34D") - self.assertTrue(cf.m(16).iso == "PT16M") - self.assertTrue(cf.h(19897.546).iso == "PT19897.546H") - self.assertTrue(cf.s(1989).iso == "PT1989S") + self.assertEqual(cf.Y(19).iso, "P19Y") + self.assertEqual(cf.M(9).iso, "P9M") + self.assertEqual(cf.D(34).iso, "P34D") + self.assertEqual(cf.m(16).iso, "PT16M") + self.assertEqual(cf.h(19897.546).iso, "PT19897.546H") + self.assertEqual(cf.s(1989).iso, "PT1989S") def test_TimeDuration_bounds(self): for direction in (True, False): @@ -334,8 +332,9 @@ def test_TimeDuration_bounds(self): ): if direction is False: y = y[::-1] - self.assertTrue( - x == y, "{}!={} direction={}".format(x, y, direction) + + self.assertEqual( + x, y, "{}!={} direction={}".format(x, y, direction) ) for x, y in zip( @@ -389,7 +388,7 @@ def test_TimeDuration_bounds(self): ): if direction is False: y = y[::-1] - self.assertTrue(x == y, "{}!={}".format(x, y)) + self.assertEqual(x, y, "{}!={}".format(x, y)) for x, y in zip( [ @@ -438,85 +437,85 @@ def test_TimeDuration_bounds(self): if direction is False: y = y[::-1] - self.assertTrue(x == y, "{}!={}".format(x, y)) + self.assertEqual(x, y, "{}!={}".format(x, y)) def test_TimeDuration_arithmetic(self): - self.assertTrue(cf.M() + cf.dt(2000, 1, 1) == cf.dt(2000, 2, 1)) - self.assertTrue(cf.M() * 8 == cf.M(8)) - self.assertTrue(cf.M() * 8.5 == cf.M(8.5)) - self.assertTrue(cf.dt(2000, 1, 1) + cf.M() == cf.dt(2000, 2, 1)) - self.assertTrue(cf.dt(2000, 1, 1) - cf.M() == cf.dt(1999, 12, 1)) - self.assertTrue( - cf.M() + datetime.datetime(2000, 1, 1) - == cf.dt(2000, 2, 1, calendar="gregorian") + self.assertEqual(cf.M() + cf.dt(2000, 1, 1), cf.dt(2000, 2, 1)) + self.assertEqual(cf.M() * 8, cf.M(8)) + self.assertEqual(cf.M() * 8.5, cf.M(8.5)) + self.assertEqual(cf.dt(2000, 1, 1) + cf.M(), cf.dt(2000, 2, 1)) + self.assertEqual(cf.dt(2000, 1, 1) - cf.M(), cf.dt(1999, 12, 1)) + self.assertEqual( + cf.M() + datetime.datetime(2000, 1, 1), + cf.dt(2000, 2, 1, calendar="gregorian"), ) - self.assertTrue( - datetime.datetime(2000, 1, 1) + cf.M() - == cf.dt(2000, 2, 1, calendar="gregorian") + self.assertEqual( + datetime.datetime(2000, 1, 1) + cf.M(), + cf.dt(2000, 2, 1, calendar="gregorian"), ) - self.assertTrue( - datetime.datetime(2000, 1, 1) - cf.M() - == cf.dt(1999, 12, 1, calendar="gregorian") + self.assertEqual( + datetime.datetime(2000, 1, 1) - cf.M(), + cf.dt(1999, 12, 1, calendar="gregorian"), ) d = cf.dt(2000, 1, 1) d += cf.M() - self.assertTrue(d == cf.dt(2000, 2, 1)) + 
self.assertEqual(d, cf.dt(2000, 2, 1)) d -= cf.M() - self.assertTrue(d == cf.dt(2000, 1, 1)) + self.assertEqual(d, cf.dt(2000, 1, 1)) d = datetime.datetime(2000, 1, 1) d += cf.M() - self.assertTrue(d == cf.dt(2000, 2, 1, calendar="gregorian")) + self.assertEqual(d, cf.dt(2000, 2, 1, calendar="gregorian")) d -= cf.M() - self.assertTrue(d == cf.dt(2000, 1, 1, calendar="gregorian")) + self.assertEqual(d, cf.dt(2000, 1, 1, calendar="gregorian")) - self.assertTrue(cf.M() * 8 == cf.M(8)) - self.assertTrue(cf.M() * 8.5 == cf.M(8.5)) - self.assertTrue(cf.M() / 2.0 == cf.M(0.5)) - self.assertTrue(cf.M(8) / 3 == cf.M(8 / 3)) - self.assertTrue(cf.M(8) // 3 == cf.M(2)) + self.assertEqual(cf.M() * 8, cf.M(8)) + self.assertEqual(cf.M() * 8.5, cf.M(8.5)) + self.assertEqual(cf.M() / 2.0, cf.M(0.5)) + self.assertEqual(cf.M(8) / 3, cf.M(8 / 3)) + self.assertEqual(cf.M(8) // 3, cf.M(2)) def test_Timeduration__days_in_month(self): - self.assertTrue(cf.TimeDuration.days_in_month(1900, 2) == 28) - self.assertTrue(cf.TimeDuration.days_in_month(1999, 2) == 28) - self.assertTrue(cf.TimeDuration.days_in_month(2000, 2) == 29) - self.assertTrue(cf.TimeDuration.days_in_month(2004, 2) == 29) - self.assertTrue( - cf.TimeDuration.days_in_month(1900, 2, calendar="360_day") == 30 + self.assertEqual(cf.TimeDuration.days_in_month(1900, 2), 28) + self.assertEqual(cf.TimeDuration.days_in_month(1999, 2), 28) + self.assertEqual(cf.TimeDuration.days_in_month(2000, 2), 29) + self.assertEqual(cf.TimeDuration.days_in_month(2004, 2), 29) + self.assertEqual( + cf.TimeDuration.days_in_month(1900, 2, calendar="360_day"), 30 ) - self.assertTrue( - cf.TimeDuration.days_in_month(1999, 2, calendar="360_day") == 30 + self.assertEqual( + cf.TimeDuration.days_in_month(1999, 2, calendar="360_day"), 30 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2000, 2, calendar="360_day") == 30 + self.assertEqual( + cf.TimeDuration.days_in_month(2000, 2, calendar="360_day"), 30 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2004, 2, calendar="360_day") == 30 + self.assertEqual( + cf.TimeDuration.days_in_month(2004, 2, calendar="360_day"), 30 ) - self.assertTrue( - cf.TimeDuration.days_in_month(1900, 2, calendar="noleap") == 28 + self.assertEqual( + cf.TimeDuration.days_in_month(1900, 2, calendar="noleap"), 28 ) - self.assertTrue( - cf.TimeDuration.days_in_month(1999, 2, calendar="noleap") == 28 + self.assertEqual( + cf.TimeDuration.days_in_month(1999, 2, calendar="noleap"), 28 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2000, 2, calendar="noleap") == 28 + self.assertEqual( + cf.TimeDuration.days_in_month(2000, 2, calendar="noleap"), 28 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2004, 2, calendar="noleap") == 28 + self.assertEqual( + cf.TimeDuration.days_in_month(2004, 2, calendar="noleap"), 28 ) - self.assertTrue( - cf.TimeDuration.days_in_month(1900, 2, calendar="366_day") == 29 + self.assertEqual( + cf.TimeDuration.days_in_month(1900, 2, calendar="366_day"), 29 ) - self.assertTrue( - cf.TimeDuration.days_in_month(1999, 2, calendar="366_day") == 29 + self.assertEqual( + cf.TimeDuration.days_in_month(1999, 2, calendar="366_day"), 29 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2000, 2, calendar="366_day") == 29 + self.assertEqual( + cf.TimeDuration.days_in_month(2000, 2, calendar="366_day"), 29 ) - self.assertTrue( - cf.TimeDuration.days_in_month(2004, 2, calendar="366_day") == 29 + self.assertEqual( + cf.TimeDuration.days_in_month(2004, 2, calendar="366_day"), 29 ) diff --git a/cf/test/test_cfa.py 
b/cf/test/test_cfa.py index 8344b91b83..2e96c3883c 100644 --- a/cf/test/test_cfa.py +++ b/cf/test/test_cfa.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import os import stat import subprocess @@ -13,7 +12,6 @@ class cfaTest(unittest.TestCase): def setUp(self): - self.test_only = () self.test_file = "cfa_test.sh" self.test_path = os.path.join(os.getcwd(), self.test_file) @@ -30,11 +28,9 @@ def setUp(self): ) def test_cfa(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - # In the script, STDERR from cfa commands is redirected to (overwrite) - # its STDOUT, so Popen's stdout is really the cfa commands' stderr: + # In the script, STDERR from cfa commands is redirected to + # (overwrite) its STDOUT, so Popen's stdout is really the cfa + # commands' stderr: cfa_test = subprocess.Popen( ["./" + self.test_file], stdout=subprocess.PIPE, diff --git a/cf/test/test_collapse.py b/cf/test/test_collapse.py index 02cd9c754d..1130af5cd4 100644 --- a/cf/test/test_collapse.py +++ b/cf/test/test_collapse.py @@ -134,7 +134,7 @@ def test_Field_collapse_CLIMATOLOGICAL_TIME(self): print(g.constructs) self.assertEqual(list(g.shape), expected_shape) - for key in f.cell_methods(view=True): + for key in f.cell_methods(todict=True): f.del_construct(key) g = f.collapse( @@ -309,7 +309,7 @@ def test_Field_collapse(self): for i, year in enumerate( f.subspace(T=cf.month(m)).coord("T").year.unique() ): - _ = cf.month(m) & cf.year(year) + cf.month(m) & cf.year(year) x = f.subspace(T=cf.month(m) & cf.year(year)) x.data.mean(axes=0, inplace=True) a[i] = x.array diff --git a/cf/test/test_decorators.py b/cf/test/test_decorators.py index 4662a6e372..ec3550899e 100644 --- a/cf/test/test_decorators.py +++ b/cf/test/test_decorators.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import unittest faulthandler.enable() # to debug seg faults and timeouts @@ -83,9 +82,6 @@ def setUp(self): self.test_only = [] def test_deprecated_kwarg_check(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - test_class = dummyClass() # Test without (or with default) deprecated keyword argument @@ -93,7 +89,7 @@ def test_deprecated_kwarg_check(self): res_2 = test_class.decorated_func_2(good_kwarg="good") res_3 = test_class.func_2(good_kwarg="good", traceback=False) res_4 = test_class.decorated_func_2(good_kwarg="good", traceback=False) - _ = test_class.multikwarg_decorated_func_2( + test_class.multikwarg_decorated_func_2( good_kwarg="good", traceback=False ) self.assertEqual(res_1, res_2) @@ -114,9 +110,6 @@ def test_deprecated_kwarg_check(self): ) def test_manage_log_level_via_verbose_attr(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Order of decreasing severity/verbosity is crucial to one test below levels = ["WARNING", "INFO", "DETAIL", "DEBUG"] diff --git a/cf/test/test_docstring.py b/cf/test/test_docstring.py index d4e56f8408..562d6f58da 100644 --- a/cf/test/test_docstring.py +++ b/cf/test/test_docstring.py @@ -191,9 +191,6 @@ def test_docstring_docstring_substitutions(self): self.assertIn("{{repr}}", d, "\nCLASS: {}".format(klass)) -# --- End: class - - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() diff --git a/cf/test/test_dsg.py b/cf/test/test_dsg.py index 7181163584..7c5ce1c6b6 100644 --- a/cf/test/test_dsg.py +++ b/cf/test/test_dsg.py @@ -1,7 +1,6 @@ import atexit import datetime import faulthandler -import inspect import os import 
tempfile import unittest @@ -152,12 +151,7 @@ class DSGTest(unittest.TestCase): b = numpy.ma.where(b == -99, numpy.ma.masked, b) - test_only = [] - def test_DSG_contiguous(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.contiguous, verbose=0) self.assertEqual(len(f), 2) @@ -231,9 +225,6 @@ def test_DSG_contiguous(self): cf.write(tas, tmpfile) def test_DSG_indexed(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.indexed) self.assertEqual(len(f), 2) @@ -261,9 +252,6 @@ def test_DSG_indexed(self): self.assertTrue(g[i].equals(f[i], verbose=2)) def test_DSG_indexed_contiguous(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.indexed_contiguous, verbose=0) self.assertEqual(len(f), 2) @@ -300,9 +288,6 @@ def test_DSG_indexed_contiguous(self): self.assertTrue(g[i].equals(f[i], verbose=2)) def test_DSG_create_contiguous(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Define the ragged array values ragged_array = numpy.array([1, 3, 4, 3, 6], dtype="float32") # Define the count array values @@ -362,6 +347,6 @@ def test_DSG_create_contiguous(self): if __name__ == "__main__": print("Run date:", datetime.datetime.now()) - print(cf.environment()) + cf.environment() print() unittest.main(verbosity=2) diff --git a/cf/test/test_external.py b/cf/test/test_external.py index 519e2483d1..130ed24c0d 100644 --- a/cf/test/test_external.py +++ b/cf/test/test_external.py @@ -52,12 +52,13 @@ def test_EXTERNAL_READ(self): # Read the parent file on its own, without the external file f = cf.read(self.parent_file, verbose=0) + self.assertEqual(len(f), 1) + for i in f: - _ = repr(i) - _ = str(i) - _ = i.dump(display=False) + repr(i) + str(i) + i.dump(display=False) - self.assertEqual(len(f), 1) f = f[0] cell_measure = f.constructs.filter_by_identity("measure:area").value() @@ -73,9 +74,9 @@ def test_EXTERNAL_READ(self): c = cf.read(self.combined_file, verbose=0) for i in c + f: - _ = repr(i) - _ = str(i) - _ = i.dump(display=False) + repr(i) + str(i) + i.dump(display=False) cell_measure = ( f[0].constructs.filter_by_identity("measure:area").value() @@ -91,9 +92,9 @@ def test_EXTERNAL_READ(self): f = cf.read(self.parent_file, external=self.combined_file, verbose=0) for i in f: - _ = repr(i) - _ = str(i) - _ = i.dump(display=False) + repr(i) + str(i) + i.dump(display=False) self.assertEqual(len(f), 1) self.assertEqual(len(c), 1) @@ -109,9 +110,9 @@ def test_EXTERNAL_READ(self): ) for i in f: - _ = repr(i) - _ = str(i) - _ = i.dump(display=False) + repr(i) + str(i) + i.dump(display=False) self.assertEqual(len(f), 1) self.assertEqual(len(c), 1) @@ -223,6 +224,6 @@ def test_EXTERNAL_AGGREGATE(self): if __name__ == "__main__": print("Run date:", datetime.datetime.now()) - print(cf.environment()) + cf.environment() print() unittest.main(verbosity=2) diff --git a/cf/test/test_functions.py b/cf/test/test_functions.py index 2fe7247014..d16a581ce5 100644 --- a/cf/test/test_functions.py +++ b/cf/test/test_functions.py @@ -4,7 +4,6 @@ import platform import sys import unittest -import inspect faulthandler.enable() # to debug seg faults and timeouts @@ -16,21 +15,15 @@ def setUp(self): self.test_only = () def test_example_field(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for n in range(8): f = cf.example_field(n) - _ = f.array - _ = f.dump(display=False) + f.array + f.dump(display=False) 
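
Several of the updated test classes (FieldAncillaryTest, QueryTest, and the example-field loop above) now take their fixtures from cf.example_field rather than reading test_file.nc from disk. A short interactive sketch of example field 1, restricted to identities and shapes that the assertions above already depend on:

    import cf

    f = cf.example_field(1)                                   # in-memory sample field
    print(f.dimension_coordinate("grid_longitude").shape)     # (9,)
    print(f.auxiliary_coordinate("longitude").shape)          # (9, 10)
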
with self.assertRaises(Exception): - _ = cf.example_field(-999) + cf.example_field(-999) def test_keyword_deprecation(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Use as test case 'i' kwarg, the deprecated old name for # 'inplace': a = cf.Data([list(range(100))]) @@ -41,9 +34,6 @@ def test_keyword_deprecation(self): b.squeeze(i=True) def test_aliases(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - self.assertEqual(cf.log_level(), cf.LOG_LEVEL()) self.assertEqual(cf.free_memory(), cf.FREE_MEMORY()) self.assertEqual(cf.free_memory_factor(), cf.FREE_MEMORY_FACTOR()) @@ -60,9 +50,6 @@ def test_aliases(self): ) def test_configuration(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # This test assumes 'total_memory' remains constant throughout # the test run, which should be true generally in any # reasonable context. @@ -223,9 +210,6 @@ def test_configuration(self): cf.log_level("DISABLE") def test_context_managers(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # rtol, atol, chunksize for func in ( cf.atol, @@ -305,23 +289,14 @@ def test_context_managers(self): func(**org) def test_Constant(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - c = cf.atol() self.assertIs(c._func, cf.atol) def test_Configuration(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - c = cf.Configuration() self.assertIs(c._func, cf.configuration) def test_environment(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - e = cf.environment(display=False) ep = cf.environment(display=False, paths=False) self.assertIsInstance(e, str) diff --git a/cf/test/test_gathering.py b/cf/test/test_gathering.py index 1d99aabb10..5d1c17197c 100644 --- a/cf/test/test_gathering.py +++ b/cf/test/test_gathering.py @@ -1,7 +1,6 @@ import atexit import datetime import faulthandler -import inspect import os import tempfile import unittest @@ -212,120 +211,7 @@ class DSGTest(unittest.TestCase): b = numpy.ma.where(b == -99, numpy.ma.masked, b) - test_only = [] - - # def setUp(self): - # self.gathered = os.path.join( - # os.path.dirname(os.path.abspath(__file__)), 'gathered.nc') - # - # (fd, self.tempfilename) = tempfile.mkstemp( - # suffix='.nc', prefix='cf_', dir='.') - # os.close(fd) - # - # a = numpy.ma.masked_all((4, 9), dtype=float) - # a[0, 0:3] = [0.0, 1.0, 2.0] - # a[1, 0:7] = [1.0, 11.0, 21.0, 31.0, 41.0, 51.0, 61.0] - # a[2, 0:5] = [2.0, 102.0, 202.0, 302.0, 402.0] - # a[3, 0:9] = [3.0, 1003.0, 2003.0, 3003.0, 4003.0, 5003.0, 6003.0, - # 7003.0, 8003.0] - # self.a = a - # - # b = numpy.ma.array( - # [[[207.12345561172262, -99, -99, -99], - # [100.65758285427566, 117.72137430364056, 182.1893456150461, -99], - # [109.93898265295516, 117.76872282697526, -99, -99], - # [163.020681064712, 200.09702526477145, -99, -99], - # [138.25879722836117, 182.59075988956565, -99, -99], - # [159.28122555425304, -99, -99, -99], - # [157.0114286059841, 212.14056704399377, -99, -99], - # [225.09002846189756, -99, -99, -99], - # [179.99301151546493, -99, -99, -99], - # [125.56310968736936, 216.60367471282225, -99, -99], - # [105.12035147782414, 129.460917520233, 210.13998569368403, -99], - # [159.75007622045126, 197.101264162631, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - 
# [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99]], - # [[52.1185292100177, 57.51542658633939, 108.49584371709457, - # 137.7109686243953], - # [26.433960062549616, 91.57049700941819, -99, -99], - # [7.015322103368953, 39.551765142093345, -99, -99], - # [157.047493027102, -99, -99, -99], - # [25.18033994582771, 159.67348686580374, -99, -99], - # [45.84635421577662, 97.86781970832622, -99, -99], - # [5.61560792556281, 31.182013232254985, -99, -99], - # [37.78941964121314, -99, -99, -99], - # [57.2927165845568, 129.40831355790502, 181.2962705331917, -99], - # [38.714266913107686, 69.34591875157382, 169.26193063629765, -99], - # [72.52507309225012, 138.22169348672838, 159.82855521564647, -99], - # [45.23406469185547, 97.66633738254326, 112.64049631761776, -99], - # [14.920937817653984, -99, -99, -99], - # [9.071979535527532, 42.527916794472986, 61.8685137936187, -99], - # [17.175098751913993, 99.00403750149574, -99, -99], - # [92.95097491537247, -99, -99, -99], - # [7.11997786817564, -99, -99, -99], - # [156.81807261767003, -99, -99, -99], - # [6.832599021190903, 12.446963835216742, -99, -99], - # [45.19734905410353, 124.30321995608465, 130.4780046562618, -99], - # [35.18924597876244, 68.36858129904569, 78.88837365755683, -99], - # [81.15820119504805, 122.41242448019014, -99, -99], - # [58.95866448059819, -99, -99, -99], - # [10.465638726626635, 96.11859001483036, -99, -99], - # [55.64766876004607, 78.37174486781481, 91.09175506350066, -99], - # [71.46930436420837, 90.43816256387788, 103.76781788802138, -99]], - # [[351.97770529376936, -99, -99, -99], - # [347.0644742747811, 388.5698490238134, 481.0692542795372, -99], - # [352.42430719766776, 393.20047319955916, 395.71509960367075, - # -99], - # [402.8689447636048, 403.74922883226424, 479.8582815909853, -99], - # [300.0199333154121, 365.124061660449, -99, -99], - # [333.35006535728564, 433.143904011861, -99, -99], - # [376.9480484244583, -99, -99, -99], - # [334.99329771076077, -99, -99, -99], - # [319.36684737542186, 337.20913311790446, -99, -99], - # [340.66500823697623, 353.52589668400094, 410.44418671572373, - # -99], - # [301.9005914473572, 337.2055422899861, 386.9573429761627, -99], - # [324.3747437305056, 424.04244158178483, -99, -99], - # [331.52095586074626, 349.4826244342738, 396.81256849354895, -99], - # [331.99043697116906, -99, -99, -99], - # [384.76674803938937, -99, -99, -99], - # [373.0334288724908, 399.47980750739197, -99, -99], - # [300.0106221314076, 390.6371376624527, -99, -99], - # [364.25269358741537, 391.19723635099535, 456.466622863717, -99], - # [410.1246758522543, -99, -99, -99], - # [310.59214185542953, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99], - # [-99, -99, -99, -99]]] - # ) - # - # b = numpy.ma.where(b == -99, numpy.ma.masked, b) - # self.b = b - # - # self.test_only = [] - # - # def tearDown(self): - # os.remove(self.tempfilename) - def test_GATHERING(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = self.f.copy() self.assertEqual(len(f), 3) @@ -341,9 +227,6 @@ def test_GATHERING(self): self.assertTrue(b.equals(a, verbose=2)) def test_GATHERING_create(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Define the gathered values gathered_array = numpy.array( [[280, 282.5, 281], 
[279, 278, 277.5]], dtype="float32" @@ -424,6 +307,6 @@ def test_GATHERING_create(self): if __name__ == "__main__": print("Run date:", datetime.datetime.now()) - print(cf.environment()) + cf.environment() print() unittest.main(verbosity=2) diff --git a/cf/test/test_general.py b/cf/test/test_general.py index c84652bca2..9d5b191424 100644 --- a/cf/test/test_general.py +++ b/cf/test/test_general.py @@ -47,7 +47,7 @@ def test_GENERAL(self): c = cf.set([0, 3, 4, 5]) - _ = f == c + f == c # +, -, *, /, ** h = g.copy() @@ -83,8 +83,8 @@ def test_GENERAL(self): # Access the field's data as a numpy array g.array - g.item("latitude").array - g.item("longitude").array + g.construct("latitude").array + g.construct("longitude").array # Subspace the field g[..., 2:5].array diff --git a/cf/test/test_geometry.py b/cf/test/test_geometry.py index 29e007d72c..5230a6104a 100644 --- a/cf/test/test_geometry.py +++ b/cf/test/test_geometry.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import os import tempfile import unittest @@ -39,26 +38,17 @@ def setUp(self): suffix=".nc", prefix="cf_", dir="." ) os.close(fd) - # self.tempfilename = 'delme.nc' - - self.test_only = [] - - # self.test_only = ['test_node_count'] - # self.test_only = ['test_geometry_interior_ring'] def tearDown(self): os.remove(self.tempfilename) def test_node_count(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_1_file, verbose=0) self.assertEqual(len(f), 2, "f = " + repr(f)) for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates(view=True)), 2) + self.assertEqual(len(g.auxiliary_coordinates()), 2) g = f[0] for axis in ("X", "Y"): @@ -97,16 +87,13 @@ def test_node_count(self): self.assertFalse(c.has_node_count()) def test_geometry_2(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_2_file, verbose=0) self.assertEqual(len(f), 2, "f = " + repr(f)) for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) + self.assertEqual(len(g.auxiliary_coordinates()), 3) g = f[0] for axis in ("X", "Y", "Z"): @@ -134,16 +121,13 @@ def test_geometry_2(self): cf.write(f, self.tempfilename, verbose=0) def test_geometry_3(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_3_file, verbose=0) self.assertEqual(len(f), 2, "f = " + repr(f)) for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) + self.assertEqual(len(g.auxiliary_coordinates()), 3) g = f[0] for axis in ("X", "Y", "Z"): @@ -162,16 +146,13 @@ def test_geometry_3(self): self.assertTrue(a.equals(b, verbose=2)) def test_geometry_4(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_4_file, verbose=0) self.assertEqual(len(f), 2, "f = " + repr(f)) for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates(view=True)), 3) + self.assertEqual(len(g.auxiliary_coordinates()), 3) for axis in ("X", "Y"): coord = g.construct("axis=" + axis) @@ -198,9 +179,6 @@ def test_geometry_4(self): cf.write(f, self.tempfilename, verbose=0) def test_geometry_interior_ring(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for geometry_file in ( self.geometry_interior_ring_file, 
self.geometry_interior_ring_file_2, @@ -211,7 +189,7 @@ def test_geometry_interior_ring(self): for g in f: self.assertTrue(g.equals(g.copy(), verbose=2)) - self.assertEqual(len(g.auxiliary_coordinates(view=True)), 4) + self.assertEqual(len(g.auxiliary_coordinates()), 4) g = f[0] for axis in ("X", "Y"): @@ -240,7 +218,7 @@ def test_geometry_interior_ring(self): self.assertEqual(c.interior_ring.data.ndim, c.data.ndim + 1) self.assertEqual(c.interior_ring.data.shape[0], c.data.shape[0]) - _ = g.dump(display=False) + g.dump(display=False) d = c.insert_dimension(0) self.assertEqual(d.data.shape, (1,) + c.data.shape) @@ -298,9 +276,6 @@ def test_geometry_interior_ring(self): cf.write(f, self.tempfilename) def test_geometry_interior_ring_roll(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_interior_ring_file, verbose=0)[0] g = f.roll(0, 1) @@ -317,9 +292,6 @@ def test_geometry_interior_ring_roll(self): self.assertFalse(f.equals(h)) def test_geometry_interior_ring_flip(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_interior_ring_file, verbose=0)[0] g = f.flip(0) @@ -328,26 +300,17 @@ def test_geometry_interior_ring_flip(self): self.assertTrue(f.equals(h)) def test_geometry_interior_ring_flatten(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_interior_ring_file, verbose=0)[0] for i in (0, 1): self.assertTrue(f.equals(f.flatten(i), verbose=1)) def test_geometry_interior_ring_close(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_interior_ring_file, verbose=0)[0] self.assertIsNone(f.close()) def test_geometry_interior_ring_files(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.geometry_interior_ring_file, verbose=0)[0] self.assertTrue(isinstance(f.get_filenames(), set)) diff --git a/cf/test/test_groups.py b/cf/test/test_groups.py index 0900222a5a..48e40a682d 100644 --- a/cf/test/test_groups.py +++ b/cf/test/test_groups.py @@ -368,7 +368,7 @@ def test_groups_dimension(self): # ------------------------------------------------------------ # Move all coordinate bounds constructs to the /forecast group # ------------------------------------------------------------ - for construct in g.coordinates(view=True).values(): + for construct in g.coordinates().values(): try: construct.bounds.nc_set_variable_groups(["forecast"]) except ValueError: diff --git a/cf/test/test_read_write.py b/cf/test/test_read_write.py index 3e7a43b895..3426d133e6 100644 --- a/cf/test/test_read_write.py +++ b/cf/test/test_read_write.py @@ -367,14 +367,14 @@ def test_write_datatype(self): for single in (True, False): for double in (True, False): with self.assertRaises(Exception): - _ = cf.write(g, double=double, single=single) + cf.write(g, double=double, single=single) datatype = {numpy.dtype(float): numpy.dtype("float32")} with self.assertRaises(Exception): - _ = cf.write(g, datatype=datatype, single=True) + cf.write(g, datatype=datatype, single=True) with self.assertRaises(Exception): - _ = cf.write(g, datatype=datatype, double=True) + cf.write(g, datatype=datatype, double=True) def test_write_reference_datetime(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: @@ -479,7 +479,7 @@ def test_read_CDL(self): f0 = cf.read(self.filename)[0] f = cf.read(tmpfile)[0] - _ = cf.read(tmpfileh)[0] + 
cf.read(tmpfileh)[0] c = cf.read(tmpfilec)[0] self.assertTrue(f0.equals(f, verbose=2)) @@ -496,7 +496,7 @@ def test_read_CDL(self): ) with self.assertRaises(Exception): - _ = cf.read("test_read_write.py") + cf.read("test_read_write.py") def test_read_write_string(self): if self.test_only and inspect.stack()[0][3] not in self.test_only: From 8e13ac3057b89feee1d0547e9756abcc904712af Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 8 Apr 2021 23:21:01 +0100 Subject: [PATCH 20/53] devs --- cf/aggregate.py | 379 ++++++++++++------------------------- cf/constructs.py | 14 +- cf/field.py | 151 ++++++++------- cf/mixin/coordinate.py | 26 +-- cf/mixin/properties.py | 11 +- cf/mixin/propertiesdata.py | 26 +-- cf/read_write/read.py | 6 - cf/read_write/um/umread.py | 35 +--- cf/test/test_aggregate.py | 93 +++++---- cf/test/test_read_write.py | 262 +++++++++---------------- 10 files changed, 369 insertions(+), 634 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index c71f25c01b..06d043934f 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -7,6 +7,12 @@ from collections import namedtuple from operator import itemgetter +from cfdm import ( + is_log_level_info, + is_log_level_debug, + is_log_level_detail, +) + from .auxiliarycoordinate import AuxiliaryCoordinate from .domainaxis import DomainAxis from .fieldlist import FieldList @@ -26,13 +32,13 @@ _numpy_allclose, ) from .functions import rtol as cf_rtol, atol as cf_atol -from .functions import inspect as cf_inspect from .data.data import Data logger = logging.getLogger(__name__) + _dtype_float = numpy_dtype(float) # # -------------------------------------------------------------------- @@ -87,25 +93,6 @@ def __init__(self): self.flb = {} self.hash_to_array = {} - def inspect(self): - """Inspect the object for debugging. - - .. seealso:: `cf.inspect` - - :Returns: - - `None` - - **Examples:** - - >>> f.inspect() - - """ - print(cf_inspect(self)) - - -# --- End: class - class _Meta: """A summary of a field. 
@@ -245,6 +232,8 @@ def __init__( if field_identity: self.identity = f.get_property(field_identity, None) + construct_axes = f.constructs.data_axes() + # ------------------------------------------------------------ # # ------------------------------------------------------------ @@ -301,7 +290,7 @@ def __init__( # Dictionaries mapping auxiliary coordinate identifiers # to their auxiliary coordinate objects - aux_1d = f.auxiliary_coordinates(filter_by_naxes=(1,), todict=True) + auxs_1d = f.auxiliary_coordinates(filter_by_naxes=(1,), todict=True) # A set containing the identity of each coordinate # @@ -332,20 +321,9 @@ def __init__( # 'aux0', 'units': }] info_dim = [] - # dim_coord = item(axis) - # dim_coords = f.dimension_coordinates(view=True).filter_by_axis( - # axis, mode="and", view=True - # ) - dim_coords = f.dimension_coordinates( - filter_by_axis=(axis,), axis_mode="exact", todict=True + dim_coord_key, dim_coord = f.dimension_coordinate( + filter_by_axis=(axis,), item=True, default=(None, None) ) - if len(dim_coords) == 1: - dim_coord_key, dim_coord = dim_coords.popitem() - else: - dim_coord_key, dim_coord = None, None - - # dim_coord = dim_coords.value(None) - # dim_coord_key = dim_coords.key(None) dim_identity = None if dim_coord is not None: @@ -366,7 +344,7 @@ def __init__( info_dim.append( { "identity": dim_identity, - "key": dim_coord_key, # axis, + "key": dim_coord_key, "units": units, "hasdata": dim_coord.has_data(), "hasbounds": dim_coord.has_bounds(), @@ -376,11 +354,9 @@ def __init__( # 'size' : None}) # Find the 1-d auxiliary coordinates which span this axis - aux_coords = {} - for aux in tuple(aux_1d): # .keys(): - if axis in f.get_data_axes(aux): - aux_coords[aux] = aux_1d.pop(aux) - # --- End: for + aux_coords = {aux: auxs_1d.pop(aux) + for aux in tuple(auxs_1d) + if axis in construct_axes[aux]} info_aux = [] for key, aux_coord in aux_coords.items(): @@ -414,8 +390,6 @@ def __init__( "coordrefs": self.find_coordrefs(key), } ) - # 'size' : None}) - # --- End: for # Sort the 1-d auxiliary coordinate information info_aux.sort(key=itemgetter("identity")) @@ -455,7 +429,6 @@ def __init__( return else: ncdim = True - # --- End: if axis_identities = { "ids": "identity", @@ -484,7 +457,6 @@ def __init__( self.id_to_axis[identity] = axis self.axis_to_id[axis] = identity - # --- End: for # Create a sorted list of the axes' canonical identities # @@ -499,7 +471,7 @@ def __init__( filter_by_naxes=(gt(1),), todict=True ).items(): # Find axes' canonical identities - axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] axes = tuple(sorted(axes)) # Find this N-d auxiliary coordinate's identity @@ -522,7 +494,6 @@ def __init__( "hasbounds": nd_aux_coord.has_bounds(), "coordrefs": self.find_coordrefs(key), } - # --- End: for # ------------------------------------------------------------ # Cell methods @@ -546,7 +517,7 @@ def __init__( ) # Find axes' canonical identities - axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] #f.get_data_axes(key)] axes = tuple(sorted(axes)) self.field_anc[identity] = { @@ -576,18 +547,19 @@ def __init__( # Firstly process domain ancillaries which are used in # coordinate references for ref in f.coordinate_references(todict=True).values(): + ref_identity = ref.identity() for ( term, identifier, ) in ref.coordinate_conversion.domain_ancillaries().items(): - key = f.domain_ancillary(identifier, key=True, 
default=None) - if key is None: + key, anc = f.domain_ancillary( + identifier, item=True, default=(None, None) + ) + if anc is None: continue - anc = f.constructs[key] - # Set this domain ancillary's identity - identity = (ref.identity(), term) + identity = (ref_identity, term) identity = self.domain_ancillary_has_identity_and_data( anc, identity ) @@ -598,7 +570,7 @@ def __init__( ) # Find the canonical identities of the axes - axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] axes = tuple(sorted(axes)) self.domain_anc[identity] = { @@ -610,7 +582,6 @@ def __init__( self.key_to_identity[key] = identity ancs_in_refs.append(key) - # --- End: for # Secondly process domain ancillaries which are not being used # in coordinate references @@ -629,7 +600,7 @@ def __init__( ) # Find the canonical identities of the axes - axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] axes = tuple(sorted(axes)) self.domain_anc[identity] = { @@ -658,15 +629,18 @@ def __init__( self.field = self.field.copy() # copy as will delete msr f = self.field copied_field = True - f.del_construct(msr.identity()) - logger.info( - "Removed '{}' construct from a copy of input field {!r} " - "pre-aggregation because it is an external variable so it " - "is not possible to determine the influence the " - "aggregation process should have on it.".format( - msr.identity(), f.identity() + + f.del_construct(key) + + if is_log_level_info(logger): + logger.info( + f"Removed {msr.identity()!r} construct from a copy " + f"of input field {f.identity()!r} pre-aggregation " + "because it is an external variable so it " + "is not possible to determine the influence the " + "aggregation process should have on it." ) - ) + continue if not self.cell_measure_has_data_and_units(msr): @@ -682,7 +656,7 @@ def __init__( ) # Find axes' canonical identities - axes = [self.axis_to_id[axis] for axis in f.get_data_axes(key)] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] axes = tuple(sorted(axes)) if units in info_msr: @@ -690,13 +664,12 @@ def __init__( # have the same units and span the same axes. for value in info_msr[units]: if axes == value["axes"]: - self.message = "duplicate {0!r}".format(msr) + self.message = f"duplicate {msr!r}" return else: info_msr[units] = [] info_msr[units].append({"key": key, "axes": axes}) - # --- End: for # For each cell measure's canonical units, sort the # information by axis identities. 
@@ -753,12 +726,10 @@ def __init__( for p in properties if (equal and p not in eq) or (exist and p not in ex) ] - # --- End: if self.properties = tuple( sorted(ex_all + ex + list(eq_all.items()) + list(eq.items())) ) - # --- End: if # Attributes self.attributes = set(("file",)) @@ -785,8 +756,8 @@ def __bool__(self): def __repr__(self): """x.__repr__() <==> repr(x)""" - return "".format( - self.__class__.__name__, getattr(self, "field", None) + return ( + f"" ) def __str__(self): @@ -794,9 +765,7 @@ def __str__(self): strings = [] for attr in sorted(self.__dict__): strings.append( - "{}.{} = {!r}".format( - self.__class__.__name__, attr, getattr(self, attr) - ) + f"{self.__class__.__name__}.{attr} = {getattr(self, attr)!r}" ) return "\n".join(strings) @@ -811,8 +780,9 @@ def coordinate_values(self): return "\n".join(string) def copy(self): - """Replace the field associated with a summary class with a deep - copy.""" + """Replace the field associated with a summary class with a deep copy. + + """ new = _Meta.__new__(_Meta) new.__dict__ = self.__dict__.copy() new.field = new.field.copy() @@ -847,7 +817,6 @@ def canonical_units(self, variable, identity, relaxed_units=False): for u in _canonical_units[identity]: if var_units.equivalent(u): return u - # --- End: for # Still here? _canonical_units[identity].append(var_units) @@ -859,7 +828,6 @@ def canonical_units(self, variable, identity, relaxed_units=False): _canonical_units[identity] = [var_units] elif relaxed_units or variable.dtype.kind in ("S", "U"): var_units = _no_units - # --- End: if # Still here? return var_units @@ -885,7 +853,7 @@ def canonical_cell_methods(self, rtol=None, atol=None): # cms = getattr(self.field, 'CellMethods', None) # TODO if not cell_methods: - return None + return cms = [] for cm in cell_methods.values(): @@ -904,11 +872,9 @@ def canonical_cell_methods(self, rtol=None, atol=None): if not cm.equivalent(canonical_cm, rtol=rtol, atol=atol): equivalent = False break - # --- End: for if equivalent: return canonical_cms - # --- End: for # Still here? cms = tuple(cms) @@ -930,15 +896,11 @@ def cell_measure_has_data_and_units(self, msr): """ if not msr.Units: - self.message = "{0!r} cell measure has no units".format( - msr.identity() - ) + self.message = f"{msr.identity()!r} cell measure has no units" return if not msr.has_data(): - self.message = "{0!r} cell measure has no data".format( - msr.identity() - ) + self.message = f"{msr.identity()!r} cell measure has no data" return return True @@ -969,43 +931,26 @@ def coord_has_identity_and_data(self, coord, axes=None): default=None, ) - # if self.relaxed_identities and identity is not None: - # identity = identity.replace('long_name=', '', 1) - # identity = identity.replace('ncvar%', '', 1) - - if identity is None: - # Coordinate has no identity, but it may have a recognised - # axis. - for ctype in ("T", "X", "Y", "Z"): - if getattr(coord, ctype): - identity = ctype - break - # --- End: if - if identity is not None: all_coord_identities = self.all_coord_identities.setdefault( axes, set() ) if identity in all_coord_identities: - self.message = "multiple {0!r} coordinates".format(identity) - return None + self.message = f"multiple {identity!r} coordinates" + return if coord.has_data() or ( coord.has_bounds() and coord.bounds.has_data() ): all_coord_identities.add(identity) return identity - # --- End: if # Still here? 
- self.message = "{!r} has no identity or no data".format(coord) - - return None + self.message = f"{coord!r} has no identity or no data" def field_ancillary_has_identity_and_data(self, anc): - """Return a field ancillary's identity if it has one and has - data. + """Return a field ancillary's identity if it has one and has data. :Parameters: @@ -1014,42 +959,36 @@ def field_ancillary_has_identity_and_data(self, anc): :Returns: `str` or `None` - The coordinate construct's identity, or `None` if there is - no identity and/or no data. + The coordinate construct's identity, or `None` if + there is no identity and/or no data. """ identity = anc.identity( strict=self.strict_identities, relaxed=self.relaxed_identities, nc_only=self.ncvar_identities, + default=None ) if identity is not None: all_field_anc_identities = self.all_field_anc_identities if identity in all_field_anc_identities: - self.message = "multiple {0!r} field ancillaries".format( - identity - ) - - return None + self.message = f"multiple {identity!r} field ancillaries" + return if anc.has_data(): all_field_anc_identities.add(identity) return identity - # --- End: if # Still here? self.message = ( - "{0!r} field ancillary has no identity or " - "no data".format(anc.identity()) + f"{anc.identity()!r} field ancillary has no identity or " + "no data" ) - return None - def coordinate_reference_signatures(self, refs): - """List the structural signatures of given coordinate - references. + """List the structural signatures of given coordinate references. :Parameters: @@ -1058,7 +997,8 @@ def coordinate_reference_signatures(self, refs): :Returns: `list` - A structural signature of each coordinate reference object. + A structural signature of each coordinate reference + object. **Examples:** @@ -1075,21 +1015,18 @@ def coordinate_reference_signatures(self, refs): for signature in signatures: if signature[0] is None: self.messsage = ( - "{0!r} field can't be aggregated due " + f"{self.f.identity()!r} field can't be aggregated due " "to it having an unidentifiable " - "coordinate " - "reference".format(self.f.identity()) + "coordinate reference" ) return - # --- End: for signatures.sort() return signatures def domain_ancillary_has_identity_and_data(self, anc, identity=None): - """Return a domain ancillary's identity if it has one and has - data. + """Return a domain ancillary's identity if it has one and has data. 
:Parameters: @@ -1111,26 +1048,23 @@ def domain_ancillary_has_identity_and_data(self, anc, identity=None): strict=self.strict_identities, relaxed=self.relaxed_identities, nc_only=self.ncvar_identities, + default=None ) if anc_identity is None: - self.message = "{0!r} domain ancillary has no identity".format( - anc.identity() + self.message = ( + f"{anc.identity()!r} domain ancillary has no identity" ) return all_domain_anc_identities = self.all_domain_anc_identities if anc_identity in all_domain_anc_identities: - self.message = "multiple {0!r} domain ancillaries".format( - anc_identity - ) + self.message = f"multiple {anc.identity()!r} domain ancillaries" return if not anc.has_data(): - self.message = "{0!r} domain ancillary has no data".format( - anc.identity() - ) + self.message = f"{anc.identity()!r} domain ancillary has no data" return all_domain_anc_identities.add(anc_identity) @@ -1151,6 +1085,9 @@ def print_info(self, signature=True): `None` """ + if not is_log_level_detail(logger): + return + if signature: logger.detail( "STRUCTURAL SIGNATURE:\n" + self.string_structural_signature() @@ -1160,7 +1097,7 @@ def print_info(self, signature=True): "CANONICAL COORDINATES:\n" + self.coordinate_values() ) - logger.debug("COMPLETE AGGREGATION METADATA:\n{}".format(self)) + logger.debug(f"COMPLETE AGGREGATION METADATA:\n{self}") def string_structural_signature(self): """Return a multi-line string giving a field's structual @@ -1174,7 +1111,7 @@ def string_structural_signature(self): string = [] for key, value in self.signature._asdict().items(): - string.append("-> {0}: {1!r}".format(key, value)) + string.append(f"-> {key}: {value!r}") return "\n".join(string) @@ -1268,7 +1205,6 @@ def structural_signature(self): ) for identity in self.axis_ids ] - # signature_append(tuple(['Axes'] + x)) Axes = tuple(x) # Whether or not each axis has a dimension coordinate @@ -1295,10 +1231,6 @@ def structural_signature(self): ) for identity in sorted(nd_aux) ] - # if not x: - # x = [None]# - # - # signature_append(tuple(['N-d coordinates'] + x)) Nd_coordinates = tuple(x) # Cell measures @@ -1310,10 +1242,6 @@ def structural_signature(self): ) for units in sorted(msr) ] - # if not x: - # x = [None] - # - # signature_append(tuple(['Cell measures'] + x)) Cell_measures = tuple(x) # Domain ancillaries @@ -1329,10 +1257,6 @@ def structural_signature(self): ) for identity in sorted(domain_anc) ] - # if not x: - # x = [None] - # - # signature_append(tuple(['Domain ancillaries'] + x)) Domain_ancillaries = tuple(x) # Field ancillaries @@ -1348,10 +1272,6 @@ def structural_signature(self): ) for identity in sorted(field_anc) ] - # if not x: - # x = [None] - # - # signature_append(tuple(['Field ancillaries'] + x)) Field_ancillaries = tuple(x) self.signature = self._structural_signature( @@ -1396,7 +1316,7 @@ def find_coordrefs(self, key): coordrefs = self.coordrefs if not coordrefs: - return None + return # Select the coordinate references which contain a pointer to # this coordinate @@ -1405,14 +1325,11 @@ def find_coordrefs(self, key): ] if not names: - return None + return return tuple(sorted(names)) -# --- End: class - - @_manage_log_level_via_verbosity def aggregate( fields, @@ -1783,7 +1700,6 @@ def aggregate( key, type(value) ) ) - # --- End: for equal = properties["equal"] exist = properties["exist"] @@ -1837,11 +1753,11 @@ def aggregate( unaggregatable = True status = 1 - logger.info( - "Unaggregatable {0!r} has{1} been output: {2}".format( - f, exclude, meta.message + if is_log_level_info(logger): + 
logger.info( + f"Unaggregatable {f!r} has{exclude} been output: " + f"{meta.message}" ) - ) if not exclude: # This field does not have a structural signature, so @@ -1851,7 +1767,6 @@ def aggregate( output_fields_append(f) else: output_fields_append(f.copy()) - # --- End: if continue @@ -1860,7 +1775,6 @@ def aggregate( # list of fields with the same structural signature. # ------------------------------------------------------------ signatures.setdefault(meta.signature, []).append(meta) - # --- End: for # ================================================================ # 2. Within each group of fields with the same structural @@ -1957,6 +1871,7 @@ def aggregate( strict=strict_identities, relaxed=relaxed_identities, nc_only=ncvar_identities, + default=None, ) for identity, value in axis_items: if ( @@ -1965,7 +1880,6 @@ def aggregate( ): aggregating_axes.append(identity) break - # --- End: for _create_hash_and_first_values( meta, @@ -2011,21 +1925,21 @@ def aggregate( grouped_meta = _group_fields(meta, axis) if not grouped_meta: - logger.info( - "Unaggregatable {0!r} fields have{1} been output: " - "{2}".format( - meta[0].field.identity(), exclude, meta[0].message + if is_log_level_info(logger): + logger.info( + f"Unaggregatable {meta[0].field.identity()!r} fields " + f"have{exclude} been output: {meta[0].message}" ) - ) unaggregatable = True break if len(grouped_meta) == number_of_fields: - logger.debug( - "{0!r} fields can't be aggregated along their " - "{1!r} axis".format(meta[0].field.identity(), axis) - ) + if is_log_level_debug(logger): + logger.debug( + f"{meta[0].field.identity()!r} fields can't be " + f"aggregated along their {axis!r} axis" + ) continue # -------------------------------------------------------- @@ -2051,12 +1965,13 @@ def aggregate( if not _ok_coordinate_arrays( m, axis, overlap, contiguous, verbose ): - logger.info( - "Unaggregatable {!r} fields have{} been " - "output: {}".format( - m[0].field.identity(), exclude, m[0].message + if is_log_level_info(logger): + logger.info( + "Unaggregatable {!r} fields have{} been " + "output: {}".format( + m[0].field.identity(), exclude, m[0].message + ) ) - ) unaggregatable = True break @@ -2082,19 +1997,17 @@ def aggregate( # abandon all aggregations on the fields with # this structural signature, including those # already done. - logger.info( - "Unaggregatable {!r} fields have{} been " - "output: {}".format( - m1.field.identity(), exclude, m1.message - ) - ) - + if is_log_level_info(logger): + logger.info( + f"Unaggregatable {m1.field.identity()!r} " + f"fields have{exclude} been output: " + f"{m1.message}" + ) + unaggregatable = True break - # --- End: for m[:] = [m0] - # --- End: for if unaggregatable: break @@ -2106,7 +2019,6 @@ def aggregate( # aggregation along the next axis. # -------------------------------------------------------- meta = [m for gm in grouped_meta for m in gm] - # --- End: for # Add fields to the output list if unaggregatable: @@ -2119,8 +2031,6 @@ def aggregate( else: output_fields.extend((m.field for m in meta)) - # --- End: for - aggregate.status = status if status: @@ -2202,10 +2112,8 @@ def _create_hash_and_first_values( continue # Still here? - # dim_coord = m.field.dimension_coordinates( - # ).filter_by_axis(axis, mode="and", view=True) dim_coord = m.field.dimension_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None + filter_by_axis=(axis,), default=None ) # Find the sort indices for this axis ... 
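# ---------------------------------------------------------------------
# A minimal sketch of the idea behind _create_hash_and_first_values,
# which the following hunks modify: each coordinate array is reduced to
# a hash plus its first and last cell values, so fields can be grouped
# and sorted along a candidate aggregating axis without repeatedly
# comparing full arrays.  hashlib/sha1 and the coordinate_summary name
# are illustrative assumptions, not the hashing used inside cf.
# ---------------------------------------------------------------------
import hashlib

import numpy as np


def coordinate_summary(coord):
    """Return (hash, first, last) for a 1-d coordinate array."""
    coord = np.asarray(coord, dtype="float64")  # canonical dtype
    digest = hashlib.sha1(coord.tobytes()).hexdigest()
    return digest, coord[0], coord[-1]


# Identical latitude arrays hash identically, so they cannot be the
# aggregating axis; the time arrays differ, marking time as the axis
# along which the two fields could be joined.
lat = np.linspace(-90.0, 90.0, 73)
time0 = np.arange(0, 30, dtype="float64")
time1 = np.arange(30, 60, dtype="float64")

assert coordinate_summary(lat) == coordinate_summary(lat.copy())
assert coordinate_summary(time0) != coordinate_summary(time1)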
@@ -2336,18 +2244,15 @@ def _create_hash_and_first_values( # array0 = array[-1, ...].copy() # array0.sort() # m.last_bounds[identity] = array0 - # # --- End: if # # hash_values.append(hash_value) # # # Reinstate the coordinate's original units # coord.Units = coord_units - # --- End: for m_hash_values[identity] = hash_values m_first_values[identity] = first_values m_last_values[identity] = last_values - # --- End: for # ------------------------------------------------------------ # N-d auxiliary coordinates @@ -2402,7 +2307,6 @@ def _create_hash_and_first_values( h = (h,) aux["hash_value"] = h - # --- End: if # ------------------------------------------------------------ # Cell measures @@ -2442,7 +2346,6 @@ def _create_hash_and_first_values( hash_values.append((h,)) msr["hash_values"] = hash_values - # --- End: if # ------------------------------------------------------------ # Field ancillaries @@ -2482,7 +2385,6 @@ def _create_hash_and_first_values( ) anc["hash_value"] = (h,) - # --- End: if # ------------------------------------------------------------ # Domain ancillaries @@ -2561,10 +2463,8 @@ def _create_hash_and_first_values( # False, False, False, hfl_cache, rtol, atol) # # anc['hash_value'] = h - # --- End: if m.cell_values = True - # --- End: for def _get_hfl( @@ -2620,7 +2520,6 @@ def _get_hfl( if first_and_last_bounds: first, last = hfl_cache.flb.get(key, (None, None)) create_flb = first is None - # --- End: if if create_hash or create_fl or create_flb: # Change the data type if required @@ -2659,7 +2558,6 @@ def _get_hfl( hash_value = hash_value0 found_close = True break - # --- End: for if not found_close: hfl_cache.hash_to_array[hash_value] = array @@ -2667,7 +2565,6 @@ def _get_hfl( pass hfl_cache.hash[key] = hash_value - # --- End: if if create_fl: first = array.item(0) @@ -2679,7 +2576,6 @@ def _get_hfl( first = numpy_sort(array[0, ...]) last = numpy_sort(array[-1, ...]) hfl_cache.flb[key] = (first, last) - # --- End: if if first_and_last_values or first_and_last_bounds: return hash_value, first, last @@ -2716,8 +2612,6 @@ def _group_fields(meta, axis): def _hash_values(m): return sort_by_axis_ids(m.hash_values) - # --- End: def - meta.sort(key=_hash_values) # Create a new group of potentially aggregatable fields (which @@ -2739,7 +2633,6 @@ def _hash_values(m): if value != hash1[identity]: count += 1 a_identity = identity - # --- End: for hash0 = hash1 @@ -2768,7 +2661,6 @@ def _hash_values(m): # different data array values ok = False break - # --- End: for if not ok: groups_of_fields.append([m1]) @@ -2790,7 +2682,6 @@ def _hash_values(m): # data array values ok = False break - # --- End: for if not ok: groups_of_fields.append([m1]) @@ -2822,7 +2713,6 @@ def _hash_values(m): # fields which contains field1. 
# -------------------------------------------------------- groups_of_fields.append([m1]) - # --- End: for return groups_of_fields @@ -2846,8 +2736,6 @@ def _sorted_by_first_values(meta, axis): def _first_values(m): return sort_by_axis_ids(m.first_values) - # --- End: def - meta.sort(key=_first_values) @@ -2937,7 +2825,6 @@ def _ok_coordinate_arrays(meta, axis, overlap, contiguous, verbose=None): ) return - # --- End: if # else: # for m0, m1 in zip(meta[:-1], meta[1:]): @@ -2958,7 +2845,6 @@ def _ok_coordinate_arrays(meta, axis, overlap, contiguous, verbose=None): # m1_first_bounds[1], m0_last_bounds[1] # ) # return - # # --- End: if if contiguous: for m0, m1 in zip(meta[:-1], meta[1:]): @@ -2979,8 +2865,6 @@ def _ok_coordinate_arrays(meta, axis, overlap, contiguous, verbose=None): ) ) return - # --- End: if - # --- End: if else: # ------------------------------------------------------------ @@ -3007,7 +2891,6 @@ def _ok_coordinate_arrays(meta, axis, overlap, contiguous, verbose=None): ) return - # --- End: if # ---------------------------------------------------------------- # Still here? Then the aggregating axis does not overlap between @@ -3053,13 +2936,11 @@ def _aggregate_2_fields( # ---------------------------------------------------------------- # Map the axes of field1 to those of field0 # ---------------------------------------------------------------- - dim1_name_map = {} - for identity in m0.axis_ids: - dim1_name_map[m1.id_to_axis[identity]] = m0.id_to_axis[identity] + dim1_name_map = {m1.id_to_axis[identity]: m0.id_to_axis[identity] + for identity in m0.axis_ids} - dim0_name_map = {} - for axis1, axis0 in dim1_name_map.items(): - dim0_name_map[axis0] = axis1 + dim0_name_map = {axis0: axis1 + for axis1, axis0 in dim1_name_map.items()} # ---------------------------------------------------------------- # In each field, find the identifier of the aggregating axis. 
@@ -3111,11 +2992,7 @@ def _aggregate_2_fields( hash_value0 = aux0["hash_value"] hash_value1 = aux1["hash_value"] - # try: - # hash_value0.append(hash_value1) - # except AttributeError: aux0["hash_value"] = hash_value0 + hash_value1 - # --- End: for # Cell measures for units in m0.msr: @@ -3138,11 +3015,7 @@ def _aggregate_2_fields( ) ) - # try: - # hash_values0[i].append(hash_values1[i]) - # except AttributeError: hash_values0[i] = hash_values0[i] + hash_values1[i] - # --- End: for # Field ancillaries for identity in m0.field_anc: @@ -3157,11 +3030,8 @@ def _aggregate_2_fields( hash_value0 = anc0["hash_value"] hash_value1 = anc1["hash_value"] - # try: - # hash_value0.append(hash_value1) - # except AttributeError: + anc0["hash_value"] = hash_value0 + hash_value1 - # --- End: for # Domain ancillaries for identity in m0.domain_anc: @@ -3176,30 +3046,27 @@ def _aggregate_2_fields( hash_value0 = anc0["hash_value"] hash_value1 = anc1["hash_value"] - # try: - # hash_value0.append(hash_value1) - # except AttributeError: + anc0["hash_value"] = hash_value0 + hash_value1 - # --- End: for # ---------------------------------------------------------------- # For each matching pair of coordinates, cell measures, field and # domain ancillaries which span the aggregating axis, insert the # one from field1 into the one from field0 # ---------------------------------------------------------------- + construct_axes0 = field0.constructs.data_axes() + construct_axes1 = field1.constructs.data_axes() + for key0, key1, construct0, construct1 in spanning_variables: - construct_axes0 = field0.get_data_axes(key0) - construct_axes1 = field1.get_data_axes(key1) + axes0 = construct_axes0[key0] + axes1 = construct_axes1[key1] # Ensure that the axis orders are the same in both constructs - iaxes = [ - construct_axes1.index(dim0_name_map[axis0]) - for axis0 in construct_axes0 - ] + iaxes = [axes1.index(dim0_name_map[axis0]) for axis0 in axes0] construct1.transpose(iaxes, inplace=True) # Find the position of the concatenating axis - axis = construct_axes0.index(adim0) + axis = axes0.index(adim0) if direction0: # The fields are increasing along the aggregating axis @@ -3240,7 +3107,6 @@ def _aggregate_2_fields( _preserve=False, ) construct0.bounds.set_data(data) - # --- End: for # ---------------------------------------------------------------- # Insert the data array from field1 into the data array of field0 @@ -3256,13 +3122,11 @@ def _aggregate_2_fields( if axis0 not in data_axes0: field0.insert_dimension(axis0, position=0, inplace=True) data_axes0.insert(0, axis0) - # --- End: for for axis0 in data_axes0: axis1 = dim0_name_map[axis0] if axis1 not in data_axes1: field1.insert_dimension(axis1, position=0, inplace=True) - # --- End: for # Find the position of the concatenating axis if adim0 not in data_axes0: @@ -3274,7 +3138,8 @@ def _aggregate_2_fields( else: axis = data_axes0.index(adim0) - # Get the data axes again, in case we've inserted new dimensions + # Get the data axes again, in case we've inserted new + # dimensions data_axes0 = field0.get_data_axes() data_axes1 = field1.get_data_axes() @@ -3310,7 +3175,6 @@ def _aggregate_2_fields( # Insert the concatentated data into the field field0.set_data(data, set_axes=False, copy=False) - # --- End: if # Make sure that field0 has a standard_name, if possible. 
if getattr(field0, "id", None) is not None: @@ -3318,7 +3182,6 @@ def _aggregate_2_fields( if standard_name is not None: field0.set_property("standard_name", standard_name, copy=False) del field0.id - # --- End: if # ----------------------------------------------------------------- # Update the properties in field0 @@ -3355,7 +3218,6 @@ def _aggregate_2_fields( else: if value0 is not None: field0.del_property(prop) - # --- End: for # # ---------------------------------------------------------------- # # Update the attributes in field0 @@ -3379,7 +3241,6 @@ def _aggregate_2_fields( # m0.attributes.discard(attr) # if value0 is not None: # delattr(field0, attr) - # # --- End: for # Note that the field in this _Meta object has already been # aggregated diff --git a/cf/constructs.py b/cf/constructs.py index 0d8264a5bf..7321ccf985 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -166,21 +166,21 @@ def _filter_by_identity(self, arg, todict, _config, identities): """ # Allow keys without the 'key%' prefix - for n, identity in enumerate(identities): - if identity in self: - identities = list(identities) - identities[n] = "key%" + identity - break - + construct_types = self._construct_type + identities = ["key%" + i if i in construct_types else i + for i in identities] + ctypes = [i for i in "XTYZ" if i in identities] config = {"identities_kwargs": {"ctypes": ctypes}} + if ctypes: # Exclude a ctype from the short circuit test config["short_circuit_test"] = lambda x: ( x not in ctypes and self._short_circuit_test(x) ) - config.update(_config) + if _config: + config.update(_config) return super()._filter_by_identity(arg, todict, config, identities) diff --git a/cf/field.py b/cf/field.py index fe5b188c33..684965467d 100644 --- a/cf/field.py +++ b/cf/field.py @@ -99,6 +99,7 @@ # -------------------------------------------------------------------- # Commonly used units # -------------------------------------------------------------------- +_units_degrees = Units("degrees") _units_radians = Units("radians") _units_metres = Units("m") _units_1 = Units("1") @@ -15279,7 +15280,7 @@ def argmax(self, axis=None): # return out @_manage_log_level_via_verbosity - def autocyclic(self, verbose=None): + def autocyclic(self, key=None, coord=None, verbose=None): """Set dimensions to be cyclic. 
A dimension is set to be cyclic if it has a unique longitude (or @@ -15299,43 +15300,41 @@ def autocyclic(self, verbose=None): `bool` - **Examples:** - - >>> f.autocyclic() - """ - key, dim = self.dimension_coordinate( - "X", item=True, default=(None, None) - ) + if coord is None: + key, coord = self.dimension_coordinate( + "X", item=True, default=(None, None) + ) - if dim is None: + if coord is None: return False - if not dim.Units.islongitude: - if dim.get_property("standard_name", None) not in ( - "longitude", - "grid_longitude", - ): - self.cyclic(key, iscyclic=False) - return False - - bounds = dim.get_bounds(None) + bounds = coord.get_bounds(None) if bounds is None: self.cyclic(key, iscyclic=False) return False - - bounds_data = bounds.get_data(None, _fill_value=False) - if bounds_data is None: + + data = bounds.get_data(None, _fill_value=False) + if data is None: + self.cyclic(key, iscyclic=False) + return False + + units = bounds.Units + if units.islongitude: + period = Data(360.0, units="degrees_east") + elif units == _units_degrees: + period = Data(360.0, units="degrees") + else: self.cyclic(key, iscyclic=False) return False - bounds = bounds_data.array - - period = Data(360.0, units="degrees") + period.Units = data.Units - period.Units = bounds_data.Units +# diff = bounds.last_element() - bounds.first_element() - if abs(bounds[-1, -1] - bounds[0, 0]) != period.array: +# if abs(bounds[-1, -1] - bounds[0, 0]) != period.array: +# if abs(bounds.last_element() - bounds.first_element()) != period: #.array: + if abs(data.last_element() - data.first_element()) != period.array: self.cyclic(key, iscyclic=False) return False @@ -15813,18 +15812,19 @@ def auxiliary_coordinate( if c is not None: return c - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("auxiliary_coordinate",), - "auxiliary_coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) + if identity: + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: + return self._select_construct( + ("auxiliary_coordinate",), + "auxiliary_coordinate", + (), + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact", + ) if default is None: return default @@ -16456,19 +16456,20 @@ def coordinate( if c is not None: return c - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("dimension_coordinate", "auxiliary_coordinate"), - "coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) - + if identity: + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: + return self._select_construct( + ("dimension_coordinate", "auxiliary_coordinate"), + "coordinate", + (), + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact", + ) + if default is None: return default @@ -16822,18 +16823,19 @@ def dimension_coordinate( if c is not None: return c - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("dimension_coordinate",), - "dimension_coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) + if identity: + da_key = self.domain_axis(*identity, key=True, default=None) + if da_key is not None: + return self._select_construct( + 
("dimension_coordinate",), + "dimension_coordinate", + (), + key=key, + item=item, + default=default, + filter_by_axis=(da_key,), + axis_mode="exact", + ) if default is None: return None @@ -17350,10 +17352,6 @@ def set_construct( if construct_type == "dimension_coordinate": data_axes = self.constructs.data_axes() - # dimension_coordinates = self.dimension_coordinates(todict=True) - # for dim, dim_axes in tuple( - # dimension_coordinates.data_axes().items() - # ): for dim in self.dimension_coordinates(todict=True): if dim == key: continue @@ -17366,7 +17364,7 @@ def set_construct( if construct_type == "dimension_coordinate": construct.autoperiod(inplace=True) self._conform_coordinate_references(out) - self.autocyclic() + self.autocyclic(key=out, coord=construct) self._conform_cell_methods() elif construct_type == "auxiliary_coordinate": @@ -17453,13 +17451,24 @@ def get_data_axes(self, identity=None, default=ValueError()): """ if identity is None: + # Get axes of Field data array return super().get_data_axes(default=default) + axes = super().get_data_axes(identity, default=None) + if axes is not None: + return axes + key = self.construct_key(identity, default=None) - if key is None: - return self.construct_key(identity, default=default) + if key is not None: + return super().get_data_axes(key=key, default=default) - return super().get_data_axes(key=key, default=default) + if default is None: + return default + + return self._default( + default, + f"Can't get axes for non-existent construct {identify!r}" + ) @_inplace_enabled(default=False) @_manage_log_level_via_verbosity diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 41299b888a..632531bfac 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -50,7 +50,7 @@ def ctype(self): if self.Y: return "Y" - + if self.Z: return "Z" @@ -492,7 +492,9 @@ def identity( 'no identity' """ - out = super().identity(default=None) + out = super().identity(strict=strict, + relaxed=relaxed, + nc_only=nc_only, default=None) if out is not None: return out @@ -502,7 +504,7 @@ def identity( return default - def identities(self, generator=False, ctypes="XTYZ", **kwargs): + def identities(self, generator=False, ctypes="XTYZ"): """Return all possible identities. The identities comprise: @@ -566,14 +568,6 @@ def identities(self, generator=False, ctypes="XTYZ", **kwargs): 'ncvar%tas'] """ - - def _ctypes_iter(self, ctypes): - for c in ctypes: - if getattr(self, c): - # This coordinate constructs is of this type - yield c - break - identities = super().identities(generator=True) g = chain(identities, _ctypes_iter(self, ctypes)) @@ -581,3 +575,13 @@ def _ctypes_iter(self, ctypes): return g return list(g) + + +def _ctypes_iter(coord, ctypes): + """Generator for returning the coordinate type letter.""" + for c in ctypes: + if getattr(coord, c): + # This coordinate construct is of this type + yield c + return + diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index fc3472b11c..8fd8a743fc 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -123,18 +123,17 @@ def id(self): Note that `id` is not a CF property and so is not read from, nor written to, datasets. - .. seealso:: `identity`, `identities`, `match_by_identity` + .. 
seealso:: `identity`, `identities` **Examples:** - >>> f.id = 'um01002' + >>> f = {{package}}.{{class}}() + >>> f.id = "foo" >>> f.id - 'um01002' - >>> f.match_by_identity('id%um10002') - True + 'foo' >>> del f.id - """ + """ try: return self._custom["id"] except KeyError: diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index ab120039a4..691e1c8dcc 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -1878,30 +1878,22 @@ def period(self, *value): if value is not None: value = Data.asdata(value) - units = value.Units - if not units: - value = value.override_units(self.Units) - elif units != self.Units: - if units.equivalent(self.Units): - value.Units = self.Units + value_units = value.Units + units = self.Units + if not value_units: + value = value.override_units(units) + elif value_units != units: + if value_units.equivalent(units): + value.Units = units else: raise ValueError( - f"Period units {units!r} are not equivalent to data " - f"units {self.Units!r}" + f"Period units {value_units!r} are not " + f"equivalent to data units {units!r}" ) value = abs(value) value.dtype = float - # array = self.array - # r = abs(array[-1] - array[0]) - # - # if r >= value.datum(0): - # raise ValueError( - # "The data range of {!r} is not less than the " - # "period of {!r}".format(r, value) - # ) - self._custom["period"] = value return old diff --git a/cf/read_write/read.py b/cf/read_write/read.py index 79f578adf9..daf36380c2 100644 --- a/cf/read_write/read.py +++ b/cf/read_write/read.py @@ -971,12 +971,6 @@ def _read_a_file( else: fields = () - # ---------------------------------------------------------------- - # Check for cyclic dimensions - # ---------------------------------------------------------------- - for f in fields: - f.autocyclic() - # ---------------------------------------------------------------- # Return the fields # ---------------------------------------------------------------- diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index c58dc23cef..f584a27f9e 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -633,7 +633,6 @@ def __init__( # records. 
split_group = True nz = 1 - # --- End: if if split_group: # This group doesn't form a complete nz x nt @@ -648,10 +647,8 @@ def __init__( groups2.append(group) groups_nz.append(nz) groups_nt.append(group_size / nz) - # --- End: for groups = groups2 - # --- End: if rec0 = groups[0][0] @@ -821,7 +818,6 @@ def __init__( self.cf_info = cf_info break - # --- End: if if stash: section, item = divmod(stash, 1000) @@ -864,12 +860,6 @@ def __init__( extra = recs[0].get_extra_data() self.extra = extra - # -------------------------------------------------------- - # Set some derived metadata quantities - # -------------------------------------------------------- - logger.detail(self.__dict__) # pragma: no cover - self.printfdr() # pragma: no cover - # -------------------------------------------------------- # Create the 'T' dimension coordinate # -------------------------------------------------------- @@ -922,7 +912,6 @@ def __init__( ) else: ykey, yc = self.xy_coordinate(axiscode, "y") - # --- End: if # -------------------------------------------------------- # Create the 'X' dimension coordinate @@ -944,7 +933,6 @@ def __init__( ) else: xkey, xc = self.xy_coordinate(axiscode, "x") - # --- End: if # -10: rotated latitude (not an official axis code) # -11: rotated longitude (not an official axis code) @@ -1443,7 +1431,6 @@ def create_cell_methods(self): cell_methods.append(cf_info["where"]) if "over" in cf_info: cell_methods.append(cf_info["over"]) - # --- End: if if LBPROC == 64: cell_methods.append("x: mean") @@ -1477,7 +1464,6 @@ def create_cell_methods(self): elif LBTIM_IB == 3: cell_methods.append(axis + ": mean within years") cell_methods.append(axis + ": mean over years") - # --- End: if if not cell_methods: return [] @@ -1593,7 +1579,6 @@ def coord_positive(self, c, axiscode, domain_axis_key): c.positive = positive if positive == "down" and axiscode != 4: self.down_axes.add(domain_axis_key) - # --- End: if return c @@ -1866,7 +1851,6 @@ def create_data(self): rec.hdr_offset, location ) ) # pragma: no cover - # --- End: for # Populate the 1-d partition matrix matrix = numpy_array(partitions, dtype=object) @@ -1925,12 +1909,10 @@ def create_data(self): logger.info( " location = {}".format(location) ) # pragma: no cover - # --- End: for # Populate the 2-d partition matrix matrix = numpy_array(partitions, dtype=object) matrix.resize(pmshape) - # --- End: if data_axes = pmaxes + data_axes @@ -1949,7 +1931,6 @@ def create_data(self): data._size = data_size data.partitions = PartitionMatrix(matrix, pmaxes) data.dtype = numpy_result_type(*file_data_types) - # --- End: if self.data = data self.data_axes = data_axes @@ -2001,7 +1982,6 @@ def decode_lbexp(self): index = int(bits[i : i + 6], 2) if index < _n_characters: runid.append(_characters[index]) - # --- End: for runid = "".join(runid) @@ -2053,7 +2033,6 @@ def dtime(self, rec): _cached_date2num[key] = time except ValueError: time = numpy_nan # ppp - # --- End: if return time @@ -2085,7 +2064,6 @@ def fdr(self): out.append("EXTRA DATA:") for key in sorted(self.extra): out.append("{0}: {1}".format(key, str(self.extra[key]))) - # --- End: if out.append("file: " + self.filename) out.append( @@ -2134,7 +2112,6 @@ def latitude_longitude_2d_aux_coordinates(self, yc, xc): atol = self.atol if abs(BDX) >= atol and abs(BDY) >= atol: _cached_latlon[cache_key] = (lat, lon) - # --- End: if if xc.has_bounds() and yc.has_bounds(): # TODO push to implementation cache_key = ("bounds",) + cache_key @@ -2295,7 +2272,6 @@ def data_type_in_file(self, rec): # else: # # 
Float # data_type = 'float%d' % (rec_file.word_size * 8) - # # --- End: if # # return numpy_dtype(data_type) @@ -2686,7 +2662,6 @@ def vtime(self, rec): _cached_date2num[key] = time except ValueError: time = numpy_nan # ppp - # --- End: if return time @@ -2760,8 +2735,6 @@ def vtime(self, rec): # pubattr={"axis": None}, # dimensions=[xdim], # ) # DCH xdim? - # # --- End: if - # # --- End: for def unrotated_latlon(self, rotated_lat, rotated_lon, pole_lat, pole_lon): """Create 2-d arrays of unrotated latitudes and longitudes. @@ -3057,7 +3030,6 @@ def z_reference_coordinate(self, axiscode): _cached_z_reference_coordinate[key] = dc copy = False - # --- End: if self.implementation.set_dimension_coordinate( self.field, dc, axes=[_axis["z"]], copy=copy @@ -3066,8 +3038,6 @@ def z_reference_coordinate(self, axiscode): return dc -# --- End: class - # _stash2standard_name = {} # # def load_stash2standard_name(table=None, delimiter='!', merge=True): @@ -3188,7 +3158,6 @@ def z_reference_coordinate(self, axiscode): # stash2sn[key] += line # else: # stash2sn[key] = line -# # --- End: for # # if not merge: # _stash2standard_name.clear() @@ -3388,9 +3357,7 @@ def file_open(self, filename): ) -# --- End: class - - + """ Problems: diff --git a/cf/test/test_aggregate.py b/cf/test/test_aggregate.py index 9c4cab64ad..6559bbd817 100644 --- a/cf/test/test_aggregate.py +++ b/cf/test/test_aggregate.py @@ -176,66 +176,65 @@ def test_aggregate_exist_equal_ignore_opts(self): cf.chunksize(self.original_chunksize) def test_aggregate_verbosity(self): - for chunksize in self.chunk_sizes: - f0 = cf.example_field(0) - f1 = cf.example_field(1) - - detail_header = "DETAIL:cf.aggregate:STRUCTURAL SIGNATURE:" - debug_header = "DEBUG:cf.aggregate:COMPLETE AGGREGATION METADATA:" - - # 'DEBUG' (-1) verbosity should output both log message headers... - with self.assertLogs(level="NOTSET") as catch: - cf.aggregate([f0, f1], verbose=-1) - for header in (detail_header, debug_header): - self.assertTrue( - any( - log_item.startswith(header) - for log_item in catch.output - ), - "No log entry begins with '{}'".format(header), - ) - - # ...but with 'DETAIL' (3), should get only the detail-level one. - with self.assertLogs(level="NOTSET") as catch: - cf.aggregate([f0, f1], verbose=3) + f0 = cf.example_field(0) + f1 = cf.example_field(1) + + detail_header = "DETAIL:cf.aggregate:STRUCTURAL SIGNATURE:" + debug_header = "DEBUG:cf.aggregate:COMPLETE AGGREGATION METADATA:" + + # 'DEBUG' (-1) verbosity should output both log message headers... + with self.assertLogs(level="NOTSET") as catch: + cf.aggregate([f0, f1], verbose=-1) + for header in (detail_header, debug_header): self.assertTrue( any( - log_item.startswith(detail_header) + log_item.startswith(header) for log_item in catch.output ), - "No log entry begins with '{}'".format(detail_header), + "No log entry begins with '{}'".format(header), ) + + # ...but with 'DETAIL' (3), should get only the detail-level one. + with self.assertLogs(level="NOTSET") as catch: + cf.aggregate([f0, f1], verbose=3) + self.assertTrue( + any( + log_item.startswith(detail_header) + for log_item in catch.output + ), + "No log entry begins with '{}'".format(detail_header), + ) + self.assertFalse( + any( + log_item.startswith(debug_header) + for log_item in catch.output + ), + "A log entry begins with '{}' but should not".format( + debug_header + ), + ) + + # and neither should emerge at the 'WARNING' (1) level. 
+ with self.assertLogs(level="NOTSET") as catch: + logger.warning( + "Dummy message to log something at warning level so that " + "'assertLog' does not error when no logs messages emerge." + ) + # Note: can use assertNoLogs in Python 3.10 to avoid this, see: + # https://bugs.python.org/issue39385 + + cf.aggregate([f0, f1], verbose=1) + for header in (detail_header, debug_header): self.assertFalse( any( - log_item.startswith(debug_header) + log_item.startswith(header) for log_item in catch.output ), "A log entry begins with '{}' but should not".format( - debug_header + header ), ) - # and neither should emerge at the 'WARNING' (1) level. - with self.assertLogs(level="NOTSET") as catch: - logger.warning( - "Dummy message to log something at warning level so that " - "'assertLog' does not error when no logs messages emerge." - ) - # Note: can use assertNoLogs in Python 3.10 to avoid this, see: - # https://bugs.python.org/issue39385 - - cf.aggregate([f0, f1], verbose=1) - for header in (detail_header, debug_header): - self.assertFalse( - any( - log_item.startswith(header) - for log_item in catch.output - ), - "A log entry begins with '{}' but should not".format( - header - ), - ) - if __name__ == "__main__": print("Run date:", datetime.datetime.now()) diff --git a/cf/test/test_read_write.py b/cf/test/test_read_write.py index 3426d133e6..e13b685730 100644 --- a/cf/test/test_read_write.py +++ b/cf/test/test_read_write.py @@ -56,24 +56,12 @@ class read_writeTest(unittest.TestCase): ) chunk_sizes = (100000, 300) - original_chunksize = cf.chunksize() - - test_only = [] - # test_only = ['NOTHING!!!!!'] - # test_only = ['test_write_filename'] - # test_only = ['test_read_write_unlimited'] - # test_only = ['test_write_datatype'] - # test_only = ['test_read_directory'] - # test_only = ['test_read_string'] - # test_only = ['test_read_write_netCDF4_compress_shuffle'] + f0 = cf.example_field(0) + f1 = cf.example_field(1) + def test_write_filename(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - tmpfiles.append(tmpfile) - - f = cf.example_field(0) + f = self.f0 a = f.array cf.write(f, tmpfile) @@ -85,10 +73,7 @@ def test_write_filename(self): self.assertTrue((a == g[0].array).all()) def test_read_mask(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - f = cf.example_field(0) + f = self.f0.copy() N = f.size @@ -123,9 +108,6 @@ def test_read_mask(self): self.assertEqual(numpy.ma.count(g.data.array), N - 2) def test_read_directory(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - pwd = os.getcwd() + "/" dir = "dir_" + inspect.stack()[0][3] @@ -135,7 +117,7 @@ def test_read_directory(self): except FileExistsError: pass except Exception: - raise ValueError("Can not mkdir {}{}".format(pwd, dir)) + raise ValueError(f"Can not mkdir {pwd}{dir}") f = "test_file2.nc" try: @@ -149,7 +131,7 @@ def test_read_directory(self): except FileExistsError: pass except Exception: - raise ValueError("Can not mkdir {}{}".format(pwd, subdir)) + raise ValueError(f"Can not mkdir {pwd}{subdir}") for f in ("test_file3.nc", "test_file.nc"): try: @@ -161,38 +143,32 @@ def test_read_directory(self): self.assertEqual(len(f), 1, f) f = cf.read(dir, recursive=True, aggregate=False) - self.assertEqual(len(f), 3, f) + self.assertEqual(len(f), 3) f = cf.read([dir, subdir], aggregate=False) - self.assertEqual(len(f), 3, f) + self.assertEqual(len(f), 3) f = cf.read([subdir, dir], aggregate=False) - self.assertEqual(len(f), 3, f) + 
self.assertEqual(len(f), 3) f = cf.read([dir, subdir], recursive=True, aggregate=False) - self.assertEqual(len(f), 5, f) + self.assertEqual(len(f), 5) f = cf.read(subdir, aggregate=False) - self.assertEqual(len(f), 2, f) + self.assertEqual(len(f), 2) f = cf.read(subdir, recursive=True, aggregate=False) - self.assertEqual(len(f), 2, f) + self.assertEqual(len(f), 2) shutil.rmtree(dir) def test_read_select(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # select on field list f = cf.read(self.filename, select="eastward_wind") g = cf.read(self.filename) self.assertTrue(f.equals(g, verbose=2), "Bad read with select keyword") def test_read_squeeze(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # select on field list cf.read(self.filename, squeeze=True) cf.read(self.filename, unsqueeze=True) @@ -200,17 +176,11 @@ def test_read_squeeze(self): cf.read(self.filename, unsqueeze=True, squeeze=True) def test_read_aggregate(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - cf.read(self.filename, aggregate=True) cf.read(self.filename, aggregate=False) cf.read(self.filename, aggregate={}) def test_read_extra(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - # Test field keyword of cf.read filename = self.filename @@ -262,85 +232,66 @@ def test_read_extra(self): self.assertEqual(len(f), 15, "\n" + str(f)) def test_read_write_format(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - + cf.write(self.f1, tmpfile) + for chunksize in self.chunk_sizes: - cf.chunksize(chunksize) - for fmt in ( - "NETCDF3_CLASSIC", - "NETCDF3_64BIT", - "NETCDF3_64BIT_OFFSET", - "NETCDF3_64BIT_DATA", - "NETCDF4", - "NETCDF4_CLASSIC", - "CFA", - ): - # print (fmt, string) - f = cf.read(self.filename)[0] - f0 = f.copy() - cf.write(f, tmpfile, fmt=fmt) - g = cf.read(tmpfile, verbose=0) - self.assertEqual(len(g), 1, "g = " + repr(g)) - g0 = g[0] - - self.assertTrue( - f0.equals(g0, verbose=1), - "Bad read/write of format {!r}".format(fmt), - ) + with cf.chunksize(chunksize): + for fmt in ( + "NETCDF3_CLASSIC", + "NETCDF3_64BIT", + "NETCDF3_64BIT_OFFSET", + "NETCDF3_64BIT_DATA", + "NETCDF4", + "NETCDF4_CLASSIC", + "CFA", + ): + f = cf.read(tmpfile)[0] + + cf.write(f, tmpfile2, fmt=fmt) + g = cf.read(tmpfile2, verbose=0) + self.assertEqual(len(g), 1) + g = g[0] + + self.assertTrue( + f.equals(g, verbose=1), + f"Bad read/write of format {fmt!r}" + ) def test_read_write_netCDF4_compress_shuffle(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - tmpfiles.append(tmpfile) - for chunksize in self.chunk_sizes: - cf.chunksize(chunksize) - f = cf.read(self.filename)[0] - for fmt in ("NETCDF4", "NETCDF4_CLASSIC", "CFA4"): - for shuffle in (True,): - for compress in (1,): # range(10): - cf.write( - f, - tmpfile, - fmt=fmt, - compress=compress, - shuffle=shuffle, - ) - g = cf.read(tmpfile)[0] - self.assertTrue( - f.equals(g, verbose=2), - "Bad read/write with lossless compression: " - "{0}, {1}, {2}".format(fmt, compress, shuffle), - ) - - cf.chunksize(self.original_chunksize) + with cf.chunksize(chunksize): + f = cf.read(self.filename)[0] + for fmt in ("NETCDF4", "NETCDF4_CLASSIC", "CFA4"): + cf.write( + f, + tmpfile, + fmt=fmt, + compress=1, + shuffle=True, + ) + g = cf.read(tmpfile)[0] + self.assertTrue( + f.equals(g, verbose=2), + f"Bad read/write with lossless compression: {fmt}" + ) def test_write_datatype(self): - if 
self.test_only and inspect.stack()[0][3] not in self.test_only: - return - - tmpfiles.append(tmpfile) - for chunksize in self.chunk_sizes: - cf.chunksize(chunksize) - f = cf.read(self.filename)[0] - self.assertEqual(f.dtype, numpy.dtype(float)) - cf.write( - f, - tmpfile, - fmt="NETCDF4", - datatype={numpy.dtype(float): numpy.dtype("float32")}, - ) - g = cf.read(tmpfile)[0] - self.assertEqual( - g.dtype, - numpy.dtype("float32"), - "datatype read in is " + str(g.dtype), - ) - - cf.chunksize(self.original_chunksize) + with cf.chunksize(chunksize): + f = cf.read(self.filename)[0] + self.assertEqual(f.dtype, numpy.dtype(float)) + cf.write( + f, + tmpfile, + fmt="NETCDF4", + datatype={numpy.dtype(float): numpy.dtype("float32")}, + ) + g = cf.read(tmpfile)[0] + self.assertEqual( + g.dtype, + numpy.dtype("float32"), + "datatype read in is " + str(g.dtype), + ) # Keyword single f = cf.read(self.filename)[0] @@ -353,8 +304,6 @@ def test_write_datatype(self): "datatype read in is " + str(g.dtype), ) - tmpfiles.append(tmpfile2) - # Keyword double f = g self.assertEqual(f.dtype, numpy.dtype("float32")) @@ -377,72 +326,44 @@ def test_write_datatype(self): cf.write(g, datatype=datatype, double=True) def test_write_reference_datetime(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for reference_datetime in ("1751-2-3", "1492-12-30"): - for chunksize in self.chunk_sizes: - cf.chunksize(chunksize) - f = cf.read(self.filename)[0] - t = cf.DimensionCoordinate( - data=cf.Data([123], "days since 1750-1-1") - ) - - t.standard_name = "time" - axisT = f.set_construct(cf.DomainAxis(1)) - f.set_construct(t, axes=[axisT]) - cf.write( - f, - tmpfile, - fmt="NETCDF4", - reference_datetime=reference_datetime, - ) - g = cf.read(tmpfile)[0] - t = g.dimension_coordinate("T") - self.assertEqual( - t.Units, - cf.Units("days since " + reference_datetime), - ( - "Units written were " - + repr(t.Units.reftime) - + " not " - + repr(reference_datetime) - ), - ) - - cf.chunksize(self.original_chunksize) + cf.write(self.f0, tmpfile, reference_datetime=reference_datetime) + + g = cf.read(tmpfile)[0] + + t = g.dimension_coordinate("T") + self.assertEqual( + t.Units, + cf.Units("days since " + reference_datetime), + f"Units written were {t.Units.reftime!r} not " + f"{reference_datetime!r}" + ) def test_read_write_unlimited(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - for fmt in ("NETCDF4", "NETCDF3_CLASSIC"): - f = cf.read(self.filename)[0] - domain_axes = f.domain_axes(view=True) + f = self.f1.copy() + domain_axes = f.domain_axes() domain_axes["domainaxis0"].nc_set_unlimited(True) cf.write(f, tmpfile, fmt=fmt) f = cf.read(tmpfile)[0] - domain_axes = f.domain_axes(view=True) + domain_axes = f.domain_axes() self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited()) fmt = "NETCDF4" - f = cf.read(self.filename)[0] - domain_axes = f.domain_axes(view=True) + f = self.f1.copy() + domain_axes = f.domain_axes() domain_axes["domainaxis0"].nc_set_unlimited(True) domain_axes["domainaxis2"].nc_set_unlimited(True) cf.write(f, tmpfile, fmt=fmt) f = cf.read(tmpfile)[0] - domain_axes = f.domain_axes(view=True) + domain_axes = f.domain_axes() self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited()) self.assertTrue(domain_axes["domainaxis2"].nc_is_unlimited()) def test_read_pp(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - p = cf.read("wgdos_packed.pp")[0] p0 = cf.read( "wgdos_packed.pp", @@ -458,9 +379,6 @@ def 
test_read_pp(self): self.assertTrue(p.equals(p0, verbose=2)) def test_read_CDL(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - subprocess.run( " ".join(["ncdump", self.filename, ">", tmpfile]), shell=True, @@ -499,15 +417,11 @@ def test_read_CDL(self): cf.read("test_read_write.py") def test_read_write_string(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.string_filename) n = int(len(f) / 2) - for i in range(0, n): - + for i in range(n): j = i + n self.assertTrue( f[i].data.equals(f[j].data, verbose=1), @@ -518,22 +432,18 @@ def test_read_write_string(self): "{!r} {!r}".format(f[j], f[i]), ) - f0 = cf.read(self.string_filename) for string0 in (True, False): for fmt0 in ("NETCDF4", "NETCDF3_CLASSIC"): - cf.write(f0, tmpfile0, fmt=fmt0, string=string0) + cf.write(f, tmpfile0, fmt=fmt0, string=string0) for string1 in (True, False): for fmt1 in ("NETCDF4", "NETCDF3_CLASSIC"): - cf.write(f0, tmpfile1, fmt=fmt1, string=string1) + cf.write(f, tmpfile1, fmt=fmt1, string=string1) for i, j in zip(cf.read(tmpfile1), cf.read(tmpfile0)): self.assertTrue(i.equals(j, verbose=1)) def test_read_broken_bounds(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return - f = cf.read(self.broken_bounds, verbose=0) self.assertEqual(len(f), 2) From d54938c450ef07e0ee9ffd5240d283b432f3571b Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 9 Apr 2021 00:42:31 +0100 Subject: [PATCH 21/53] devs --- cf/data/data.py | 4 ++-- cf/field.py | 20 +++++++++++------ cf/functions.py | 5 ----- cf/regrid.py | 3 +-- cf/test/test_Regrid.py | 51 +++++++++++++++++++++++++----------------- 5 files changed, 46 insertions(+), 37 deletions(-) diff --git a/cf/data/data.py b/cf/data/data.py index 7bd0c455f2..414137ed35 100644 --- a/cf/data/data.py +++ b/cf/data/data.py @@ -11024,7 +11024,7 @@ def equals( config = self.partition_configuration(readonly=True) other.to_memory() - + import numpy for partition in self.partitions.matrix.flat: partition.open(config) array0 = partition.array @@ -11032,7 +11032,7 @@ def equals( partition.close() if not _numpy_allclose( - array0, array1, rtol=float(rtol), atol=float(atol) + array0, array1, rtol=float(rtol), atol=float(atol) ): logger.info( "{0}: Different array values (atol={1}, " diff --git a/cf/field.py b/cf/field.py index 684965467d..4a7c1ecd4d 100644 --- a/cf/field.py +++ b/cf/field.py @@ -3685,21 +3685,23 @@ def _regrid_get_regridded_data( """ if method in conservative_regridding_methods: - frac = dstfracfield.data[...].copy() + frac = dstfracfield.data.copy() if fracfield: regridded_data = frac else: frac[frac == 0.0] = 1.0 regridded_data = numpy_ma_MaskedArray( - dstfield.data[...].copy() / frac, + dstfield.data / frac, mask=(dstfield.data == self.fill_value(default="netCDF")), ) else: + print (99999, dstfield.data.max(), self.fill_value(default="netCDF")) + mask = dstfield.data == self.fill_value(default="netCDF") + print (mask.sum()) regridded_data = numpy_ma_MaskedArray( - dstfield.data[...].copy(), - mask=(dstfield.data == self.fill_value(default="netCDF")), - ) - + dstfield.data.copy(), + mask=mask) + return regridded_data def _regrid_update_coordinate_references( @@ -19998,13 +20000,16 @@ def regrids( # Data object. Note that the reshape is necessary to # replace any size 1 dimensions that we squeezed out # earlier. 
+ print ('DDD', type(regridded_data)) sections[k] = Data( regridded_data.transpose(src_order).reshape(shape), units=self.Units, ) # Construct new data from regridded sections + print (sections) new_data = Data.reconstruct_sectioned_data(sections) + print ('A@ 0', type(new_data.array)) # Construct new field. # Note: cannot call `_inplace_enabled_define_and_cleanup(self)` to @@ -20049,6 +20054,7 @@ def regrids( f._regrid_copy_coordinate_references(dst, dst_axis_keys) # Insert regridded data into new field + print ('A@', type(new_data.array)) f.set_data(new_data, axes=self.get_data_axes(), copy=False) # Set the cyclicity of the destination longitude @@ -20459,7 +20465,7 @@ def regridc( # create sections that exceed 1 chunk of memory proceed to get # the coordinate and associated data for the extra dimension. if src_shape[src_axis_indices].prod() * max_length * 8 < ( - chunksize() + float(chunksize()) ): axis_keys_ext, coords_ext = f._regrid_get_cartesian_coords( "source", [max_ind] diff --git a/cf/functions.py b/cf/functions.py index 3a20e53dcf..e1cc43c7df 100644 --- a/cf/functions.py +++ b/cf/functions.py @@ -1828,11 +1828,6 @@ def _numpy_allclose(a, b, rtol=None, atol=None, verbose=None): return False - # if verbose: - # print('Different masks 4') - # - # return False - try: return _numpy_ma_allclose(a, b, rtol=rtol, atol=atol) except (IndexError, NotImplementedError, TypeError): diff --git a/cf/regrid.py b/cf/regrid.py index fcf6482f58..ebbe7397c2 100644 --- a/cf/regrid.py +++ b/cf/regrid.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from numpy import array as numpy_array from numpy import empty as numpy_empty from numpy import where as numpy_where @@ -121,7 +120,7 @@ def initialize(): "The ESMF package is needed to support regridding." ) - manager = ESMF.Manager(debug=regrid_logging()) + manager = ESMF.Manager(debug=bool(regrid_logging())) return manager diff --git a/cf/test/test_Regrid.py b/cf/test/test_Regrid.py index 6e70222eb0..f5ea779c1f 100644 --- a/cf/test/test_Regrid.py +++ b/cf/test/test_Regrid.py @@ -1,6 +1,5 @@ import datetime import faulthandler -import inspect import os import unittest @@ -43,17 +42,8 @@ class RegridTest(unittest.TestCase): chunk_sizes = (300, 10000, 100000)[::-1] - test_only = [] - # test_only = ('NOTHING!!!!!',) - # test_only = ('test_Field_regrids',) - # test_only = ('test_Field_regridc',) - # test_only('test_Field_section',) - # test_only('test_Data_section',) - @unittest.skipUnless(cf._found_ESMF, "Requires esmf package.") def test_Field_regrids(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return self.assertFalse(cf.regrid_logging()) with cf.atol(1e-12): for chunksize in self.chunk_sizes: @@ -99,31 +89,50 @@ def test_Field_regrids(self): f5 = cf.read(self.filename5)[0] r = f1.regrids(f5, "linear") - self.assertTrue( - f4.equals(r), - "destination=regional Field, CHUNKSIZE={}".format( - chunksize - ), - ) - r = f1.regrids(f5, method="linear") + print (f1) + print (f5) + print (r) + print (f4) +# print (f1.array.mask.sum()) + #print (r.array - f4.array) + #print (abs(f4.array).max()) + #print (abs(r.array - f4.array).max()) + #print (abs(r.array - f4.array)) + # + #print (cf.atol()) + #print ((float(cf.atol()) + float(cf.rtol()) * abs(f4.array)).max()) + # + #for a, b in zip(r.array.flat, f4.array.flat): + # print (abs(a-b), float(cf.atol()) + float(cf.rtol()) * abs(b)) + # if abs(a-b) > float(cf.atol()) + float(cf.rtol()) * abs(b): + # raise ValueError() + + self.assertTrue(f4.data.equals(r.data, verbose=-1)) + print (1/0) 
self.assertTrue( - f4.equals(r), + f4.equals(r, verbose=2), "destination=regional Field, CHUNKSIZE={}".format( chunksize ), ) +# r = f1.regrids(f5, method="linear") +# self.assertTrue( +# f4.equals(r), +# "destination=regional Field, CHUNKSIZE={}".format( +# chunksize +# ), +# ) + f6 = cf.read(self.filename6)[0] with self.assertRaises(Exception): f1.regridc(f6, axes="T", method="linear") @unittest.skipUnless(cf._found_ESMF, "Requires esmf package.") def test_Field_regridc(self): - if self.test_only and inspect.stack()[0][3] not in self.test_only: - return self.assertFalse(cf.regrid_logging()) - with cf.atol(1e-12): + with cf.atol(1e-11): for chunksize in self.chunk_sizes: self.assertFalse(cf.regrid_logging()) with cf.chunksize(chunksize): From 2be31b12b95d3e9637467c18179c1e3ca03d95d0 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 9 Apr 2021 10:53:24 +0100 Subject: [PATCH 22/53] _cyclic --- cf/field.py | 279 +++++++++++++++++++++++------------------ cf/test/test_Regrid.py | 49 ++------ 2 files changed, 168 insertions(+), 160 deletions(-) diff --git a/cf/field.py b/cf/field.py index 4a7c1ecd4d..b536b55216 100644 --- a/cf/field.py +++ b/cf/field.py @@ -244,6 +244,8 @@ "data_dimension", ["size", "axis", "key", "coord", "coord_type", "scalar"] ) +_empty_set = set() + class Field(mixin.PropertiesData, cfdm.Field): """A field construct of the CF data model. @@ -590,6 +592,20 @@ def __setitem__(self, indices, value): data = self.get_data(_fill_value=False) data[indices] = value + @property + def _cyclic(self): + """Storage for axis cyclicity. Do not change the value in-place.""" + return self._custom.get("_cyclic", _empty_set) + + @_cyclic.setter + def _cyclic(self, value): + """value must be a set. Do not change the value in-place.""" + self._custom["_cyclic"] = value + + @_cyclic.deleter + def _cyclic(self): + self._custom["_cyclic"] = _empty_set + def analyse_items(self, relaxed_identities=None): """Analyse a domain. @@ -3061,6 +3077,8 @@ def _regrid_get_latlong(self, name, axes=None): and Y coordinates are returned, which are not long/lat. """ + data_axes = self.constructs.data_axes() + if axes is None: # Retrieve the field construct's X and Y dimension # coordinates @@ -3083,8 +3101,8 @@ def _regrid_get_latlong(self, name, axes=None): ), ) - x_axis = self.get_data_axes(x_key)[0] - y_axis = self.get_data_axes(y_key)[0] + x_axis = data_axes[x_key][0] + y_axis = data_axes[y_key][0] x_size = x.size y_size = y.size @@ -3103,7 +3121,7 @@ def _regrid_get_latlong(self, name, axes=None): # Axes specified by integer position in dimensions of # lat and lon 2-d auxiliary coordinates if axes["X"] == axes["Y"]: - raise ValueError("TODO") + raise ValueError("TODO 0") lon_key, lon = self.auxiliary_coordinate( "X", item=True, filter_by_naxes=(2,), default=(None, None) @@ -3119,8 +3137,8 @@ def _regrid_get_latlong(self, name, axes=None): if lat.shape != lon.shape: raise ValueError("TODO 222222") - lon_axes = self.get_data_axes(lon_key) - lat_axes = self.get_data_axes(lat_key) + lon_axes = data_axes[lon_key] + lat_axes = data_axes[lat_key] if lat_axes != lon_axes: raise ValueError("TODO 3333333") @@ -3192,13 +3210,13 @@ def _regrid_get_latlong(self, name, axes=None): ) if axes is not None: - if set(axis_keys) != set(self.get_data_axes(x_key)): + if set(axis_keys) != set(data_axes[x_key]): raise ValueError( "Axes of longitude do not match " f"those specified for {name} field." 
) - if set(axis_keys) != set(self.get_data_axes(y_key)): + if set(axis_keys) != set(data_axes[y_key]): raise ValueError( "Axes of latitude do not match " f"those specified for {name} field." @@ -3279,14 +3297,16 @@ def _regrid_get_axis_indices(self, axis_keys, i=False): A numpy array of the rank order of the axes. """ + data_axes = self.get_data_axes() + # Get the positions of the axes axis_indices = [] for axis_key in axis_keys: try: - axis_index = self.get_data_axes().index(axis_key) + axis_index = data_axes.index(axis_key) except ValueError: self.insert_dimension(axis_key, position=0, inplace=True) - axis_index = self.get_data_axes().index(axis_key) + axis_index = data_axes.index(axis_key) axis_indices.append(axis_index) @@ -3347,7 +3367,7 @@ def _regrid_get_section_shape(self, axis_sizes, axis_indices): """ - shape = [1] * self.data.ndim + shape = [1] * self.ndim for i, axis_index in enumerate(axis_indices): shape[axis_index] = axis_sizes[i] @@ -3385,37 +3405,39 @@ def _regrid_check_bounds( `None` """ - if method in conservative_regridding_methods: - for x, coords in zip( - ("Source", "Destination"), (src_coords, dst_coords) - ): - for coord in coords: - if not coord.has_bounds(): - raise ValueError( - f"{x} {coord!r} coordinates must have bounds " - "for conservative regridding." - ) + if method not in conservative_regridding_methods: + return + + for name, coords in zip( + ("Source", "Destination"), (src_coords, dst_coords) + ): + for coord in coords: + if not coord.has_bounds(): + raise ValueError( + f"{name} {coord!r} coordinates must have bounds " + "for conservative regridding." + ) - if not coord.contiguous(overlap=False): - raise ValueError( - f"{x} {coord!r} coordinates must have " - "contiguous, non-overlapping bounds " - "for conservative regridding." - ) + if not coord.contiguous(overlap=False): + raise ValueError( + f"{name} {coord!r} coordinates must have " + "contiguous, non-overlapping bounds " + "for conservative regridding." + ) - if ext_coords is not None: - for coord in ext_coords: - if not coord.has_bounds(): - raise ValueError( - f"{coord!r} dimension coordinates must have " - "bounds for conservative regridding." - ) - if not coord.contiguous(overlap=False): - raise ValueError( - f"{coord!r} dimension coordinates must have " - "contiguous, non-overlapping bounds " - "for conservative regridding." - ) + if ext_coords is not None: + for coord in ext_coords: + if not coord.has_bounds(): + raise ValueError( + f"{coord!r} dimension coordinates must have " + "bounds for conservative regridding." + ) + if not coord.contiguous(overlap=False): + raise ValueError( + f"{coord!r} dimension coordinates must have " + "contiguous, non-overlapping bounds " + "for conservative regridding." + ) @classmethod def _regrid_check_method(cls, method): @@ -3432,11 +3454,13 @@ def _regrid_check_method(cls, method): elif method not in regridding_methods: raise ValueError(f"Can't regrid: Invalid method: {method!r}") + elif method == "bilinear": # TODO use logging.info() once have logging print( "Note the 'bilinear' method argument has been renamed to " "'linear' at version 3.2.0. It is still supported for now " - "but please use 'linear' in future." + "but please use 'linear' in future. " + "'bilinear' will be removed at version 4.0.0" ) @classmethod @@ -3494,9 +3518,8 @@ def _regrid_get_reordered_sections( # Data.section. However, we don't have it, so this allows us to # possibibly reduce the number of trasnistions between different masks # - each change is slow. 
- - # dimensions_coordinates = self.dimension_coordinates(view=True) - + data_axes = self.get_data_axes() + axis_indices = [] if axis_order is not None: for axis in axis_order: @@ -3510,15 +3533,13 @@ def _regrid_get_reordered_sections( raise ValueError("Cannot loop over regridding axes.") try: - axis_indices.append( - self.get_data_axes().index(axis_key) - ) + axis_indices.append(data_axes.index(axis_key)) except ValueError: # The axis has been squeezed so do nothing pass else: - raise ValueError("Axis not found: " + str(axis)) + raise ValueError(f"Axis not found: {axis!r}") # Section the data sections = self.data.section(regrid_axis_indices) @@ -3559,9 +3580,9 @@ def _regrid_get_destination_mask( A numpy array with the mask. """ - indices = { - axis: [0] for axis in self.get_data_axes() if axis not in axes - } + data_axes = self.get_data_axes() + + indices = {axis: [0] for axis in data_axes if axis not in axes} f = self.subspace(**indices) f = f.squeeze(tuple(indices)).transpose(dst_order) @@ -3641,6 +3662,8 @@ def _regrid_compute_field_mass( "Expected _compute_field_mass to be a dictionary." ) + fill_value = self.fill_value(default="netCDF") + # Calculate the mass of the source field srcareafield = Regrid.create_field(srcgrid, "srcareafield") srcmass = Regrid.compute_mass_grid( @@ -3648,13 +3671,13 @@ def _regrid_compute_field_mass( srcareafield, dofrac=True, fracfield=srcfracfield, - uninitval=self.fill_value(default="netCDF"), + uninitval=fill_value, ) # Calculate the mass of the destination field dstareafield = Regrid.create_field(dstgrid, "dstareafield") dstmass = Regrid.compute_mass_grid( - dstfield, dstareafield, uninitval=self.fill_value(default="netCDF") + dstfield, dstareafield, uninitval=fill_value ) # Insert the two masses into the dictionary for comparison @@ -3695,12 +3718,10 @@ def _regrid_get_regridded_data( mask=(dstfield.data == self.fill_value(default="netCDF")), ) else: - print (99999, dstfield.data.max(), self.fill_value(default="netCDF")) - mask = dstfield.data == self.fill_value(default="netCDF") - print (mask.sum()) regridded_data = numpy_ma_MaskedArray( dstfield.data.copy(), - mask=mask) + mask=(dstfield.data == self.fill_value(default="netCDF")), + ) return regridded_data @@ -3758,13 +3779,17 @@ def _regrid_update_coordinate_references( regridding. 
""" - domain_axes = None domain_ancillaries = self.domain_ancillaries(todict=True) + # Initialise cached value for domain_axes + domain_axes = None + + data_axes = self.constructs.data_axes() + for key, ref in self.coordinate_references(todict=True).items(): ref_axes = [] for k in ref.coordinates(): - ref_axes.extend(self.get_data_axes(k)) + ref_axes.extend(data_axes[k]) if set(ref_axes).intersection(src_axis_keys): self.del_construct(key) @@ -3787,8 +3812,8 @@ def _regrid_update_coordinate_references( filter_by_axis=(x, y), axis_mode="exact", key=True, - default=False, - ): + default=None, + ) is not None: # Convert the domain ancillary into an independent # field value = self.convert(key) @@ -3819,10 +3844,10 @@ def _regrid_update_coordinate_references( ref.coordinate_conversion.set_domain_ancillary( term, key ) - d_axes = self.get_data_axes(key) + d_axes = data_axes[key] domain_axes = self.domain_axes( - todict=True, cached=domain_axes + cached=domain_axes, todict=True ) for k_s, new_size in zip( @@ -3854,10 +3879,12 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): `None` """ + dst_data_axes = dst.constructs.data_axes() + for ref in dst.coordinate_references(todict=True).values(): axes = set() for key in ref.coordinates(): - axes.update(dst.get_data_axes(key)) + axes.update(dst_data_axes[key]) if axes and set(axes).issubset(dst_axis_keys): # This coordinate reference's coordinates span the X @@ -3932,46 +3959,40 @@ def _regrid_update_coordinates( # could save some lines of code. # Remove the source coordinates of new field - # self.remove_items(axes=src_axis_keys) - # for key in self.constructs.filter_by_axis('or', *src_axis_keys): for key in self.coordinates( filter_by_axis=src_axis_keys, axis_mode="or", todict=True ): self.del_construct(key) domain_axes = self.domain_axes(todict=True) - # dst_auxiliary_coordinates = None if cartesian: - # Make axes map - if not dst_dict: - axis_map = {} - for k_s, k_d in zip(src_axis_keys, dst_axis_keys): - axis_map[k_d] = k_s - # Insert coordinates from dst into new field if dst_dict: - for k_s, d in zip(src_axis_keys, dst_coords): - domain_axes[k_s].set_size(d.size) - self.set_construct(d, axes=[k_s]) + for k_s, coord in zip(src_axis_keys, dst_coords): + domain_axes[k_s].set_size(coord.size) + self.set_construct(coord, axes=[k_s]) else: - for k_d in dst_axis_keys: - d = dst.dimension_coordinate(k_d) - k_s = axis_map[k_d] - domain_axes[k_s].set_size(d.size) - self.set_construct(d, axes=[k_s]) - - # dst_auxiliary_coordinates = dst.auxiliary_coordinates( - # view=True, cached=dst_auxiliary_coordinates - # ) + axis_map = { + key_d: key_s + for key_s, key_d in zip(src_axis_keys, dst_axis_keys) + } + + for key_d in dst_axis_keys: + dim = dst.dimension_coordinate(key_d) + key_s = axis_map[key_d] + domain_axes[key_s].set_size(dim.size) + self.set_construct(dim, axes=[key_s]) + dst_data_axes = dst.constructs.data_axes() + for aux_key, aux in dst.auxiliary_coordinates( filter_by_axis=dst_axis_keys, axis_mode="subset", todict=True, ).items(): aux_axes = [ - axis_map[k_d] for k_d in dst.get_data_axes(aux_key) + axis_map[key_d] for key_d in dst_data_axes[aux_key] ] self.set_construct(aux, axes=aux_axes) else: @@ -3993,21 +4014,11 @@ def _regrid_update_coordinates( else: for coord, axis_key in zip(dst_coords, src_axis_keys): self.set_construct(coord, axes=[axis_key]) + else: - # dst_auxiliary_coordinates = dst.auxiliary_coordinates( - # view=True, cached=dst_auxiliary_coordinates - # )# - for src_axis_key, dst_axis_key in zip( src_axis_keys, 
dst_axis_keys ): - # try: - # self.set_construct( - # dst.dimension_coordinate(dst_axis_key), - # axes=[src_axis_key] - # ) - # except AttributeError: - # pass dim_coord = dst.dimension_coordinate( dst_axis_key, default=None ) @@ -6310,24 +6321,24 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): self, "cyclic", kwargs ) # pragma: no cover - data = self.get_data(None, _fill_value=False) - if data is None: - return set() - - data_axes = self.get_data_axes() - - old = set([data_axes[i] for i in data.cyclic()]) + old = self._cyclic.copy() if identity is None: return old - + axis = self.domain_axis(identity, key=True) - try: - data.cyclic(data_axes.index(axis), iscyclic) - except ValueError: - pass + data = self.get_data(None, _fill_value=False) + if data is not None: + try: + data_axes = self.get_data_axes() + data.cyclic(data_axes.index(axis), iscyclic) + except ValueError: + pass + # Never change _cyclic in-place if iscyclic: + self._cyclic = old.union((axis,)) + dim = self.dimension_coordinate(axis, default=None) if dim is not None: if period is not None: @@ -6336,7 +6347,11 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): raise ValueError( "A cyclic dimension coordinate must have a period" ) - + else: + cyclic = old.copy() + cyclic.discard(axis) + self._cyclic = cyclic + return old def weights( @@ -13283,6 +13298,9 @@ def set_data( data, axes=None, copy=copy, inplace=True ) +# # Apply cyclic axes +# data.cyclic([data_axes.index(axis) for axis in self._cyclic], True) + return f if data.isscalar: @@ -13383,6 +13401,12 @@ def set_data( super(cfdm.Field, f).set_data(data, axes=axes, copy=copy, inplace=True) + # Apply cyclic axes + data.cyclic( + [axes.index(axis) for axis in self._cyclic if axis in axes], + True + ) + return f def domain_mask(self, **kwargs): @@ -15307,20 +15331,21 @@ def autocyclic(self, key=None, coord=None, verbose=None): key, coord = self.dimension_coordinate( "X", item=True, default=(None, None) ) - - if coord is None: + if coord is None: + return False + elif not coord.X: return False - + bounds = coord.get_bounds(None) if bounds is None: self.cyclic(key, iscyclic=False) return False - + data = bounds.get_data(None, _fill_value=False) if data is None: self.cyclic(key, iscyclic=False) return False - + units = bounds.Units if units.islongitude: period = Data(360.0, units="degrees_east") @@ -15332,10 +15357,6 @@ def autocyclic(self, key=None, coord=None, verbose=None): period.Units = data.Units -# diff = bounds.last_element() - bounds.first_element() - -# if abs(bounds[-1, -1] - bounds[0, 0]) != period.array: -# if abs(bounds.last_element() - bounds.first_element()) != period: #.array: if abs(data.last_element() - data.first_element()) != period.array: self.cyclic(key, iscyclic=False) return False @@ -16813,6 +16834,18 @@ def dimension_coordinate( TODO """ +# if not filter_kwargs and len(identity) == 1 and identity[0] in self.domain_axes(todict=True): +# return self._select_construct( +# ("dimension_coordinate",), +# "dimension_coordinate", +# (), +# key=key, +# item=item, +# default=default, +# filter_by_axis=identity, +# axis_mode="exact", +# ) + c = self._select_construct( ("dimension_coordinate",), "dimension_coordinate", @@ -19924,6 +19957,7 @@ def regrids( ) srcfield = Regrid.create_field(srcgrid, "srcfield") srcfracfield = Regrid.create_field(srcgrid, "srcfracfield") + # (Re)initialise the regridder regridSrc2Dst = Regrid( srcfield, @@ -19949,6 +19983,7 @@ def regrids( ) srcfield = Regrid.create_field(srcgrid, 
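
The reworked `cyclic` method in this patch never modifies the stored set of cyclic axes in place: it copies the old value for the return, then builds a new set with `union`, or with `discard` applied to a copy, before reassigning `self._cyclic`. The same idea in isolation, with plain sets and no field involved:

    def update_cyclic(stored, axis, iscyclic):
        """Return (old, new) cyclic-axis sets without touching `stored`."""
        old = stored.copy()
        if iscyclic:
            new = old.union((axis,))
        else:
            new = old.copy()
            new.discard(axis)
        return old, new

    stored = {"domainaxis0"}
    old, new = update_cyclic(stored, "domainaxis2", True)
    assert old == {"domainaxis0"}
    assert new == {"domainaxis0", "domainaxis2"}
    assert stored == {"domainaxis0"}  # the original set is untouched

    old, new = update_cyclic(new, "domainaxis0", False)
    assert new == {"domainaxis2"}
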
"srcfield") srcfracfield = Regrid.create_field(srcgrid, "srcfracfield") + # Initialise the regridder. This also creates the # weights needed for the regridding. regridSrc2Dst = Regrid( @@ -19966,7 +20001,7 @@ def regrids( # field gets filled with a fill value, the source field # with the section's data) self._regrid_fill_fields(src_data, srcfield, dstfield) - + # Run regridding (dstfield is an ESMF field) dstfield = regridSrc2Dst.run_regridding(srcfield, dstfield) @@ -19987,7 +20022,7 @@ def regrids( dstgrid, dstfield, ) - + # Get the regridded data or frac field as a numpy array # (regridded_data is a numpy array) regridded_data = self._regrid_get_regridded_data( @@ -20000,16 +20035,13 @@ def regrids( # Data object. Note that the reshape is necessary to # replace any size 1 dimensions that we squeezed out # earlier. - print ('DDD', type(regridded_data)) sections[k] = Data( regridded_data.transpose(src_order).reshape(shape), units=self.Units, ) # Construct new data from regridded sections - print (sections) new_data = Data.reconstruct_sectioned_data(sections) - print ('A@ 0', type(new_data.array)) # Construct new field. # Note: cannot call `_inplace_enabled_define_and_cleanup(self)` to @@ -20054,7 +20086,6 @@ def regrids( f._regrid_copy_coordinate_references(dst, dst_axis_keys) # Insert regridded data into new field - print ('A@', type(new_data.array)) f.set_data(new_data, axes=self.get_data_axes(), copy=False) # Set the cyclicity of the destination longitude diff --git a/cf/test/test_Regrid.py b/cf/test/test_Regrid.py index f5ea779c1f..2c4d78d09a 100644 --- a/cf/test/test_Regrid.py +++ b/cf/test/test_Regrid.py @@ -51,6 +51,8 @@ def test_Field_regrids(self): f1 = cf.read(self.filename1)[0] f2 = cf.read(self.filename2)[0] f3 = cf.read(self.filename3)[0] + f4 = cf.read(self.filename4)[0] + f5 = cf.read(self.filename5)[0] r = f1.regrids(f2, "conservative") @@ -60,6 +62,7 @@ def test_Field_regrids(self): chunksize ), ) + r = f1.regrids(f2, method="conservative") self.assertTrue( @@ -68,7 +71,9 @@ def test_Field_regrids(self): chunksize ), ) + dst = {"longitude": f2.dim("X"), "latitude": f2.dim("Y")} + r = f1.regrids(dst, "conservative", dst_cyclic=True) self.assertTrue( @@ -77,7 +82,9 @@ def test_Field_regrids(self): chunksize ), ) + r = f1.regrids(dst, method="conservative", dst_cyclic=True) + self.assertTrue( f3.equals(r), "destination=global dict, CHUNKSIZE={}".format( @@ -85,31 +92,9 @@ def test_Field_regrids(self): ), ) - f4 = cf.read(self.filename4)[0] - f5 = cf.read(self.filename5)[0] + # Regrid global to regional roated pole + r = f1.regrids(f5, method="linear") - r = f1.regrids(f5, "linear") - - print (f1) - print (f5) - print (r) - print (f4) -# print (f1.array.mask.sum()) - #print (r.array - f4.array) - #print (abs(f4.array).max()) - #print (abs(r.array - f4.array).max()) - #print (abs(r.array - f4.array)) - # - #print (cf.atol()) - #print ((float(cf.atol()) + float(cf.rtol()) * abs(f4.array)).max()) - # - #for a, b in zip(r.array.flat, f4.array.flat): - # print (abs(a-b), float(cf.atol()) + float(cf.rtol()) * abs(b)) - # if abs(a-b) > float(cf.atol()) + float(cf.rtol()) * abs(b): - # raise ValueError() - - self.assertTrue(f4.data.equals(r.data, verbose=-1)) - print (1/0) self.assertTrue( f4.equals(r, verbose=2), "destination=regional Field, CHUNKSIZE={}".format( @@ -117,18 +102,10 @@ def test_Field_regrids(self): ), ) -# r = f1.regrids(f5, method="linear") -# self.assertTrue( -# f4.equals(r), -# "destination=regional Field, CHUNKSIZE={}".format( -# chunksize -# ), -# ) - - f6 = 
cf.read(self.filename6)[0] - with self.assertRaises(Exception): - f1.regridc(f6, axes="T", method="linear") - + f6 = cf.read(self.filename6)[0] + with self.assertRaises(Exception): + f1.regridc(f6, axes="T", method="linear") + @unittest.skipUnless(cf._found_ESMF, "Requires esmf package.") def test_Field_regridc(self): self.assertFalse(cf.regrid_logging()) From 96d28f7acd2d4fd49ffdb09629d4187f3afd6b7e Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 9 Apr 2021 10:55:08 +0100 Subject: [PATCH 23/53] linting --- cf/aggregate.py | 64 ++++++++++++---------- cf/constructs.py | 9 ++-- cf/data/data.py | 4 +- cf/field.py | 107 ++++++++++++++++++++----------------- cf/mixin/coordinate.py | 21 ++++---- cf/mixin/properties.py | 6 +-- cf/read_write/um/umread.py | 1 - cf/test/test_Regrid.py | 2 +- cf/test/test_read_write.py | 16 +++--- 9 files changed, 123 insertions(+), 107 deletions(-) diff --git a/cf/aggregate.py b/cf/aggregate.py index 06d043934f..d7b75b0d9f 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -233,7 +233,7 @@ def __init__( self.identity = f.get_property(field_identity, None) construct_axes = f.constructs.data_axes() - + # ------------------------------------------------------------ # # ------------------------------------------------------------ @@ -322,7 +322,7 @@ def __init__( info_dim = [] dim_coord_key, dim_coord = f.dimension_coordinate( - filter_by_axis=(axis,), item=True, default=(None, None) + filter_by_axis=(axis,), item=True, default=(None, None) ) dim_identity = None @@ -354,9 +354,11 @@ def __init__( # 'size' : None}) # Find the 1-d auxiliary coordinates which span this axis - aux_coords = {aux: auxs_1d.pop(aux) - for aux in tuple(auxs_1d) - if axis in construct_axes[aux]} + aux_coords = { + aux: auxs_1d.pop(aux) + for aux in tuple(auxs_1d) + if axis in construct_axes[aux] + } info_aux = [] for key, aux_coord in aux_coords.items(): @@ -517,7 +519,9 @@ def __init__( ) # Find axes' canonical identities - axes = [self.axis_to_id[axis] for axis in construct_axes[key]] #f.get_data_axes(key)] + axes = [ + self.axis_to_id[axis] for axis in construct_axes[key] + ] # f.get_data_axes(key)] axes = tuple(sorted(axes)) self.field_anc[identity] = { @@ -629,9 +633,9 @@ def __init__( self.field = self.field.copy() # copy as will delete msr f = self.field copied_field = True - + f.del_construct(key) - + if is_log_level_info(logger): logger.info( f"Removed {msr.identity()!r} construct from a copy " @@ -640,7 +644,7 @@ def __init__( "is not possible to determine the influence the " "aggregation process should have on it." ) - + continue if not self.cell_measure_has_data_and_units(msr): @@ -656,7 +660,7 @@ def __init__( ) # Find axes' canonical identities - axes = [self.axis_to_id[axis] for axis in construct_axes[key]] + axes = [self.axis_to_id[axis] for axis in construct_axes[key]] axes = tuple(sorted(axes)) if units in info_msr: @@ -780,9 +784,8 @@ def coordinate_values(self): return "\n".join(string) def copy(self): - """Replace the field associated with a summary class with a deep copy. - - """ + """Replace the field associated with a summary class with a deep + copy.""" new = _Meta.__new__(_Meta) new.__dict__ = self.__dict__.copy() new.field = new.field.copy() @@ -950,7 +953,8 @@ def coord_has_identity_and_data(self, coord, axes=None): self.message = f"{coord!r} has no identity or no data" def field_ancillary_has_identity_and_data(self, anc): - """Return a field ancillary's identity if it has one and has data. 
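
The aggregation changes above rewrite the 1-d auxiliary coordinate bookkeeping as a dictionary comprehension that pops matching entries out of `auxs_1d` while iterating over a snapshot of its keys. The same move in isolation (the keys and the axis test below are invented for illustration):

    auxs_1d = {"aux0": "lat", "aux1": "lon", "aux2": "region"}
    construct_axes = {"aux0": ("axis1",), "aux1": ("axis2",), "aux2": ("axis1",)}
    axis = "axis1"

    # Iterate over a tuple copy of the keys so that popping entries from
    # auxs_1d during the comprehension is safe
    aux_coords = {
        aux: auxs_1d.pop(aux)
        for aux in tuple(auxs_1d)
        if axis in construct_axes[aux]
    }

    assert aux_coords == {"aux0": "lat", "aux2": "region"}
    assert auxs_1d == {"aux1": "lon"}
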
+ """Return a field ancillary's identity if it has one and has + data. :Parameters: @@ -967,7 +971,7 @@ def field_ancillary_has_identity_and_data(self, anc): strict=self.strict_identities, relaxed=self.relaxed_identities, nc_only=self.ncvar_identities, - default=None + default=None, ) if identity is not None: @@ -983,12 +987,12 @@ def field_ancillary_has_identity_and_data(self, anc): # Still here? self.message = ( - f"{anc.identity()!r} field ancillary has no identity or " - "no data" + f"{anc.identity()!r} field ancillary has no identity or " "no data" ) def coordinate_reference_signatures(self, refs): - """List the structural signatures of given coordinate references. + """List the structural signatures of given coordinate + references. :Parameters: @@ -1026,7 +1030,8 @@ def coordinate_reference_signatures(self, refs): return signatures def domain_ancillary_has_identity_and_data(self, anc, identity=None): - """Return a domain ancillary's identity if it has one and has data. + """Return a domain ancillary's identity if it has one and has + data. :Parameters: @@ -1048,7 +1053,7 @@ def domain_ancillary_has_identity_and_data(self, anc, identity=None): strict=self.strict_identities, relaxed=self.relaxed_identities, nc_only=self.ncvar_identities, - default=None + default=None, ) if anc_identity is None: @@ -1086,7 +1091,7 @@ def print_info(self, signature=True): """ if not is_log_level_detail(logger): - return + return if signature: logger.detail( @@ -2002,8 +2007,8 @@ def aggregate( f"Unaggregatable {m1.field.identity()!r} " f"fields have{exclude} been output: " f"{m1.message}" - ) - + ) + unaggregatable = True break @@ -2936,11 +2941,12 @@ def _aggregate_2_fields( # ---------------------------------------------------------------- # Map the axes of field1 to those of field0 # ---------------------------------------------------------------- - dim1_name_map = {m1.id_to_axis[identity]: m0.id_to_axis[identity] - for identity in m0.axis_ids} + dim1_name_map = { + m1.id_to_axis[identity]: m0.id_to_axis[identity] + for identity in m0.axis_ids + } - dim0_name_map = {axis0: axis1 - for axis1, axis0 in dim1_name_map.items()} + dim0_name_map = {axis0: axis1 for axis1, axis0 in dim1_name_map.items()} # ---------------------------------------------------------------- # In each field, find the identifier of the aggregating axis. 
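
The two dictionary comprehensions above first map each of field1's axis keys to the matching key in field0 through the shared canonical identities, and then invert that mapping. Stripped of the aggregation machinery, the pattern is roughly this (the identifiers are invented):

    axis_ids = ["time", "latitude", "longitude"]
    id_to_axis0 = {"time": "dim0", "latitude": "dim1", "longitude": "dim2"}
    id_to_axis1 = {"time": "dimA", "latitude": "dimB", "longitude": "dimC"}

    dim1_name_map = {
        id_to_axis1[identity]: id_to_axis0[identity] for identity in axis_ids
    }
    dim0_name_map = {axis0: axis1 for axis1, axis0 in dim1_name_map.items()}

    assert dim1_name_map["dimB"] == "dim1"
    assert dim0_name_map["dim1"] == "dimB"
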
@@ -3030,7 +3036,7 @@ def _aggregate_2_fields( hash_value0 = anc0["hash_value"] hash_value1 = anc1["hash_value"] - + anc0["hash_value"] = hash_value0 + hash_value1 # Domain ancillaries @@ -3046,7 +3052,7 @@ def _aggregate_2_fields( hash_value0 = anc0["hash_value"] hash_value1 = anc1["hash_value"] - + anc0["hash_value"] = hash_value0 + hash_value1 # ---------------------------------------------------------------- diff --git a/cf/constructs.py b/cf/constructs.py index 7321ccf985..3bc4e797ce 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -1,5 +1,3 @@ -from functools import partial - import cfdm from .query import Query @@ -167,9 +165,10 @@ def _filter_by_identity(self, arg, todict, _config, identities): """ # Allow keys without the 'key%' prefix construct_types = self._construct_type - identities = ["key%" + i if i in construct_types else i - for i in identities] - + identities = [ + "key%" + i if i in construct_types else i for i in identities + ] + ctypes = [i for i in "XTYZ" if i in identities] config = {"identities_kwargs": {"ctypes": ctypes}} diff --git a/cf/data/data.py b/cf/data/data.py index 414137ed35..7bd0c455f2 100644 --- a/cf/data/data.py +++ b/cf/data/data.py @@ -11024,7 +11024,7 @@ def equals( config = self.partition_configuration(readonly=True) other.to_memory() - import numpy + for partition in self.partitions.matrix.flat: partition.open(config) array0 = partition.array @@ -11032,7 +11032,7 @@ def equals( partition.close() if not _numpy_allclose( - array0, array1, rtol=float(rtol), atol=float(atol) + array0, array1, rtol=float(rtol), atol=float(atol) ): logger.info( "{0}: Different array values (atol={1}, " diff --git a/cf/field.py b/cf/field.py index b536b55216..4dd8de1a6e 100644 --- a/cf/field.py +++ b/cf/field.py @@ -594,12 +594,20 @@ def __setitem__(self, indices, value): @property def _cyclic(self): - """Storage for axis cyclicity. Do not change the value in-place.""" + """Storage for axis cyclicity. + + Do not change the value in-place. + + """ return self._custom.get("_cyclic", _empty_set) @_cyclic.setter def _cyclic(self, value): - """value must be a set. Do not change the value in-place.""" + """value must be a set. + + Do not change the value in-place. + + """ self._custom["_cyclic"] = value @_cyclic.deleter @@ -3078,7 +3086,7 @@ def _regrid_get_latlong(self, name, axes=None): """ data_axes = self.constructs.data_axes() - + if axes is None: # Retrieve the field construct's X and Y dimension # coordinates @@ -3298,7 +3306,7 @@ def _regrid_get_axis_indices(self, axis_keys, i=False): """ data_axes = self.get_data_axes() - + # Get the positions of the axes axis_indices = [] for axis_key in axis_keys: @@ -3407,7 +3415,7 @@ def _regrid_check_bounds( """ if method not in conservative_regridding_methods: return - + for name, coords in zip( ("Source", "Destination"), (src_coords, dst_coords) ): @@ -3454,7 +3462,7 @@ def _regrid_check_method(cls, method): elif method not in regridding_methods: raise ValueError(f"Can't regrid: Invalid method: {method!r}") - + elif method == "bilinear": # TODO use logging.info() once have logging print( "Note the 'bilinear' method argument has been renamed to " @@ -3519,7 +3527,7 @@ def _regrid_get_reordered_sections( # possibibly reduce the number of trasnistions between different masks # - each change is slow. 
data_axes = self.get_data_axes() - + axis_indices = [] if axis_order is not None: for axis in axis_order: @@ -3581,7 +3589,7 @@ def _regrid_get_destination_mask( """ data_axes = self.get_data_axes() - + indices = {axis: [0] for axis in data_axes if axis not in axes} f = self.subspace(**indices) @@ -3663,7 +3671,7 @@ def _regrid_compute_field_mass( ) fill_value = self.fill_value(default="netCDF") - + # Calculate the mass of the source field srcareafield = Regrid.create_field(srcgrid, "srcareafield") srcmass = Regrid.compute_mass_grid( @@ -3721,8 +3729,8 @@ def _regrid_get_regridded_data( regridded_data = numpy_ma_MaskedArray( dstfield.data.copy(), mask=(dstfield.data == self.fill_value(default="netCDF")), - ) - + ) + return regridded_data def _regrid_update_coordinate_references( @@ -3785,7 +3793,7 @@ def _regrid_update_coordinate_references( domain_axes = None data_axes = self.constructs.data_axes() - + for key, ref in self.coordinate_references(todict=True).items(): ref_axes = [] for k in ref.coordinates(): @@ -3808,12 +3816,15 @@ def _regrid_update_coordinate_references( # then regrid it, otherwise remove it x = self.domain_axis("X", key=True) y = self.domain_axis("Y", key=True) - if self.domain_ancillary( - filter_by_axis=(x, y), - axis_mode="exact", - key=True, - default=None, - ) is not None: + if ( + self.domain_ancillary( + filter_by_axis=(x, y), + axis_mode="exact", + key=True, + default=None, + ) + is not None + ): # Convert the domain ancillary into an independent # field value = self.convert(key) @@ -3880,7 +3891,7 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): """ dst_data_axes = dst.constructs.data_axes() - + for ref in dst.coordinate_references(todict=True).values(): axes = set() for key in ref.coordinates(): @@ -3985,7 +3996,7 @@ def _regrid_update_coordinates( self.set_construct(dim, axes=[key_s]) dst_data_axes = dst.constructs.data_axes() - + for aux_key, aux in dst.auxiliary_coordinates( filter_by_axis=dst_axis_keys, axis_mode="subset", @@ -4014,7 +4025,7 @@ def _regrid_update_coordinates( else: for coord, axis_key in zip(dst_coords, src_axis_keys): self.set_construct(coord, axes=[axis_key]) - + else: for src_axis_key, dst_axis_key in zip( src_axis_keys, dst_axis_keys @@ -6324,7 +6335,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): old = self._cyclic.copy() if identity is None: return old - + axis = self.domain_axis(identity, key=True) data = self.get_data(None, _fill_value=False) @@ -6338,7 +6349,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): # Never change _cyclic in-place if iscyclic: self._cyclic = old.union((axis,)) - + dim = self.dimension_coordinate(axis, default=None) if dim is not None: if period is not None: @@ -6351,7 +6362,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): cyclic = old.copy() cyclic.discard(axis) self._cyclic = cyclic - + return old def weights( @@ -13298,9 +13309,9 @@ def set_data( data, axes=None, copy=copy, inplace=True ) -# # Apply cyclic axes -# data.cyclic([data_axes.index(axis) for axis in self._cyclic], True) - + # # Apply cyclic axes + # data.cyclic([data_axes.index(axis) for axis in self._cyclic], True) + return f if data.isscalar: @@ -13403,10 +13414,9 @@ def set_data( # Apply cyclic axes data.cyclic( - [axes.index(axis) for axis in self._cyclic if axis in axes], - True + [axes.index(axis) for axis in self._cyclic if axis in axes], True ) - + return f def domain_mask(self, **kwargs): @@ -15335,7 +15345,7 @@ def autocyclic(self, 
key=None, coord=None, verbose=None): return False elif not coord.X: return False - + bounds = coord.get_bounds(None) if bounds is None: self.cyclic(key, iscyclic=False) @@ -16492,7 +16502,7 @@ def coordinate( filter_by_axis=(da_key,), axis_mode="exact", ) - + if default is None: return default @@ -16834,18 +16844,18 @@ def dimension_coordinate( TODO """ -# if not filter_kwargs and len(identity) == 1 and identity[0] in self.domain_axes(todict=True): -# return self._select_construct( -# ("dimension_coordinate",), -# "dimension_coordinate", -# (), -# key=key, -# item=item, -# default=default, -# filter_by_axis=identity, -# axis_mode="exact", -# ) - + # if not filter_kwargs and len(identity) == 1 and identity[0] in self.domain_axes(todict=True): + # return self._select_construct( + # ("dimension_coordinate",), + # "dimension_coordinate", + # (), + # key=key, + # item=item, + # default=default, + # filter_by_axis=identity, + # axis_mode="exact", + # ) + c = self._select_construct( ("dimension_coordinate",), "dimension_coordinate", @@ -17492,7 +17502,7 @@ def get_data_axes(self, identity=None, default=ValueError()): axes = super().get_data_axes(identity, default=None) if axes is not None: return axes - + key = self.construct_key(identity, default=None) if key is not None: return super().get_data_axes(key=key, default=default) @@ -17501,8 +17511,7 @@ def get_data_axes(self, identity=None, default=ValueError()): return default return self._default( - default, - f"Can't get axes for non-existent construct {identify!r}" + default, f"Can't get axes for non-existent construct {identity!r}" ) @_inplace_enabled(default=False) @@ -20001,7 +20010,7 @@ def regrids( # field gets filled with a fill value, the source field # with the section's data) self._regrid_fill_fields(src_data, srcfield, dstfield) - + # Run regridding (dstfield is an ESMF field) dstfield = regridSrc2Dst.run_regridding(srcfield, dstfield) @@ -20022,7 +20031,7 @@ def regrids( dstgrid, dstfield, ) - + # Get the regridded data or frac field as a numpy array # (regridded_data is a numpy array) regridded_data = self._regrid_get_regridded_data( diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 632531bfac..09347900a5 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -50,13 +50,14 @@ def ctype(self): if self.Y: return "Y" - + if self.Z: return "Z" @property def T(self): - """True if and only if the data are coordinates for a CF 'T' axis. + """True if and only if the data are coordinates for a CF 'T' + axis. CF 'T' axis coordinates are defined by having one or more of the following: @@ -91,7 +92,8 @@ def T(self): @property def X(self): - """True if and only if the data are coordinates for a CF 'X' axis. + """True if and only if the data are coordinates for a CF 'X' + axis. CF 'X' axis coordinates are defined by having one or more of the following: @@ -154,7 +156,8 @@ def X(self): @property def Y(self): - """True if and only if the data are coordinates for a CF 'Y' axis. + """True if and only if the data are coordinates for a CF 'Y' + axis. CF 'Y' axis coordinates are defined by having one or more of the following: @@ -203,7 +206,8 @@ def Y(self): @property def Z(self): - """True if and only if the data are coordinates for a CF 'Z' axis. + """True if and only if the data are coordinates for a CF 'Z' + axis. 
CF 'Z' axis coordinates are defined by having one or more of the following: @@ -492,9 +496,9 @@ def identity( 'no identity' """ - out = super().identity(strict=strict, - relaxed=relaxed, - nc_only=nc_only, default=None) + out = super().identity( + strict=strict, relaxed=relaxed, nc_only=nc_only, default=None + ) if out is not None: return out @@ -584,4 +588,3 @@ def _ctypes_iter(coord, ctypes): # This coordinate construct is of this type yield c return - diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index 8fd8a743fc..76f5373382 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -30,8 +30,8 @@ def __new__(cls, *args, **kwargs): """Store component classes. .. note:: If a child class requires a different component - classes than the ones defined here, then they must - be redefined in the child class. + classes than the ones defined here, then they must be + redefined in the child class. """ instance = super().__new__(cls) @@ -133,7 +133,7 @@ def id(self): 'foo' >>> del f.id - """ + """ try: return self._custom["id"] except KeyError: diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index f584a27f9e..5ce50aeb1c 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -3357,7 +3357,6 @@ def file_open(self, filename): ) - """ Problems: diff --git a/cf/test/test_Regrid.py b/cf/test/test_Regrid.py index 2c4d78d09a..68d468ddf6 100644 --- a/cf/test/test_Regrid.py +++ b/cf/test/test_Regrid.py @@ -105,7 +105,7 @@ def test_Field_regrids(self): f6 = cf.read(self.filename6)[0] with self.assertRaises(Exception): f1.regridc(f6, axes="T", method="linear") - + @unittest.skipUnless(cf._found_ESMF, "Requires esmf package.") def test_Field_regridc(self): self.assertFalse(cf.regrid_logging()) diff --git a/cf/test/test_read_write.py b/cf/test/test_read_write.py index e13b685730..75b0a7e622 100644 --- a/cf/test/test_read_write.py +++ b/cf/test/test_read_write.py @@ -59,7 +59,7 @@ class read_writeTest(unittest.TestCase): f0 = cf.example_field(0) f1 = cf.example_field(1) - + def test_write_filename(self): f = self.f0 a = f.array @@ -233,7 +233,7 @@ def test_read_extra(self): def test_read_write_format(self): cf.write(self.f1, tmpfile) - + for chunksize in self.chunk_sizes: with cf.chunksize(chunksize): for fmt in ( @@ -251,10 +251,10 @@ def test_read_write_format(self): g = cf.read(tmpfile2, verbose=0) self.assertEqual(len(g), 1) g = g[0] - + self.assertTrue( f.equals(g, verbose=1), - f"Bad read/write of format {fmt!r}" + f"Bad read/write of format {fmt!r}", ) def test_read_write_netCDF4_compress_shuffle(self): @@ -272,7 +272,7 @@ def test_read_write_netCDF4_compress_shuffle(self): g = cf.read(tmpfile)[0] self.assertTrue( f.equals(g, verbose=2), - f"Bad read/write with lossless compression: {fmt}" + f"Bad read/write with lossless compression: {fmt}", ) def test_write_datatype(self): @@ -328,15 +328,15 @@ def test_write_datatype(self): def test_write_reference_datetime(self): for reference_datetime in ("1751-2-3", "1492-12-30"): cf.write(self.f0, tmpfile, reference_datetime=reference_datetime) - + g = cf.read(tmpfile)[0] - + t = g.dimension_coordinate("T") self.assertEqual( t.Units, cf.Units("days since " + reference_datetime), f"Units written were {t.Units.reftime!r} not " - f"{reference_datetime!r}" + f"{reference_datetime!r}", ) def test_read_write_unlimited(self): From 656f6d2e257905cc447e438b17e08c16a6c51486 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 9 Apr 2021 12:38:23 +0100 Subject: [PATCH 24/53] devs --- cf/data/data.py | 5 
+++-- cf/field.py | 14 ++++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/cf/data/data.py b/cf/data/data.py index 7bd0c455f2..ecf06d1517 100644 --- a/cf/data/data.py +++ b/cf/data/data.py @@ -10559,7 +10559,8 @@ def cyclic(self, axes=None, iscyclic=True): if axes is None: return old - axes = [data_axes[i] for i in self._parse_axes(axes)] + parsed_axes = self._parse_axes(axes) + axes = [data_axes[i] for i in parsed_axes] if iscyclic: self._cyclic = cyclic_axes.union(axes) @@ -10571,7 +10572,7 @@ def cyclic(self, axes=None, iscyclic=True): if auxiliary_mask is not None: self._auxiliary_mask = [mask.copy() for mask in auxiliary_mask] for mask in self._auxiliary_mask: - mask.cyclic(axes, iscyclic) + mask.cyclic(parsed_axes, iscyclic) return old diff --git a/cf/field.py b/cf/field.py index 4dd8de1a6e..35d4848fe3 100644 --- a/cf/field.py +++ b/cf/field.py @@ -13309,9 +13309,6 @@ def set_data( data, axes=None, copy=copy, inplace=True ) - # # Apply cyclic axes - # data.cyclic([data_axes.index(axis) for axis in self._cyclic], True) - return f if data.isscalar: @@ -13413,9 +13410,14 @@ def set_data( super(cfdm.Field, f).set_data(data, axes=axes, copy=copy, inplace=True) # Apply cyclic axes - data.cyclic( - [axes.index(axis) for axis in self._cyclic if axis in axes], True - ) + if axes: + cyclic = self._cyclic + if cyclic: + cyclic_axes = [ + axes.index(axis) for axis in cyclic if axis in axes + ] + if cyclic_axes: + data.cyclic(cyclic_axes, True) return f From 06d4c8e6dc4a5325b84920e7f8f721d3027d93fb Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 9 Apr 2021 21:17:48 +0100 Subject: [PATCH 25/53] devs --- cf/constructs.py | 8 +- cf/coordinatereference.py | 2 +- cf/field.py | 268 +++++++++++++++++--------------------- cf/maths.py | 19 ++- 4 files changed, 135 insertions(+), 162 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index 3bc4e797ce..8813ad753c 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -164,10 +164,10 @@ def _filter_by_identity(self, arg, todict, _config, identities): """ # Allow keys without the 'key%' prefix - construct_types = self._construct_type - identities = [ - "key%" + i if i in construct_types else i for i in identities - ] + # construct_types = self._construct_type + # identities = [ + # "key%" + i if i in construct_types else i for i in identities + # ] ctypes = [i for i in "XTYZ" if i in identities] diff --git a/cf/coordinatereference.py b/cf/coordinatereference.py index 864ba0a29d..d8f08e8f22 100644 --- a/cf/coordinatereference.py +++ b/cf/coordinatereference.py @@ -216,7 +216,7 @@ def has_bounds(self): # # units is a standard_name of a coordinate # if field is None: # raise ValueError("Set the field parameter") - # coord = field.coord(canonical_units, exact=True) + # coord = field.coordinate(canonical_units, exact=True) # if coord is not None: # canonical_units = coord.Units # diff --git a/cf/field.py b/cf/field.py index 35d4848fe3..dd411f49ef 100644 --- a/cf/field.py +++ b/cf/field.py @@ -3274,7 +3274,7 @@ def _regrid_get_cartesian_coords(self, name, axes): coords = [] for key in axis_keys: - d = self.dimension_coordinate(key, default=None) + d = self.dimension_coordinate(filter_by_axis=(key,), default=None) if d is None: raise ValueError( f"No unique {name} dimension coordinate " @@ -3990,7 +3990,7 @@ def _regrid_update_coordinates( } for key_d in dst_axis_keys: - dim = dst.dimension_coordinate(key_d) + dim = dst.dimension_coordinate(filter_by_axis=(key_d,)) key_s = axis_map[key_d] 
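
In the `set_data` change above, the cyclic domain axis identifiers stored on the field are translated into positional indices of the new data, keeping only those axes that the data actually spans, and `Data.cyclic` is called only when the resulting list is non-empty. The core of that translation, with plain lists standing in for the field's state (axis names invented):

    axes = ["domainaxis0", "domainaxis2", "domainaxis3"]   # data axes, in order
    cyclic = {"domainaxis2", "domainaxis9"}                # cyclic axes on the field

    cyclic_axes = [axes.index(axis) for axis in cyclic if axis in axes]
    assert cyclic_axes == [1]
    # Only now, and only because the list is non-empty, would the data's
    # cyclic() method be applied to these positions
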
domain_axes[key_s].set_size(dim.size) self.set_construct(dim, axes=[key_s]) @@ -4031,7 +4031,7 @@ def _regrid_update_coordinates( src_axis_keys, dst_axis_keys ): dim_coord = dst.dimension_coordinate( - dst_axis_key, default=None + filter_by_axis=(dst_axis_key,), default=None ) if dim_coord is not None: self.set_construct(dim_coord, axes=[src_axis_key]) @@ -5064,7 +5064,7 @@ def _weights_linear( f"matching {axis!r}" ) - dim = self.dimension_coordinate(da_key, default=None) + dim = self.dimension_coordinate(filter_by_axis=(da_key,), default=None) if dim is None: if auto: return False @@ -6350,7 +6350,9 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): if iscyclic: self._cyclic = old.union((axis,)) - dim = self.dimension_coordinate(axis, default=None) + dim = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) if dim is not None: if period is not None: dim.period(period) @@ -8126,7 +8128,9 @@ def bin( domain_axes = self.domain_axes(filter_by_size=(ge(2),), todict=True) for da_key in domain_axes: - dim = self.dimension_coordinate(da_key, default=None) + dim = self.dimension_coordinate( + filter_by_axis=(da_key,), default=None + ) if dim is None: continue @@ -8231,7 +8235,7 @@ def has_construct(self, identity=None): False """ - return bool(self.construct(identity, default=False)) + return self.construct(identity, default=None) is not None def histogram(self, digitized): """Return a multi-dimensional histogram of the data. @@ -8241,97 +8245,97 @@ def histogram(self, digitized): """ raise RuntimeError("Use cf.histogram instead.") - def del_construct(self, identity, default=ValueError()): - """Remove a metadata construct. - - If a domain axis construct is selected for removal then it can't - be spanned by any metadata construct data, nor the field - construct's data; nor be referenced by any cell method constructs. - - However, a domain ancillary construct may be removed even if it is - referenced by coordinate reference construct. In this case the - reference is replace with `None`. - - .. versionadded:: 3.0.0 - - .. seealso:: `constructs`, `get_construct`, `has_construct`, - `set_construct`, `del_domain_axis`, - `del_coordinate_reference` - - :Parameters: - - identity: - Select the construct to removed. Must be - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. 
- - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='cell_area'`` - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` - - default: optional - Return the value of the *default* parameter if the - construct can not be removed, or does not exist. If set to - an `Exception` instance then it will be raised instead. - - :Returns: - - The removed metadata construct. - - **Examples:** - - >>> f.del_construct('X') - - - """ - key = self.construct_key(identity, default=None) - if key is None: - return self._default( - default, - "Can't identify construct to delete from identity " - f"{identity!r}", - ) - - return super().del_construct(key, default=default) + # def del_construct(self, identity, default=ValueError()): + # """Remove a metadata construct. + # + # If a domain axis construct is selected for removal then it can't + # be spanned by any metadata construct data, nor the field + # construct's data; nor be referenced by any cell method constructs. + # + # However, a domain ancillary construct may be removed even if it is + # referenced by coordinate reference construct. In this case the + # reference is replace with `None`. + # + # .. versionadded:: 3.0.0 + # + # .. seealso:: `constructs`, `get_construct`, `has_construct`, + # `set_construct`, `del_domain_axis`, + # `del_coordinate_reference` + # + # :Parameters: + # + # identity: + # Select the construct to removed. Must be + # + # * The identity or key of a metadata construct. + # + # A construct identity is specified by a string + # (e.g. ``'latitude'``, ``'long_name=time'``, + # ``'ncvar%lat'``, etc.); a `Query` object + # (e.g. ``cf.eq('longitude')``); or a compiled regular + # expression (e.g. ``re.compile('^atmosphere')``) that + # selects the relevant constructs whose identities match via + # `re.search`. + # + # A construct has a number of identities, and is selected if + # any of them match any of those provided. A construct's + # identities are those returned by its `!identities` + # method. In the following example, the construct ``x`` has + # six identities: + # + # >>> x.identities() + # ['time', + # 'long_name=Time', + # 'foo=bar', + # 'standard_name=time', + # 'ncvar%t', + # 'T'] + # + # A construct key may optionally have the ``'key%'`` + # prefix. For example ``'dimensioncoordinate2'`` and + # ``'key%dimensioncoordinate2'`` are both acceptable keys. + # + # Note that in the output of a `print` call or `!dump` + # method, a construct is always described by one of its + # identities, and so this description may always be used as + # an *identity* argument. + # + # *Parameter example:* + # ``identity='measure:area'`` + # + # *Parameter example:* + # ``identity='cell_area'`` + # + # *Parameter example:* + # ``identity='long_name=Cell Area'`` + # + # *Parameter example:* + # ``identity='cellmeasure1'`` + # + # default: optional + # Return the value of the *default* parameter if the + # construct can not be removed, or does not exist. If set to + # an `Exception` instance then it will be raised instead. + # + # :Returns: + # + # The removed metadata construct. 
+ # + # **Examples:** + # + # >>> f.del_construct('X') + # + # + # """ + # key = self.construct_key(identity, default=None) + # if key is None: + # return self._default( + # default, + # "Can't identify construct to delete from identity " + # f"{identity!r}", + # ) + # + # return super().del_construct(key, default=default) def del_coordinate_reference( self, identity=None, construct=None, default=ValueError() @@ -12755,7 +12759,6 @@ def indices(self, *mode, **kwargs): indices = [slice(None)] * self.ndim domain_axes = self.domain_axes(todict=True) - # constructs = self.constructs.filter_by_data(view=True) parsed = {} unique_axes = set() @@ -12766,16 +12769,6 @@ def indices(self, *mode, **kwargs): key = None construct = None else: - # c = constructs.filter_by_identity(identity, view=True) - # c = self.constructs.filter( - # filter_by_data=True, - # filter_by_identity=(identity,), - # todict=True - # ) - # if len(c) != 1: - # raise ValueError( - # "Can't find indices: Ambiguous axis or axes: " - # f"{identity!r}" key, construct = self.construct( identity, filter_by_data=True, @@ -12788,8 +12781,6 @@ def indices(self, *mode, **kwargs): f"{identity!r}" ) - # key, construct = c.popitem() - axes = self.get_data_axes(key) sorted_axes = tuple(sorted(axes)) @@ -14634,7 +14625,9 @@ def convolution_filter( # Update the bounds of the convolution axis if necessary if update_bounds: - coord = f.dimension_coordinate(axis_key, default=None) + coord = f.dimension_coordinate( + filter_by_axis=(axis_key,), default=None + ) if coord is not None and coord.has_bounds(): old_bounds = coord.bounds.array length = old_bounds.shape[0] @@ -14771,19 +14764,18 @@ def convert(self, identity, full_domain=True, cellsize=False): TODO """ - key = self.construct_key(identity, default=None) + key, construct = self.construct( + identity, item=True, default=(None, None) + ) if key is None: raise ValueError( - "Can't find metadata construct with identity {!r}".format( - identity - ) + f"Can't find metadata construct with identity {identity!r}" ) f = super().convert(key, full_domain=full_domain) if cellsize: # Change the new field's data to cell sizes - construct = self.construct(key) try: cs = construct.cellsize except AttributeError as error: @@ -14915,7 +14907,9 @@ def cumsum( if self.domain_axis(axis_key).get_size() > 1: # Update the bounds of the summed axis if necessary - coord = f.dimension_coordinate(axis_key, default=None) + coord = f.dimension_coordinate( + filter_by_axis=(axis_key,), default=None + ) if coord is not None and coord.has_bounds(): bounds = coord.get_bounds() bounds[:, 0] = bounds[0, 0] @@ -15203,7 +15197,7 @@ def anchor( f.roll(axis, shift, inplace=True) # TODO should this call be like the one above? - dim = f.dimension_coordinate(axis) + dim = f.dimension_coordinate(filter_by_axis=(axis,)) n = ((value - dim.data[0]) / period).floor() @@ -16762,23 +16756,16 @@ def dimension_coordinate( identity: optional Select dimension coordinate constructs that have an identity, defined by their `!identities` methods, that - matches any of the given values. In addition to a + matches any of the given values. In addition to construct identities, the values are matched against: - * The construct identifier, with or without the - ``'key%'`` prefix, of a dimension coordinate - construct. - - *Parameter example:* - ``'dimensioncoordinate1'`` - - *Parameter example:* - ``'key%dimensioncoordinate0'`` + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. 
- * The identity or construct identifier, with or - without the ``'key%'`` prefix, of a domain axis - construct that is spanned by a dimension coordinate - construct's data. + Additionly, TODOx the values are matched against the identity or + construct identifier, with or without the ``'key%'`` + prefix, of a domain axis construct that is spanned by + a dimension coordinate construct's data. *Parameter example:* ``'domainaxis2'`` @@ -16846,18 +16833,6 @@ def dimension_coordinate( TODO """ - # if not filter_kwargs and len(identity) == 1 and identity[0] in self.domain_axes(todict=True): - # return self._select_construct( - # ("dimension_coordinate",), - # "dimension_coordinate", - # (), - # key=key, - # item=item, - # default=default, - # filter_by_axis=identity, - # axis_mode="exact", - # ) - c = self._select_construct( ("dimension_coordinate",), "dimension_coordinate", @@ -18378,8 +18353,7 @@ def replace_construct(self, identity, construct, copy=True): >>> f.replace_construct('X', new_X_construct) """ - key = self.construct(identity, key=True) - c = self.constructs[key] + key, c = self.construct(identity, item=True) if not isinstance(construct, c.__class__): raise ValueError( diff --git a/cf/maths.py b/cf/maths.py index 7edc99bd71..096ab9b226 100644 --- a/cf/maths.py +++ b/cf/maths.py @@ -127,10 +127,11 @@ def relative_vorticity( y_units = u_y.Units # Change the units of the lat/longs to radians - u_x.Units = Units("radians") - u_y.Units = Units("radians") - v_x.Units = Units("radians") - v_y.Units = Units("radians") + radians = Units("radians") + u_x.Units = radians + u_y.Units = radians + v_x.Units = radians + v_y.Units = radians # Find cos and tan of latitude cos_lat = u_y.cos() @@ -164,14 +165,12 @@ def relative_vorticity( radius = Data.asdata(radius).squeeze() radius.dtype = float if radius.size != 1: - raise ValueError("Multiple radii: radius={!r}".format(radius)) + raise ValueError(f"Multiple radii: radius={radius!r}") if not radius.Units: radius.override_units(Units("metres"), inplace=True) elif not radius.Units.equivalent(Units("metres")): - raise ValueError( - "Invalid units for radius: {!r}".format(radius.Units) - ) + raise ValueError(f"Invalid units for radius: {radius.Units!r}") # Calculate the relative vorticity. Do v-(u-corr) rather than # v-u+corr to be nice with coordinate reference corner cases. 
@@ -179,8 +178,8 @@ def relative_vorticity( rv.data /= radius # Convert the units of latitude and longitude to canonical units - rv.dim("X").Units = x_units - rv.dim("Y").Units = y_units + rv.dimension_coordinate("X").Units = x_units + rv.dimension_coordinate("Y").Units = y_units else: v.derivative( From 6060785746c19fbb42c001451b7cb41007ff46d0 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 12 Apr 2021 22:18:52 +0100 Subject: [PATCH 26/53] devs --- cf/constructs.py | 46 +++++++- cf/field.py | 246 ++++++++++++++++++++++++++++++++++++++++- cf/mixin/coordinate.py | 29 +++-- 3 files changed, 300 insertions(+), 21 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index 8813ad753c..2cb572cad0 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -173,13 +173,49 @@ def _filter_by_identity(self, arg, todict, _config, identities): config = {"identities_kwargs": {"ctypes": ctypes}} - if ctypes: - # Exclude a ctype from the short circuit test - config["short_circuit_test"] = lambda x: ( - x not in ctypes and self._short_circuit_test(x) - ) +# if ctypes: +# # Exclude a ctype from the short circuit test +# config["short_circuit_test"] = lambda x: ( +# self._short_circuit_test(x) and x not in ctypes +# ) + if ctypes: + config["short_circuit_test"] = lambda x: False + if _config: config.update(_config) return super()._filter_by_identity(arg, todict, config, identities) + + def _filter_by_coordinate_type(self, arg, todict, ctypes): + """Worker function for `filter_by_identity` and `filter`. + + See `filter_by_identity` for details. + + .. versionadded:: 3.9.0 + + """ + out, pop = self._filter_preprocess( + arg, + filter_applied={"filter_by_identity": ctypes}, + todict=todict, + ) + + if not ctypes: + # Return all constructs if no coordinate types have been + # provided + return out + + for cid, construct in tuple(out.items()): + ok = False + for ctype in ctypes: + if getattr(construct, ctype, False): + ok = True + break + + if not ok: + pop(cid) + + return out + + diff --git a/cf/field.py b/cf/field.py index dd411f49ef..b1028d9fd5 100644 --- a/cf/field.py +++ b/cf/field.py @@ -16833,19 +16833,46 @@ def dimension_coordinate( TODO """ - c = self._select_construct( + ctypes = [i for i in "XTYZ" if i in identity] + if ctypes: + identity = [i for i in identity if i not in ctypes] + filter_kwargs["filter_by_coordinate_type"] = ctypes + last_filter = ("filter_by_coordinate_type",) + else: + last_filter = None + + + c = self._filter_interface( ("dimension_coordinate",), "dimension_coordinate", identity, + construct=True, key=key, item=item, default=None, + _last_filter=last_filter, + _identity_config={"identities_kwargs": {"ctype": False}}, **filter_kwargs, ) if c is not None: return c - - if identity: +# +# c = self._select_construct( +# ("dimension_coordinate",), +# "dimension_coordinate", +# identity, +# key=key, +# item=item, +# default=None, +# _last_filter=last_filter, +# _identity_config={"identities_kwargs": {"ctype": False}}, +# **filter_kwargs, +# ) +# if c is not None: +# return c + + if not filter_kwargs and len(identity) == 1 and identity in self.domain_axes(todict=True): + raise DeprecationError() da_key = self.domain_axis(*identity, key=True, default=None) if da_key is not None: return self._select_construct( @@ -16868,6 +16895,72 @@ def dimension_coordinate( "return a unique construct", ) + def dimension_coordinates(self, *identities, **filter_kwargs): + """Return dimension coordinate constructs. + + .. versionadded:: 3.0.0 + + .. 
seealso:: `constructs` + + :Parameters: + + identities: optional + Select dimension coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. + + If no identities are provided then all dimension + coordinate constructs are selected. + + {{value match}} + + {{displayed identity}} + + {{filter_kwargs: optional}} + + :Returns: + + `Constructs` + The selected constructs, unless modified by any + *filter_kwargs* parameters. + + **Examples:** + + >>> f.dimension_coordinates() + Constructs: + {} + + >>> f.dimension_coordinates() + Constructs: + {'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, + 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, + 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, + 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} + + """ + filter_by_identity = filter_kwargs.pop("filter_by_identity", None) + if identities: + if filter_by_identity is not None: + raise TypeError( + f"Can't set {self.__class__.__name__}." + "dimension_coordinates() " + "keyword argument 'filter_by_identity' when " + "positional *identities arguments are also set" + ) + elif filter_by_identity is not None: + identities = filter_by_identity + + ctypes = [i for i in "XTYZ" if i in identities] + if len(ctypes) == len(identities): + filter_kwargs["filter_by_coordinate_type"] = ctypes + return super().dimension_coordinates( + _last_filter=("filter_by_coordinate_type",), + **filter_kwargs + ) + + return super().dimension_coordinates( *identities, + **filter_kwargs) + def domain_axis( self, *identity, @@ -16987,13 +17080,23 @@ def domain_axis( identity = identity2 - c = self._select_construct( +# c = self._select_construct( +# ("domain_axis",), +# "domain_axis", +# identity, +# key=key, +# default=None, +# item=item, +# **filter_kwargs, +# ) + c = self._filter_interface( ("domain_axis",), "domain_axis", identity, + construct=True, key=key, - default=None, item=item, + default=None, **filter_kwargs, ) if c is not None: @@ -17114,6 +17217,71 @@ def domain_axis_position(self, *identity): key = self.domain_axis(*identity, key=True) return self.get_data_axes().index(key) + def auxiliary_coordinates(self, *identities, **filter_kwargs): + """Return auxiliary coordinate constructs. + + .. versionadded:: 3.0.0 + + .. seealso:: `constructs` + + :Parameters: + + identities: optional + Select auxiliary coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. + + If no identities are provided then all auxiliary + coordinate constructs are selected. + + {{value match}} + + {{displayed identity}} + + {{filter_kwargs: optional}} + + :Returns: + + `Constructs` + The selected constructs, unless modified by any + *filter_kwargs* parameters. + + **Examples:** + + >>> f.auxiliary_coordinates() + Constructs: + {} + + >>> f.auxiliary_coordinates() + Constructs: + {'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, + 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, + 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name:Grid latitude name(10) >} + + """ + filter_by_identity = filter_kwargs.pop("filter_by_identity", None) + if identities: + if filter_by_identity is not None: + raise TypeError( + f"Can't set {self.__class__.__name__}." 
+ "auxiliary_coordinates() " + "keyword argument 'filter_by_identity' when " + "positional *identities arguments are also set" + ) + elif filter_by_identity is not None: + identities = filter_by_identity + + ctypes = [i for i in "XTYZ" if i in identities] + if len(ctypes) == len(identities): + filter_kwargs["filter_by_coordinate_type"] = ctypes + return super().auxiliary_coordinates( + _last_filter=("filter_by_coordinate_type",), + **filter_kwargs + ) + + return super().auxiliary_coordinates( *identities, + **filter_kwargs) + def axes_names(self, *identities, **kwargs): """Return canonical identities for each domain axis construct. @@ -19225,6 +19393,74 @@ def subspace(self): """ return SubspaceField(self) + def coordinates(self, *identities, **filter_kwargs): + """Return dimension and auxiliary coordinate constructs. + + . versionadded:: 3.0.0 + + . seealso:: `auxiliary_coordinates`, `constructs`, + `dimension_coordinates` + + :Parameters: + + identities: optional + Select coordinate constructs that have an identity, + defined by their `!identities` methods, that matches + any of the given values. + + If no identities are provided then all coordinate + constructs are selected. + + {{value match}} + + {{displayed identity}} + + {{filter_kwargs: optional}} + + :Returns: + + `Constructs` + The selected constructs, unless modified by any + *filter_kwargs* parameters. + + *Examples:** + + >> f.coordinates() + onstructs: + } + + >> f.coordinates() + onstructs: + 'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, + 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, + 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name=Grid latitude name(10) >, + 'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, + 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, + 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, + 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} + + """ + filter_by_identity = filter_kwargs.pop("filter_by_identity", None) + if identities: + if filter_by_identity is not None: + raise TypeError( + f"Can't set {self.__class__.__name__}.coordinates() " + "keyword argument 'filter_by_identity' when " + "positional *identities arguments are also set" + ) + elif filter_by_identity is not None: + identities = filter_by_identity + + ctypes = [i for i in "XTYZ" if i in identities] + if ctypes and len(ctypes) == len(identities): + filter_kwargs["filter_by_coordinate_type"] = ctypes + return super().coordinates( + _last_filter=("filter_by_coordinate_type",), + **filter_kwargs + ) + + return super().coordinates(*identities, **filter_kwargs) + def coordinate_reference_domain_axes(self, identity): """Return the domain axes that apply to a coordinate reference construct. diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 09347900a5..1288a7e050 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -508,7 +508,7 @@ def identity( return default - def identities(self, generator=False, ctypes="XTYZ"): + def identities(self, generator=False, ctypes=None): """Return all possible identities. The identities comprise: @@ -531,11 +531,14 @@ def identities(self, generator=False, ctypes="XTYZ"): {{generator: `bool`, optional}} - ctype: (sequence of) `str` - Restrict coordinate type identities to be any of these - characters. 
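
The coordinate-selection methods in this patch share the same preamble: pick out the identities that are bare coordinate types (``'X'``, ``'T'``, ``'Y'`` or ``'Z'``) and, when every identity is a coordinate type, switch to the coordinate-type filter instead of the general identity filter. Reduced to plain Python (the dispatch targets are only described in comments, not implemented here):

    def split_identities(identities):
        """Separate coordinate-type identities from the rest."""
        ctypes = [i for i in "XTYZ" if i in identities]
        others = [i for i in identities if i not in ctypes]
        return ctypes, others

    ctypes, others = split_identities(("X", "Y"))
    assert (ctypes, others) == (["X", "Y"], [])
    # All identities are coordinate types, so a filter-by-coordinate-type
    # style selection would be used

    ctypes, others = split_identities(("T", "longitude"))
    assert (ctypes, others) == (["T"], ["longitude"])
    # Mixed case, so the general identity filter still runs
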
Setting to a subset of ``'XTYZ'`` can give - performance improvements, as it will reduce the number - of coordinate types that are checked in circumstances + ctypes: (sequence of) `str` + If set then return the coordinate type (if any) as the + first identity and restrict the possible coordinate + types to be any of these characters. By default, a + coordinate type is the last identity. Setting to a + subset of ``'XTYZ'`` can give performance + improvements, as it will reduce the number of + coordinate types that are checked in circumstances when particular coordinate types have been ruled out a priori. If a coordinate type is omitted then it will not be in the returned identities even if the @@ -543,13 +546,13 @@ def identities(self, generator=False, ctypes="XTYZ"): are checked in the order given. *Parameter example:* - ``ctype='Y'`` + ``ctypes='Y'`` *Parameter example:* - ``ctype='XY'`` + ``ctypes='XY'`` *Parameter example:* - ``ctype=('T', 'X')`` + ``ctypes=('T', 'X')`` :Returns: @@ -572,9 +575,13 @@ def identities(self, generator=False, ctypes="XTYZ"): 'ncvar%tas'] """ - identities = super().identities(generator=True) + identities = super().identities(generator=True, ctypes=ctypes) - g = chain(identities, _ctypes_iter(self, ctypes)) + if ctypes: + g = chain(_ctypes_iter(self, ctypes), identities) + else: + g = chain(identities, _ctypes_iter(self, 'XTYZ')) + if generator: return g From d65477004e329bf85bac43c409520c63e5d4ca77 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 13 Apr 2021 16:21:17 +0100 Subject: [PATCH 27/53] fielddomain.py --- cf/constructs.py | 54 +- cf/field.py | 15915 +++++++++++++++++------------------ cf/mixin/__init__.py | 1 + cf/mixin/coordinate.py | 43 +- cf/mixin/fielddomain.py | 3135 +++++++ cf/mixin/properties.py | 4 - cf/mixin/propertiesdata.py | 37 +- cf/test/test_Field.py | 39 +- 8 files changed, 11165 insertions(+), 8063 deletions(-) create mode 100644 cf/mixin/fielddomain.py diff --git a/cf/constructs.py b/cf/constructs.py index 2cb572cad0..eab1b48ffa 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -163,25 +163,13 @@ def _filter_by_identity(self, arg, todict, _config, identities): .. versionadded:: 3.9.0 """ - # Allow keys without the 'key%' prefix - # construct_types = self._construct_type - # identities = [ - # "key%" + i if i in construct_types else i for i in identities - # ] - ctypes = [i for i in "XTYZ" if i in identities] + if len(ctypes) == len(identities): + # All identities are coordinate types (X, T, Y or Z) + return self._filter_by_coordinate_type(arg, todict, ctypes) + config = {"identities_kwargs": {"ctypes": ctypes}} - -# if ctypes: -# # Exclude a ctype from the short circuit test -# config["short_circuit_test"] = lambda x: ( -# self._short_circuit_test(x) and x not in ctypes -# ) - - if ctypes: - config["short_circuit_test"] = lambda x: False - if _config: config.update(_config) @@ -218,4 +206,36 @@ def _filter_by_coordinate_type(self, arg, todict, ctypes): return out - + @classmethod + def _short_iteration(cls, x): + """The default short cicuit test. + + If this method returns True then only ther first identity + return by the construct's `!identities` method will be + checked. + + See `_filter_by_identity` for details. + + .. versionadded:: (cfdm) 1.8.9.0 + + :Parameters: + + x: `str` + The value against which the construct's identities are + being compared. + + :Returns: + + `bool` + Returns `True` if a construct's `identities` method + is to short circuit after the first identity is + computed, otherwise `False`. 
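        For illustration, a minimal sketch of how this test behaves
        for a few representative identities (``c`` is assumed here to
        be a `cf.Constructs` instance):

        >>> c._short_iteration('T')
        True
        >>> c._short_iteration('air_temperature')
        True
        >>> c._short_iteration('long_name=Air Temperature')
        False
        >>> c._short_iteration('ncvar%tas')
        False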
+ + """ + if not isinstance(x, str): + return False + + if x in "XTYZ" or x.startswith('measure:') or x.startswith('id%'): + return True + + return "=" not in x and ":" not in x and "%" not in x diff --git a/cf/field.py b/cf/field.py index b1028d9fd5..0d983c693e 100644 --- a/cf/field.py +++ b/cf/field.py @@ -99,7 +99,7 @@ # -------------------------------------------------------------------- # Commonly used units # -------------------------------------------------------------------- -_units_degrees = Units("degrees") +#_units_degrees = Units("degrees") _units_radians = Units("radians") _units_metres = Units("m") _units_1 = Units("1") @@ -247,7 +247,7 @@ _empty_set = set() -class Field(mixin.PropertiesData, cfdm.Field): +class Field(mixin.FieldDomain, mixin.PropertiesData, cfdm.Field): """A field construct of the CF data model. The field construct is central to the CF data model, and includes @@ -8337,7230 +8337,6629 @@ def histogram(self, digitized): # # return super().del_construct(key, default=default) - def del_coordinate_reference( - self, identity=None, construct=None, default=ValueError() - ): - """Remove a coordinate reference construct and all of its domain - ancillary constructs. +# def del_coordinate_reference( +# self, identity=None, construct=None, default=ValueError() +# ): +# """Remove a coordinate reference construct and all of its domain +# ancillary constructs. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `del_construct` +# +# :Parameters: +# +# identity: optional +# Select the coordinate reference construct by one of: +# +# * The identity or key of a coordinate reference +# construct. +# +# A construct identity is specified by a string +# (e.g. ``'grid_mapping_name:latitude_longitude'``, +# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a +# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or +# a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# two identities: +# +# >>> x.identities() +# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] +# +# A identity's prefix of ``'grid_mapping_name:'`` or +# ``'standard_name:'`` may be omitted +# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` +# and ``'atmosphere_hybrid_height_coordinate'`` are both +# acceptable identities). +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'coordinatereference2'`` and +# ``'key%coordinatereference2'`` are both acceptable keys. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` +# +# *Parameter example:* +# ``identity='grid_mapping_name:rotated_latitude_longitude'`` +# +# *Parameter example:* +# ``identity='transverse_mercator'`` +# +# *Parameter example:* +# ``identity='coordinatereference1'`` +# +# *Parameter example:* +# ``identity='key%coordinatereference1'`` +# +# *Parameter example:* +# ``identity='ncvar%lat_lon'`` +# +# construct: optional +# The coordinate reference construct to remove. 
This may +# alternatively be specified via the *identity* parameter. +# +# default: optional +# Return the value of the *default* parameter if the +# construct can not be removed, or does not exist. If set to +# an `Exception` instance then it will be raised instead. +# +# :Returns: +# +# The removed coordinate reference construct. +# +# **Examples:** +# +# >>> f.del_coordinate_reference('rotated_latitude_longitude') +# +# +# """ +# if construct is None: +# if identity is None: +# raise ValueError("TODO") +# +# key = self.coordinate_reference(identity, key=True, default=None) +# if key is None: +# return self._default( +# default, +# f"Can't identify construct from {identity!r}", +# ) +# +# ref = self.del_construct(key) +# +# for ( +# da_key +# ) in ref.coordinate_conversion.domain_ancillaries().values(): +# self.del_construct(da_key, default=None) +# +# return ref +# elif identity is not None: +# raise ValueError("TODO") +# +# out = [] +# +# c_key = self.construct(construct, key=True, default=None) +# if c_key is None: +# return self._default( +# default, f"Can't identify construct from {construct!r}" +# ) +# +# for key, ref in tuple(self.coordinate_references(todict=True).items()): +# if c_key in ref.coordinates(): +# self.del_coordinate_reference( +# key, construct=None, default=default +# ) +# out.append(ref) +# continue +# +# if ( +# c_key +# in ref.coordinate_conversion.domain_ancillaries().values() +# ): +# self.del_coordinate_reference( +# key, construct=None, default=default +# ) +# out.append(ref) +# continue +# +# return out +# +# def del_domain_axis( +# self, identity=None, squeeze=False, default=ValueError() +# ): +# """Remove a domain axis construct. +# +# In general, a domain axis construct can only be removed if it is +# not spanned by any construct's data. However, a size 1 domain axis +# construct can be removed in any case if the *squeeze* parameter is +# set to `True`. In this case, a metadata construct whose data spans +# only the removed domain axis construct will also be removed. +# +# .. versionadded:: 3.6.0 +# +# .. seealso:: `del_construct` +# +# :Parameters: +# +# identity: +# Select the domain axis construct by one of: +# +# * An identity or key of a 1-d coordinate construct that +# whose data spans the domain axis construct. +# +# * A domain axis construct identity or key. +# +# * The position of the domain axis construct in the field +# construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); or a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time' +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time' +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'dimensioncoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. 
+# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='dimensioncoordinate1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity='key%domainaxis2'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# *Parameter example:* +# ``identity=2`` +# +# squeeze: `bool`, optional +# If True then allow the removal of a size 1 domain axis +# construct that is spanned by any data array and squeeze +# the corresponding dimension from those arrays. +# +# default: optional +# Return the value of the *default* parameter if the +# construct can not be removed, or does not exist. If set to +# an `Exception` instance then it will be raised instead. +# +# :Returns: +# +# `DomainAxis` +# The removed domain axis construct. +# +# **Examples:** +# +# >>> f = cf.example_field(0) +# >>> g = f[0] +# Field: specific_humidity (ncvar%q) +# ---------------------------------- +# Data : specific_humidity(latitude(1), longitude(8)) 1 +# Cell methods : area: mean +# Dimension coords: latitude(1) = [-75.0] degrees_north +# : longitude(8) = [22.5, ..., 337.5] degrees_east +# : time(1) = [2019-01-01 00:00:00] +# >>> g.del_domain_axis('Y', squeeze=True) +# +# >>> print(g) +# Field: specific_humidity (ncvar%q) +# ---------------------------------- +# Data : specific_humidity(longitude(8)) 1 +# Cell methods : area: mean +# Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east +# : time(1) = [2019-01-01 00:00:00] +# >>> g.del_domain_axis('T', squeeze=True) +# +# >>> print(g) +# Field: specific_humidity (ncvar%q) +# ---------------------------------- +# Data : specific_humidity(longitude(8)) 1 +# Cell methods : area: mean +# Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east +# +# """ +# dakey = self.domain_axis(identity, key=True) +# domain_axis = self.constructs[dakey] +# +# if not squeeze: +# return self.del_construct(dakey) +# +# if dakey in self.get_data_axes(default=()): +# self.squeeze(dakey, inplace=True) +# +# for ckey, construct in self.constructs.filter_by_data( +# todict=True +# ).items(): +# data = construct.get_data(None, _fill_value=False) +# if data is None: +# continue +# +# construct_axes = self.get_data_axes(ckey) +# if dakey not in construct_axes: +# continue +# +# i = construct_axes.index(dakey) +# construct.squeeze(i, inplace=True) +# construct_axes = list(construct_axes) +# construct_axes.remove(dakey) +# self.set_data_axes(axes=construct_axes, key=ckey) +# +# if not construct_axes: +# self.del_construct(ckey) +# +# return domain_axis - .. versionadded:: 3.0.0 +# def get_coordinate_reference( +# self, identity=None, key=False, construct=None, default=ValueError() +# ): +# """Returns selected coordinate reference constructs. +# +# .. versionadded:: 3.0.2 +# +# .. seealso:: `construct` +# +# :Parameters: +# +# identity: +# Select the coordinate reference construct by one of: +# +# * The identity or key of a coordinate reference +# construct. +# +# A construct identity is specified by a string +# (e.g. ``'grid_mapping_name:latitude_longitude'``, +# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a +# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or +# a compiled regular expression +# (e.g. 
``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# two identities: +# +# >>> x.identities() +# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] +# +# A identity's prefix of ``'grid_mapping_name:'`` or +# ``'standard_name:'`` may be omitted +# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` +# and ``'atmosphere_hybrid_height_coordinate'`` are both +# acceptable identities). +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'coordinatereference2'`` and +# ``'key%coordinatereference2'`` are both acceptable keys. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` +# +# *Parameter example:* +# ``identity='grid_mapping_name:rotated_latitude_longitude'`` +# +# *Parameter example:* +# ``identity='transverse_mercator'`` +# +# *Parameter example:* +# ``identity='coordinatereference1'`` +# +# *Parameter example:* +# ``identity='key%coordinatereference1'`` +# +# *Parameter example:* +# ``identity='ncvar%lat_lon'`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `CoordinateReference` or `str` +# The selected coordinate reference construct, or its key. +# +# **Examples:** +# +# TODO +# +# """ +# if construct is None: +# return self.coordinate_reference( +# identity=identity, key=key, default=default +# ) +# +# out = [] +# +# c_key = self.construct(construct, key=True, default=None) +# if c_key is None: +# return self._default( +# default, f"Can't identify construct from {construct!r}" +# ) +# +# for cr_key, ref in tuple( +# self.coordinate_references(todict=True).items() +# ): +# if c_key in [ +# ref.coordinates(), +# ref.coordinate_conversion.domain_ancillaries().values(), +# ]: +# if key: +# if cr_key not in out: +# out.append(cr_key) +# elif ref not in out: +# out.append(ref) +# +# continue +# +# return out +# +# def set_coordinate_reference( +# self, coordinate_reference, key=None, field=None, strict=True +# ): +# """Set a coordinate reference construct. +# +# By default, this is equivalent to using the `set_construct` +# method. If, however, the *field* parameter has been set then it is +# assumed to be a field construct that contains the new coordinate +# reference construct. In this case, existing coordinate and domain +# ancillary constructs will be referenced by the inserted coordinate +# reference construct, based on those which are referenced from the +# other parent field construct (given by the *field* parameter). +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `set_construct` +# +# :Parameters: +# +# coordinate_reference: `CoordinateReference` +# The coordinate reference construct to be inserted. +# +# key: `str`, optional +# The construct identifier to be used for the construct. 
If +# not set then a new, unique identifier is created +# automatically. If the identifier already exists then the +# existing construct will be replaced. +# +# *Parameter example:* +# ``key='coordinatereference1'`` +# +# field: `Field`, optional +# A parent field construct that contains the new coordinate +# reference construct. +# +# strict: `bool`, optional +# If False then allow non-strict identities for +# identifying coordinate and domain ancillary metadata +# constructs. +# +# :Returns: +# +# `str` +# The construct identifier for the coordinate reference +# construct. +# +# """ +# if field is None: +# return self.set_construct(coordinate_reference, key=key, copy=True) +# +# # Still here? +# ref = coordinate_reference.copy() +# +# coordinates = field.coordinates(todict=True) +# domain_ancillaries = field.domain_ancillaries(todict=True) +# +# ckeys = [] +# for value in coordinate_reference.coordinates(): +# if value in coordinates: +# identity = coordinates[value].identity(strict=strict) +# ckeys.append(self.coordinate(identity, key=True, default=None)) +# +# ref.clear_coordinates() +# ref.set_coordinates(ckeys) +# +# coordinate_conversion = coordinate_reference.coordinate_conversion +# +# dakeys = {} +# for term, value in coordinate_conversion.domain_ancillaries().items(): +# if value in domain_ancillaries: +# identity = domain_ancillaries[value].identity(strict=strict) +# dakeys[term] = self.domain_ancillary( +# identity, key=True, default=None +# ) +# else: +# dakeys[term] = None +# +# ref.coordinate_conversion.clear_domain_ancillaries() +# ref.coordinate_conversion.set_domain_ancillaries(dakeys) +# +# return self.set_construct(ref, key=key, copy=False) - .. seealso:: `del_construct` + @_deprecated_kwarg_check("i") + @_manage_log_level_via_verbosity + def collapse( + self, + method, + axes=None, + squeeze=False, + mtol=1, + weights=None, + ddof=1, + a=None, + inplace=False, + group=None, + regroup=False, + within_days=None, + within_years=None, + over_days=None, + over_years=None, + coordinate=None, + group_by=None, + group_span=None, + group_contiguous=1, + measure=False, + scale=None, + radius="earth", + great_circle=False, + verbose=None, + _create_zero_size_cell_bounds=False, + _update_cell_methods=True, + i=False, + _debug=False, + **kwargs, + ): + """Collapse axes of the field. - :Parameters: + Collapsing one or more dimensions reduces their size and replaces + the data along those axes with representative statistical + values. The result is a new field construct with consistent + metadata for the collapsed values. - identity: optional - Select the coordinate reference construct by one of: + By default all axes with size greater than 1 are collapsed + completely (i.e. to size 1) with a given collapse method. - * The identity or key of a coordinate reference - construct. + *Example:* + Find the minimum of the entire data: - A construct identity is specified by a string - (e.g. ``'grid_mapping_name:latitude_longitude'``, - ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a - `Query` object (e.g. ``cf.eq('latitude_longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + >>> b = a.collapse('minimum') - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. 
In the following example, the construct ``x`` has - two identities: + The collapse can also be applied to any subset of the field + construct's dimensions. In this case, the domain axis and + coordinate constructs for the non-collapsed dimensions remain the + same. This is implemented either with the axes keyword, or with a + CF-netCDF cell methods-like syntax for describing both the + collapse dimensions and the collapse method in a single + string. The latter syntax uses construct identities instead of + netCDF dimension names to identify the collapse axes. - >>> x.identities() - ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] + Statistics may be created to represent variation over one + dimension or a combination of dimensions. - A identity's prefix of ``'grid_mapping_name:'`` or - ``'standard_name:'`` may be omitted - (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` - and ``'atmosphere_hybrid_height_coordinate'`` are both - acceptable identities). + *Example:* + Two equivalent techniques for creating a field construct of + temporal maxima at each horizontal location: - A construct key may optionally have the ``'key%'`` - prefix. For example ``'coordinatereference2'`` and - ``'key%coordinatereference2'`` are both acceptable keys. + >>> b = a.collapse('maximum', axes='T') + >>> b = a.collapse('T: maximum') - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + *Example:* + Find the horizontal maximum, with two equivalent techniques. - *Parameter example:* - ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + >>> b = a.collapse('maximum', axes=['X', 'Y']) + >>> b = a.collapse('X: Y: maximum') - *Parameter example:* - ``identity='grid_mapping_name:rotated_latitude_longitude'`` + Variation over horizontal area may also be specified by the + special identity 'area'. This may be used for any horizontal + coordinate reference system. - *Parameter example:* - ``identity='transverse_mercator'`` + *Example:* + Find the horizontal maximum using the special identity 'area': - *Parameter example:* - ``identity='coordinatereference1'`` + >>> b = a.collapse('area: maximum') - *Parameter example:* - ``identity='key%coordinatereference1'`` - *Parameter example:* - ``identity='ncvar%lat_lon'`` + **Collapse methods** - construct: optional - The coordinate reference construct to remove. This may - alternatively be specified via the *identity* parameter. + The following collapse methods are available (see + https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods + for precise definitions): - default: optional - Return the value of the *default* parameter if the - construct can not be removed, or does not exist. If set to - an `Exception` instance then it will be raised instead. + ============================ ============================ + Method Description + ============================ ============================ + ``'maximum'`` The maximum of the values. - :Returns: + ``'minimum'`` The minimum of the values. - The removed coordinate reference construct. + ``'maximum_absolute_value'`` The maximum of the absolute + values. - **Examples:** + ``'minimum_absolute_value'`` The minimum of the absolute + values. - >>> f.del_coordinate_reference('rotated_latitude_longitude') - + ``'mid_range'`` The average of the maximum + and the minimum of the + values. 
- """ - if construct is None: - if identity is None: - raise ValueError("TODO") + ``'median'`` The median of the values. - key = self.coordinate_reference(identity, key=True, default=None) - if key is None: - return self._default( - default, - f"Can't identify construct from {identity!r}", - ) + ``'range'`` The absolute difference + between the maximum and the + minimum of the values. - ref = self.del_construct(key) + ``'sum'`` The sum of the values. - for ( - da_key - ) in ref.coordinate_conversion.domain_ancillaries().values(): - self.del_construct(da_key, default=None) + ``'sum_of_squares'`` The sum of the squares of + values. - return ref - elif identity is not None: - raise ValueError("TODO") + ``'sample_size'`` The sample size, i.e. the + number of non-missing + values. - out = [] + ``'sum_of_weights'`` The sum of weights, as + would be used for other + calculations. - c_key = self.construct(construct, key=True, default=None) - if c_key is None: - return self._default( - default, f"Can't identify construct from {construct!r}" - ) + ``'sum_of_weights2'`` The sum of squares of + weights, as would be used + for other calculations. - for key, ref in tuple(self.coordinate_references(todict=True).items()): - if c_key in ref.coordinates(): - self.del_coordinate_reference( - key, construct=None, default=default - ) - out.append(ref) - continue + ``'mean'`` The weighted or unweighted + mean of the values. - if ( - c_key - in ref.coordinate_conversion.domain_ancillaries().values() - ): - self.del_coordinate_reference( - key, construct=None, default=default - ) - out.append(ref) - continue + ``'mean_absolute_value'`` The mean of the absolute + values. - return out + ``'mean_of_upper_decile'`` The mean of the upper group + of data values defined by + the upper tenth of their + distribution. - def del_domain_axis( - self, identity=None, squeeze=False, default=ValueError() - ): - """Remove a domain axis construct. + ``'variance'`` The weighted or unweighted + variance of the values, with + a given number of degrees of + freedom. - In general, a domain axis construct can only be removed if it is - not spanned by any construct's data. However, a size 1 domain axis - construct can be removed in any case if the *squeeze* parameter is - set to `True`. In this case, a metadata construct whose data spans - only the removed domain axis construct will also be removed. + ``'standard_deviation'`` The weighted or unweighted + standard deviation of the + values, with a given number + of degrees of freedom. - .. versionadded:: 3.6.0 + ``'root_mean_square'`` The square root of the + weighted or unweighted mean + of the squares of the + values. - .. seealso:: `del_construct` + ``'integral'`` The integral of values. + ============================ ============================ - :Parameters: - identity: - Select the domain axis construct by one of: + **Data type and missing data** - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. + In all collapses, missing data array elements are accounted for in + the calculation. - * A domain axis construct identity or key. + Any collapse method that involves a calculation (such as + calculating a mean), as opposed to just selecting a value (such as + finding a maximum), will return a field containing double + precision floating point numbers. If this is not desired then the + data type can be reset after the collapse with the `dtype` + attribute of the field construct. 
- * The position of the domain axis construct in the field - construct's data. - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + **Collapse weights** - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + The calculations of means, standard deviations and variances are, + by default, **not weighted**. For weights to be incorporated in + the collapse, the axes to be weighted must be identified with the + *weights* keyword. - >>> x.identities() - ['time' - 'long_name=Time', - 'foo=bar', - 'standard_name=time' - 'ncvar%t', - 'T'] + Weights are either derived from the field construct's metadata + (such as cell sizes), or may be provided explicitly in the form of + other field constructs containing data of weights values. In + either case, the weights actually used are those derived by the + `weights` method of the field construct with the same weights + keyword value. Collapsed axes that are not identified by the + *weights* keyword are unweighted during the collapse operation. - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + *Example:* + Create a weighted time average: - A position of a domain axis construct in the field - construct's data is specified by an integer index. + >>> b = a.collapse('T: mean', weights=True) - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + *Example:* + Calculate the mean over the time and latitude axes, with + weights only applied to the latitude axis: - *Parameter example:* - ``identity='long_name=Latitude'`` + >>> b = a.collapse('T: Y: mean', weights='Y') - *Parameter example:* - ``identity='dimensioncoordinate1'`` + *Example* + Alternative syntax for specifying area weights: - *Parameter example:* - ``identity='domainaxis2'`` + >>> b = a.collapse('area: mean', weights=True) - *Parameter example:* - ``identity='key%domainaxis2'`` + An alternative technique for specifying weights is to set the + *weights* keyword to the output of a call to the `weights` method. - *Parameter example:* - ``identity='ncdim%y'`` + *Example* + Alternative syntax for specifying weights: - *Parameter example:* - ``identity=2`` + >>> b = a.collapse('area: mean', weights=a.weights('area')) - squeeze: `bool`, optional - If True then allow the removal of a size 1 domain axis - construct that is spanned by any data array and squeeze - the corresponding dimension from those arrays. + **Multiple collapses** - default: optional - Return the value of the *default* parameter if the - construct can not be removed, or does not exist. If set to - an `Exception` instance then it will be raised instead. + Multiple collapses normally require multiple calls to `collapse`: + one on the original field construct and then one on each interim + field construct. - :Returns: + *Example:* + Calculate the temporal maximum of the weighted areal means + using two independent calls: - `DomainAxis` - The removed domain axis construct. 
+ >>> b = a.collapse('area: mean', weights=True).collapse('T: maximum') - **Examples:** + If preferred, multiple collapses may be carried out in a single + call by using the CF-netCDF cell methods-like syntax (note that + the colon (:) is only used after the construct identity that + specifies each axis, and a space delimits the separate collapses). - >>> f = cf.example_field(0) - >>> g = f[0] - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(1), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(1) = [-75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - >>> g.del_domain_axis('Y', squeeze=True) - - >>> print(g) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(longitude(8)) 1 - Cell methods : area: mean - Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - >>> g.del_domain_axis('T', squeeze=True) - - >>> print(g) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(longitude(8)) 1 - Cell methods : area: mean - Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east + *Example:* + Calculate the temporal maximum of the weighted areal means in + a single call, using the cf-netCDF cell methods-like syntax: - """ - dakey = self.domain_axis(identity, key=True) - domain_axis = self.constructs[dakey] + >>> b =a.collapse('area: mean T: maximum', weights=True) - if not squeeze: - return self.del_construct(dakey) - if dakey in self.get_data_axes(default=()): - self.squeeze(dakey, inplace=True) + **Grouped collapses** - for ckey, construct in self.constructs.filter_by_data( - todict=True - ).items(): - data = construct.get_data(None, _fill_value=False) - if data is None: - continue + A grouped collapse is one for which as axis is not collapsed + completely to size 1. Instead the collapse axis is partitioned + into non-overlapping groups and each group is collapsed to size + 1. The resulting axis will generally have more than one + element. For example, creating 12 annual means from a timeseries + of 120 months would be a grouped collapse. - construct_axes = self.get_data_axes(ckey) - if dakey not in construct_axes: - continue + Selected statistics for overlapping groups can be calculated with + the `moving_window` method. - i = construct_axes.index(dakey) - construct.squeeze(i, inplace=True) - construct_axes = list(construct_axes) - construct_axes.remove(dakey) - self.set_data_axes(axes=construct_axes, key=ckey) + The *group* keyword defines the size of the groups. Groups can be + defined in a variety of ways, including with `Query`, + `TimeDuration` and `Data` instances. - if not construct_axes: - self.del_construct(ckey) + An element of the collapse axis can not be a member of more than + one group, and may be a member of no groups. Elements that are not + selected by the *group* keyword are excluded from the result. - return domain_axis + *Example:* + Create annual maxima from a time series, defining a year to + start on 1st December. - def get_coordinate_reference( - self, identity=None, key=False, construct=None, default=ValueError() - ): - """Returns selected coordinate reference constructs. + >>> b = a.collapse('T: maximum', group=cf.Y(month=12)) - .. versionadded:: 3.0.2 + *Example:* + Find the maximum of each group of 6 elements along an axis. - .. 
seealso:: `construct` + >>> b = a.collapse('T: maximum', group=6) - :Parameters: + *Example:* + Create December, January, February maxima from a time series. - identity: - Select the coordinate reference construct by one of: + >>> b = a.collapse('T: maximum', group=cf.djf()) - * The identity or key of a coordinate reference - construct. + *Example:* + Create maxima for each 3-month season of a timeseries (DJF, MAM, + JJA, SON). - A construct identity is specified by a string - (e.g. ``'grid_mapping_name:latitude_longitude'``, - ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a - `Query` object (e.g. ``cf.eq('latitude_longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + >>> b = a.collapse('T: maximum', group=cf.seasons()) - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - two identities: + *Example:* + Calculate zonal means for the western and eastern hemispheres. - >>> x.identities() - ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] + >>> b = a.collapse('X: mean', group=cf.Data(180, 'degrees')) - A identity's prefix of ``'grid_mapping_name:'`` or - ``'standard_name:'`` may be omitted - (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` - and ``'atmosphere_hybrid_height_coordinate'`` are both - acceptable identities). + Groups can be further described with the *group_span* parameter + (to include groups whose actual span is not equal to a given + value) and the *group_contiguous* parameter (to include + non-contiguous groups, or any contiguous group containing + overlapping cells). - A construct key may optionally have the ``'key%'`` - prefix. For example ``'coordinatereference2'`` and - ``'key%coordinatereference2'`` are both acceptable keys. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + **Climatological statistics** - *Parameter example:* - ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + Climatological statistics may be derived from corresponding + portions of the annual cycle in a set of years (e.g. the average + January temperatures in the climatology of 1961-1990, where the + values are derived by averaging the 30 Januarys from the separate + years); or from corresponding portions of the diurnal cycle in a + set of days (e.g. the average temperatures for each hour in the + day for May 1997). A diurnal climatology may also be combined with + a multiannual climatology (e.g. the minimum temperature for each + hour of the average day in May from a 1961-1990 climatology). - *Parameter example:* - ``identity='grid_mapping_name:rotated_latitude_longitude'`` + Calculation requires two or three collapses, depending on the + quantity being created, all of which are grouped collapses. 
Each + collapse method needs to indicate its climatological nature with + one of the following qualifiers, - *Parameter example:* - ``identity='transverse_mercator'`` + ================ ======================= + Method qualifier Associated keyword + ================ ======================= + ``within years`` *within_years* + ``within days`` *within_days* + ``over years`` *over_years* (optional) + ``over days`` *over_days* (optional) + ================ ======================= - *Parameter example:* - ``identity='coordinatereference1'`` + and the associated keyword specifies how the method is to be + applied. - *Parameter example:* - ``identity='key%coordinatereference1'`` + *Example* + Calculate the multiannual average of the seasonal means: - *Parameter example:* - ``identity='ncvar%lat_lon'`` + >>> b = a.collapse('T: mean within years T: mean over years', + ... within_years=cf.seasons(), weights=True) - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + *Example:* + Calculate the multiannual variance of the seasonal + minima. Note that the units of the result have been changed + from 'K' to 'K2': - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + >>> b = a.collapse('T: minimum within years T: variance over years', + ... within_years=cf.seasons(), weights=True) - :Returns: - - `CoordinateReference` or `str` - The selected coordinate reference construct, or its key. - - **Examples:** + When collapsing over years, it is assumed by default that each + portion of the annual cycle is collapsed over all years that are + present. This is the case in the above two examples. It is + possible, however, to restrict the years to be included, or group + them into chunks, with the *over_years* keyword. - TODO + *Example:* + Calculate the multiannual average of the seasonal means in 5 + year chunks: - """ - if construct is None: - return self.coordinate_reference( - identity=identity, key=key, default=default - ) + >>> b = a.collapse( + ... 'T: mean within years T: mean over years', weights=True, + ... within_years=cf.seasons(), over_years=cf.Y(5) + ... ) - out = [] + *Example:* + Calculate the multiannual average of the seasonal means, + restricting the years from 1963 to 1968: - c_key = self.construct(construct, key=True, default=None) - if c_key is None: - return self._default( - default, f"Can't identify construct from {construct!r}" - ) + >>> b = a.collapse( + ... 'T: mean within years T: mean over years', weights=True, + ... within_years=cf.seasons(), + ... over_years=cf.year(cf.wi(1963, 1968)) + ... ) - for cr_key, ref in tuple( - self.coordinate_references(todict=True).items() - ): - if c_key in [ - ref.coordinates(), - ref.coordinate_conversion.domain_ancillaries().values(), - ]: - if key: - if cr_key not in out: - out.append(cr_key) - elif ref not in out: - out.append(ref) + Similarly for collapses over days, it is assumed by default that + each portion of the diurnal cycle is collapsed over all days that + are present, But it is possible to restrict the days to be + included, or group them into chunks, with the *over_days* keyword. - continue + The calculation can be done with multiple collapse calls, which + can be useful if the interim stages are needed independently, but + be aware that the interim field constructs will have + non-CF-compliant cell method constructs. 
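        As a sketch, reusing the qualifiers introduced above, the same
        kind of statistic may instead be formed in a single call, which
        avoids the non-CF-compliant interim cell methods:

        >>> b = a.collapse(
        ...     'T: standard_deviation within years T: maximum over years',
        ...     within_years=cf.seasons(), weights=True
        ... )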
- return out + *Example:* + Calculate the multiannual maximum of the seasonal standard + deviations with two separate collapse calls: - def set_coordinate_reference( - self, coordinate_reference, key=None, field=None, strict=True - ): - """Set a coordinate reference construct. + >>> b = a.collapse('T: standard_deviation within years', + ... within_years=cf.seasons(), weights=True) - By default, this is equivalent to using the `set_construct` - method. If, however, the *field* parameter has been set then it is - assumed to be a field construct that contains the new coordinate - reference construct. In this case, existing coordinate and domain - ancillary constructs will be referenced by the inserted coordinate - reference construct, based on those which are referenced from the - other parent field construct (given by the *field* parameter). - .. versionadded:: 3.0.0 + .. versionadded:: 1.0 - .. seealso:: `set_construct` + .. seealso:: `bin`, `cell_area`, `convolution_filter`, + `moving_window`, `radius`, `weights` :Parameters: - coordinate_reference: `CoordinateReference` - The coordinate reference construct to be inserted. - - key: `str`, optional - The construct identifier to be used for the construct. If - not set then a new, unique identifier is created - automatically. If the identifier already exists then the - existing construct will be replaced. + method: `str` + Define the collapse method. All of the axes specified by + the *axes* parameter are collapsed simultaneously by this + method. The method is given by one of the following + strings (see + https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods + for precise definitions): - *Parameter example:* - ``key='coordinatereference1'`` + ============================ ============================ ======== + *method* Description Weighted + ============================ ============================ ======== + ``'maximum'`` The maximum of the values. Never - field: `Field`, optional - A parent field construct that contains the new coordinate - reference construct. + ``'minimum'`` The minimum of the values. Never - strict: `bool`, optional - If False then allow non-strict identities for - identifying coordinate and domain ancillary metadata - constructs. + ``'maximum_absolute_value'`` The maximum of the absolute Never + values. - :Returns: + ``'minimum_absolute_value'`` The minimum of the absolute Never + values. - `str` - The construct identifier for the coordinate reference - construct. + ``'mid_range'`` The average of the maximum Never + and the minimum of the + values. - """ - if field is None: - return self.set_construct(coordinate_reference, key=key, copy=True) + ``'median'`` The median of the values. Never - # Still here? - ref = coordinate_reference.copy() + ``'range'`` The absolute difference Never + between the maximum and the + minimum of the values. - coordinates = field.coordinates(todict=True) - domain_ancillaries = field.domain_ancillaries(todict=True) + ``'sum'`` The sum of the values. Never - ckeys = [] - for value in coordinate_reference.coordinates(): - if value in coordinates: - identity = coordinates[value].identity(strict=strict) - ckeys.append(self.coordinate(identity, key=True, default=None)) + ``'sum_of_squares'`` The sum of the squares of Never + values. - ref.clear_coordinates() - ref.set_coordinates(ckeys) + ``'sample_size'`` The sample size, i.e. the Never + number of non-missing + values. 
- coordinate_conversion = coordinate_reference.coordinate_conversion + ``'sum_of_weights'`` The sum of weights, as Never + would be used for other + calculations. - dakeys = {} - for term, value in coordinate_conversion.domain_ancillaries().items(): - if value in domain_ancillaries: - identity = domain_ancillaries[value].identity(strict=strict) - dakeys[term] = self.domain_ancillary( - identity, key=True, default=None - ) - else: - dakeys[term] = None + ``'sum_of_weights2'`` The sum of squares of Never + weights, as would be used + for other calculations. - ref.coordinate_conversion.clear_domain_ancillaries() - ref.coordinate_conversion.set_domain_ancillaries(dakeys) + ``'mean'`` The weighted or unweighted May be + mean of the values. - return self.set_construct(ref, key=key, copy=False) + ``'mean_absolute_value'`` The mean of the absolute May be + values. - @_deprecated_kwarg_check("i") - @_manage_log_level_via_verbosity - def collapse( - self, - method, - axes=None, - squeeze=False, - mtol=1, - weights=None, - ddof=1, - a=None, - inplace=False, - group=None, - regroup=False, - within_days=None, - within_years=None, - over_days=None, - over_years=None, - coordinate=None, - group_by=None, - group_span=None, - group_contiguous=1, - measure=False, - scale=None, - radius="earth", - great_circle=False, - verbose=None, - _create_zero_size_cell_bounds=False, - _update_cell_methods=True, - i=False, - _debug=False, - **kwargs, - ): - """Collapse axes of the field. + ``'mean_of_upper_decile'`` The mean of the upper group May be + of data values defined by + the upper tenth of their + distribution. - Collapsing one or more dimensions reduces their size and replaces - the data along those axes with representative statistical - values. The result is a new field construct with consistent - metadata for the collapsed values. + ``'variance'`` The weighted or unweighted May be + variance of the values, with + a given number of degrees of + freedom. - By default all axes with size greater than 1 are collapsed - completely (i.e. to size 1) with a given collapse method. + ``'standard_deviation'`` The weighted or unweighted May be + standard deviation of the + values, with a given number + of degrees of freedom. - *Example:* - Find the minimum of the entire data: + ``'root_mean_square'`` The square root of the May be + weighted or unweighted mean + of the squares of the + values. - >>> b = a.collapse('minimum') + ``'integral'`` The integral of values. Always + ============================ ============================ ======== - The collapse can also be applied to any subset of the field - construct's dimensions. In this case, the domain axis and - coordinate constructs for the non-collapsed dimensions remain the - same. This is implemented either with the axes keyword, or with a - CF-netCDF cell methods-like syntax for describing both the - collapse dimensions and the collapse method in a single - string. The latter syntax uses construct identities instead of - netCDF dimension names to identify the collapse axes. + * Collapse methods that are "Never" weighted ignore the + *weights* parameter, even if it is set. - Statistics may be created to represent variation over one - dimension or a combination of dimensions. + * Collapse methods that "May be" weighted will only be + weighted if the *weights* parameter is set. 
- *Example:* - Two equivalent techniques for creating a field construct of - temporal maxima at each horizontal location: + * Collapse methods that are "Always" weighted require the + *weights* parameter to be set. - >>> b = a.collapse('maximum', axes='T') - >>> b = a.collapse('T: maximum') + An alternative form of providing the collapse method is to + provide a CF cell methods-like string. In this case an + ordered sequence of collapses may be defined and both the + collapse methods and their axes are provided. The axes are + interpreted as for the *axes* parameter, which must not + also be set. For example: - *Example:* - Find the horizontal maximum, with two equivalent techniques. + >>> g = f.collapse( + ... 'time: max (interval 1 hr) X: Y: mean dim3: sd') - >>> b = a.collapse('maximum', axes=['X', 'Y']) - >>> b = a.collapse('X: Y: maximum') + is equivalent to: - Variation over horizontal area may also be specified by the - special identity 'area'. This may be used for any horizontal - coordinate reference system. + >>> g = f.collapse('max', axes='time') + >>> g = g.collapse('mean', axes=['X', 'Y']) + >>> g = g.collapse('sd', axes='dim3') - *Example:* - Find the horizontal maximum using the special identity 'area': + Climatological collapses are carried out if a *method* + string contains any of the modifiers ``'within days'``, + ``'within years'``, ``'over days'`` or ``'over + years'``. For example, to collapse a time axis into + multiannual means of calendar monthly minima: - >>> b = a.collapse('area: maximum') + >>> g = f.collapse( + ... 'time: minimum within years T: mean over years', + ... within_years=cf.M() + ... ) + which is equivalent to: - **Collapse methods** + >>> g = f.collapse( + ... 'time: minimum within years', within_years=cf.M()) + >>> g = g.collapse('mean over years', axes='T') - The following collapse methods are available (see - https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods - for precise definitions): + axes: (sequence of) `str`, optional + The axes to be collapsed, defined by those which would be + selected by passing each given axis description to a call + of the field construct's `domain_axis` method. For + example, for a value of ``'X'``, the domain axis construct + returned by ``f.domain_axis('X')`` is selected. If a + selected axis has size 1 then it is ignored. By default + all axes with size greater than 1 are collapsed. - ============================ ============================ - Method Description - ============================ ============================ - ``'maximum'`` The maximum of the values. + *Parameter example:* + ``axes='X'`` - ``'minimum'`` The minimum of the values. + *Parameter example:* + ``axes=['X']`` - ``'maximum_absolute_value'`` The maximum of the absolute - values. + *Parameter example:* + ``axes=['X', 'Y']`` - ``'minimum_absolute_value'`` The minimum of the absolute - values. + *Parameter example:* + ``axes=['Z', 'time']`` - ``'mid_range'`` The average of the maximum - and the minimum of the - values. + If the *axes* parameter has the special value ``'area'`` + then it is assumed that the X and Y axes are intended. - ``'median'`` The median of the values. + *Parameter example:* + ``axes='area'`` is equivalent to ``axes=['X', 'Y']``. - ``'range'`` The absolute difference - between the maximum and the - minimum of the values. - - ``'sum'`` The sum of the values. - - ``'sum_of_squares'`` The sum of the squares of - values. + *Parameter example:* + ``axes=['area', Z']`` is equivalent to ``axes=['X', 'Y', + 'Z']``. 
- ``'sample_size'`` The sample size, i.e. the - number of non-missing - values. + weights: optional + Specify the weights for the collapse axes. The weights + are, in general, those that would be returned by this call + of the field construct's `weights` method: + ``f.weights(weights, axes=axes, measure=measure, + scale=scale, radius=radius, great_circle=great_circle, + components=True)``. See the *axes*, *measure*, *scale*, + *radius* and *great_circle* parameters and + `cf.Field.weights` for details. - ``'sum_of_weights'`` The sum of weights, as - would be used for other - calculations. + .. note:: By default *weights* is `None`, resulting in + **unweighted calculations**. - ``'sum_of_weights2'`` The sum of squares of - weights, as would be used - for other calculations. + If the alternative form of providing the collapse method + and axes combined as a CF cell methods-like string via the + *method* parameter has been used, then the *axes* + parameter is ignored and the axes are derived from the + *method* parameter. For example, if *method* is ``'T: + area: minimum'`` then this defines axes of ``['T', + 'area']``. If *method* specifies multiple collapses, + e.g. ``'T: minimum area: mean'`` then this implies axes of + ``'T'`` for the first collapse, and axes of ``'area'`` for + the second collapse. - ``'mean'`` The weighted or unweighted - mean of the values. + .. note:: Setting *weights* to `True` is generally a good + way to ensure that all collapses are + appropriately weighted according to the field + construct's metadata. In this case, if it is not + possible to create weights for any axis then an + exception will be raised. - ``'mean_absolute_value'`` The mean of the absolute - values. + However, care needs to be taken if *weights* is + `True` when cell volume weights are desired. The + volume weights will be taken from a "volume" + cell measure construct if one exists, otherwise + the cell volumes will be calculated as being + proportional to the sizes of one-dimensional + vertical coordinate cells. In the latter case + **if the vertical dimension coordinates do not + define the actual height or depth thickness of + every cell in the domain then the weights will + be incorrect**. - ``'mean_of_upper_decile'`` The mean of the upper group - of data values defined by - the upper tenth of their - distribution. + *Parameter example:* + To specify weights based on the field construct's + metadata for all collapse axes use ``weights=True``. - ``'variance'`` The weighted or unweighted - variance of the values, with - a given number of degrees of - freedom. + *Parameter example:* + To specify weights based on cell areas use + ``weights='area'``. - ``'standard_deviation'`` The weighted or unweighted - standard deviation of the - values, with a given number - of degrees of freedom. + *Parameter example:* + To specify weights based on cell areas and linearly in + time you could set ``weights=('area', 'T')``. - ``'root_mean_square'`` The square root of the - weighted or unweighted mean - of the squares of the - values. + measure: `bool`, optional + Create weights which are cell measures, i.e. which + describe actual cell sizes (e.g. cell area) with + appropriate units (e.g. metres squared). By default the + weights are normalized and have arbitrary units. - ``'integral'`` The integral of values. - ============================ ============================ + Cell measures can be created for any combination of + axes. 
For example, cell measures for a time axis are the
            time span for each cell with canonical units of seconds;
            cell measures for the combination of four axes
            representing time and three dimensional space could have
            canonical units of metres cubed seconds.

            When collapsing with the ``'integral'`` method, *measure*
            must be True, and the units of the weights are
            incorporated into the units of the returned field
            construct.

            .. note:: Specifying cell volume weights via
                      ``weights=['X', 'Y', 'Z']`` or
                      ``weights=['area', 'Z']`` (or other equivalents)
                      will produce **an incorrect result if the
                      vertical dimension coordinates do not define the
                      actual height or depth thickness of every cell
                      in the domain**. In this case,
                      ``weights='volume'`` should be used instead,
                      which requires the field construct to have a
                      "volume" cell measure construct.

                      If ``weights=True`` then care also needs to be
                      taken, as a "volume" cell measure construct will
                      be used if present, otherwise the cell volumes
                      will be calculated using the size of the
                      vertical coordinate cells.

            .. versionadded:: 3.0.2

        scale: number, optional
            If set to a positive number then scale the weights so that
            they are less than or equal to that number. By default the
            weights are scaled to lie between 0 and 1 (i.e. *scale*
            is 1).

            *Parameter example:*
              To scale all weights so that they lie between 0 and 0.5:
              ``scale=0.5``.

            .. versionadded:: 3.0.2

        radius: optional
            Specify the radius used for calculating the areas of cells
            defined in spherical polar coordinates. The radius is that
            which would be returned by this call of the field
            construct's `~cf.Field.radius` method:
            ``f.radius(radius)``. See `cf.Field.radius` for details.

            By default *radius* is ``'earth'`` which means that if and
            only if the radius can not be found from the datums of any
            coordinate reference constructs, then the default radius
            is taken as 6371229 metres.

            .. 
versionadded:: 3.0.2 - *Example:* - Calculate the mean over the time and latitude axes, with - weights only applied to the latitude axis: + great_circle: `bool`, optional + If True then allow, if required, the derivation of i) area + weights from polygon geometry cells by assuming that each + cell part is a spherical polygon composed of great circle + segments; and ii) the derivation of line-length + weights from line geometry cells by assuming that each + line part is composed of great circle segments. - >>> b = a.collapse('T: Y: mean', weights='Y') + .. versionadded:: 3.2.0 - *Example* - Alternative syntax for specifying area weights: + squeeze: `bool`, optional + If True then size 1 collapsed axes are removed from the + output data array. By default the axes which are collapsed + are retained in the result's data array. - >>> b = a.collapse('area: mean', weights=True) + mtol: number, optional + Set the fraction of input data elements which is allowed + to contain missing data when contributing to an individual + output data element. Where this fraction exceeds *mtol*, + missing data is returned. The default is 1, meaning that a + missing datum in the output array occurs when its + contributing input array elements are all missing data. A + value of 0 means that a missing datum in the output array + occurs whenever any of its contributing input array + elements are missing data. Any intermediate value is + permitted. - An alternative technique for specifying weights is to set the - *weights* keyword to the output of a call to the `weights` method. + *Parameter example:* + To ensure that an output array element is a missing + datum if more than 25% of its input array elements are + missing data: ``mtol=0.25``. - *Example* - Alternative syntax for specifying weights: + ddof: number, optional + The delta degrees of freedom in the calculation of a + standard deviation or variance. The number of degrees of + freedom used in the calculation is (N-*ddof*) where N + represents the number of non-missing elements. By default + *ddof* is 1, meaning the standard deviation and variance + of the population are estimated according to the usual + formula with (N-1) in the denominator to avoid the bias + caused by the use of the sample mean (Bessel's + correction). - >>> b = a.collapse('area: mean', weights=a.weights('area')) + coordinate: optional + Specify how the cell coordinate values for collapsed axes + are placed. This has no effect on the cell bounds for the + collapsed axes, which always represent the extrema of the + input coordinates. - **Multiple collapses** + The *coordinate* parameter may be one of: - Multiple collapses normally require multiple calls to `collapse`: - one on the original field construct and then one on each interim - field construct. + =============== ========================================= + *coordinate* Description + =============== ========================================= + `None` This is the default. - *Example:* - Calculate the temporal maximum of the weighted areal means - using two independent calls: + If the collapse is a climatological time + collapse over years or over days then + assume a value of ``'min'``, otherwise + assume a value of ``'mid_range'``. - >>> b = a.collapse('area: mean', weights=True).collapse('T: maximum') + ``'mid_range'`` An output coordinate is the mean of + first and last input coordinate bounds + (or the first and last coordinates if + there are no bounds).
- If preferred, multiple collapses may be carried out in a single - call by using the CF-netCDF cell methods-like syntax (note that - the colon (:) is only used after the construct identity that - specifies each axis, and a space delimits the separate collapses). + ``'minimum'`` An output coordinate is the minimum of + the input coordinates. - *Example:* - Calculate the temporal maximum of the weighted areal means in - a single call, using the cf-netCDF cell methods-like syntax: + ``'maximum'`` An output coordinate is the maximum of + the input coordinates. + =============== ========================================= - >>> b =a.collapse('area: mean T: maximum', weights=True) + *Parameter example:* + ``coordinate='minimum'`` + group: optional + A grouped collapse is one for which an axis is not + collapsed completely to size 1. Instead, the collapse axis + is partitioned into non-overlapping groups and each group + is collapsed to size 1, independently of the other + groups. The results of the collapses are concatenated so + that the output axis has a size equal to the number of + groups. - **Grouped collapses** + An element of the collapse axis can not be a member of + more than one group, and may be a member of no + groups. Elements that are not selected by the *group* + parameter are excluded from the result. - A grouped collapse is one for which as axis is not collapsed - completely to size 1. Instead the collapse axis is partitioned - into non-overlapping groups and each group is collapsed to size - 1. The resulting axis will generally have more than one - element. For example, creating 12 annual means from a timeseries - of 120 months would be a grouped collapse. + The *group* parameter defines how the axis elements are + partitioned into groups, and may be one of: - Selected statistics for overlapping groups can be calculated with - the `moving_window` method. + =============== ========================================= + *group* Description + =============== ========================================= + `Data` Define groups by coordinate values that + span the given range. The first group + starts at the first coordinate bound of + the first axis element (or its coordinate + if there are no bounds) and spans the + defined group size. Each subsequent + group immediately follows the preceding + one. By default each group contains the + consecutive run of elements whose + coordinate values lie within the group + limits (see the *group_by* parameter). - The *group* keyword defines the size of the groups. Groups can be - defined in a variety of ways, including with `Query`, - `TimeDuration` and `Data` instances. + * By default each element will be in + exactly one group (see the *group_by*, + *group_span* and *group_contiguous* + parameters). - An element of the collapse axis can not be a member of more than - one group, and may be a member of no groups. Elements that are not - selected by the *group* keyword are excluded from the result. + * By default groups may contain different + numbers of elements. - *Example:* - Create annual maxima from a time series, defining a year to - start on 1st December. + * If no units are specified then the + units of the coordinates are assumed. - >>> b = a.collapse('T: maximum', group=cf.Y(month=12)) + `TimeDuration` Define groups by a time interval spanned + by the coordinates. The first group + starts at or before the first coordinate + bound of the first axis element (or its + coordinate if there are no bounds) and + spans the defined group size.
Each + subsequent group immediately follows the + preceding one. By default each group + contains the consecutive run of elements + whose coordinate values lie within the + group limits (see the *group_by* + parameter). - *Example:* - Find the maximum of each group of 6 elements along an axis. + * By default each element will be in + exactly one group (see the *group_by*, + *group_span* and *group_contiguous* + parameters). - >>> b = a.collapse('T: maximum', group=6) + * By default groups may contain different + numbers of elements. - *Example:* - Create December, January, February maxima from a time series. + * The start of the first group may be + before the first axis element, + depending on the offset defined by the + time duration. For example, if + ``group=cf.Y(month=12)`` then the first + group will start on the closest 1st + December to the first axis element. - >>> b = a.collapse('T: maximum', group=cf.djf()) + `Query` Define groups from elements whose + coordinates satisfy the query + condition. Multiple groups are created: + one for each maximally consecutive run + within the selected elements. - *Example:* - Create maxima for each 3-month season of a timeseries (DJF, MAM, - JJA, SON). + If a sequence of `Query` is provided then + groups are defined for each query. - >>> b = a.collapse('T: maximum', group=cf.seasons()) + * If a coordinate does not satisfy any of + the query conditions then its element + will not be in a group. - *Example:* - Calculate zonal means for the western and eastern hemispheres. + * By default groups may contain different + numbers of elements. - >>> b = a.collapse('X: mean', group=cf.Data(180, 'degrees')) + * If no units are specified then the + units of the coordinates are assumed. - Groups can be further described with the *group_span* parameter - (to include groups whose actual span is not equal to a given - value) and the *group_contiguous* parameter (to include - non-contiguous groups, or any contiguous group containing - overlapping cells). + * If an element is selected by two or + more queries then the latest one in the + sequence defines which group it will be + in. + `int` Define groups that contain the given + number of elements. The first group + starts with the first axis element and + spans the defined number of consecutive + elements. Each subsequent group + immediately follows the preceding one. - **Climatological statistics** + * By default each group has the defined + number of elements, apart from the last + group which may contain fewer elements + (see the *group_span* parameter). - Climatological statistics may be derived from corresponding - portions of the annual cycle in a set of years (e.g. the average - January temperatures in the climatology of 1961-1990, where the - values are derived by averaging the 30 Januarys from the separate - years); or from corresponding portions of the diurnal cycle in a - set of days (e.g. the average temperatures for each hour in the - day for May 1997). A diurnal climatology may also be combined with - a multiannual climatology (e.g. the minimum temperature for each - hour of the average day in May from a 1961-1990 climatology). + `numpy.ndarray` Define groups by selecting elements that + map to the same value in the `numpy` + array. The array must contain integers + and have the same length as the axis to + be collapsed and its sequence of values + corresponds to the axis elements.
Each + group contains the elements which + correspond to a common non-negative + integer value in the numpy array. Upon + output, the collapsed axis is arranged in + order of increasing group number. See the + *regroup* parameter, which allows the + creation of such a `numpy.array` for a + given grouped collapse. - Calculation requires two or three collapses, depending on the - quantity being created, all of which are grouped collapses. Each - collapse method needs to indicate its climatological nature with - one of the following qualifiers, + * The groups do not have to be in runs of + consective elements; they may be + scattered throughout the axis. - ================ ======================= - Method qualifier Associated keyword - ================ ======================= - ``within years`` *within_years* - ``within days`` *within_days* - ``over years`` *over_years* (optional) - ``over days`` *over_days* (optional) - ================ ======================= + * An element which corresponds to a + negative integer in the array will not + be in any group. + =============== ========================================= - and the associated keyword specifies how the method is to be - applied. + *Parameter example:* + To define groups of 10 kilometres: ``group=cf.Data(10, + 'km')``. - *Example* - Calculate the multiannual average of the seasonal means: + *Parameter example:* + To define groups of 5 days, starting and ending at + midnight on each day: ``group=cf.D(5)`` (see `cf.D`). - >>> b = a.collapse('T: mean within years T: mean over years', - ... within_years=cf.seasons(), weights=True) + *Parameter example:* + To define groups of 1 calendar month, starting and + ending at day 16 of each month: ``group=cf.M(day=16)`` + (see `cf.M`). - *Example:* - Calculate the multiannual variance of the seasonal - minima. Note that the units of the result have been changed - from 'K' to 'K2': + *Parameter example:* + To define groups of the season MAM in each year: + ``group=cf.mam()`` (see `cf.mam`). - >>> b = a.collapse('T: minimum within years T: variance over years', - ... within_years=cf.seasons(), weights=True) + *Parameter example:* + To define groups of the seasons DJF and JJA in each + year: ``group=[cf.jja(), cf.djf()]``. To define groups + for seasons DJF, MAM, JJA and SON in each year: + ``group=cf.seasons()`` (see `cf.djf`, `cf.jja` and + `cf.season`). - When collapsing over years, it is assumed by default that each - portion of the annual cycle is collapsed over all years that are - present. This is the case in the above two examples. It is - possible, however, to restrict the years to be included, or group - them into chunks, with the *over_years* keyword. + *Parameter example:* + To define groups for longitude elements less than or + equal to 90 degrees and greater than 90 degrees: + ``group=[cf.le(90, 'degrees'), cf.gt(90, 'degrees')]`` + (see `cf.le` and `cf.gt`). - *Example:* - Calculate the multiannual average of the seasonal means in 5 - year chunks: + *Parameter example:* + To define groups of 5 elements: ``group=5``. - >>> b = a.collapse( - ... 'T: mean within years T: mean over years', weights=True, - ... within_years=cf.seasons(), over_years=cf.Y(5) - ... ) + *Parameter example:* + For an axis of size 8, create two groups, the first + containing the first and last elements and the second + containing the 3rd, 4th and 5th elements, whilst + ignoring the 2nd, 6th and 7th elements: + ``group=numpy.array([0, -1, 4, 4, 4, -1, -2, 0])``. 
- *Example:* - Calculate the multiannual average of the seasonal means, - restricting the years from 1963 to 1968: + regroup: `bool`, optional + If True then, for grouped collapses, do not collapse the + field construct, but instead return a `numpy.array` of + integers which identifies the groups defined by the + *group* parameter. Each group contains the elements which + correspond to a common non-negative integer value in the + numpy array. Elements corresponding to negative integers + are not in any group. The array may subsequently be used + as the value of the *group* parameter in a separate + collapse. - >>> b = a.collapse( - ... 'T: mean within years T: mean over years', weights=True, - ... within_years=cf.seasons(), - ... over_years=cf.year(cf.wi(1963, 1968)) - ... ) + For example: - Similarly for collapses over days, it is assumed by default that - each portion of the diurnal cycle is collapsed over all days that - are present, But it is possible to restrict the days to be - included, or group them into chunks, with the *over_days* keyword. + >>> groups = f.collapse('time: mean', group=10, regroup=True) + >>> g = f.collapse('time: mean', group=groups) - The calculation can be done with multiple collapse calls, which - can be useful if the interim stages are needed independently, but - be aware that the interim field constructs will have - non-CF-compliant cell method constructs. + is equivalent to: - *Example:* - Calculate the multiannual maximum of the seasonal standard - deviations with two separate collapse calls: + >>> g = f.collapse('time: mean', group=10) - >>> b = a.collapse('T: standard_deviation within years', - ... within_years=cf.seasons(), weights=True) + group_by: optional + Specify how coordinates are assigned to the groups defined + by the *group*, *within_days* or *within_years* + parameters. Ignored unless one of these parameters is set + to a `Data` or `TimeDuration` object. + The *group_by* parameter may be one of: - .. versionadded:: 1.0 + ============ ============================================ + *group_by* Description + ============ ============================================ + `None` This is the default. - .. seealso:: `bin`, `cell_area`, `convolution_filter`, - `moving_window`, `radius`, `weights` + If the groups are defined by the *group* + parameter (i.e. collapses other than + climatological time collapses) then assume a + value of ``'coords'``. - :Parameters: + If the groups are defined by the + *within_days* or *within_years* parameter + (i.e. climatological time collapses) then + assume a value of ``'bounds'``. - method: `str` - Define the collapse method. All of the axes specified by - the *axes* parameter are collapsed simultaneously by this - method. The method is given by one of the following - strings (see - https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods - for precise definitions): + ``'coords'`` Each group contains the axis elements whose + coordinate values lie within the group + limits. Every element will be in a group. - ============================ ============================ ======== - *method* Description Weighted - ============================ ============================ ======== - ``'maximum'`` The maximum of the values. Never + ``'bounds'`` Each group contains the axis elements whose + upper and lower coordinate bounds both lie + within the group limits. 
Some elements may + not be inside any group, either because the + group limits do not coincide with coordinate + bounds or because the group size is + sufficiently small. + ============ ============================================ - ``'minimum'`` The minimum of the values. Never + group_span: optional + Specify how to treat groups that may not span the desired + range. For example, when creating 3-month means, the + *group_span* parameter can be used to allow groups which + only contain 1 or 2 months of data. - ``'maximum_absolute_value'`` The maximum of the absolute Never - values. + By default, *group_span* is `None`. This means that only + groups whose span equals the size specified by the + definition of the groups are collapsed; unless the groups + have been defined by one or more `Query` objects, in which + case the default behaviour is to collapse all groups, + regardless of their size. - ``'minimum_absolute_value'`` The minimum of the absolute Never - values. + In effect, the *group_span* parameter defaults to `True` + unless the groups have been defined by one or more `Query` + objects, in which case *group_span* defaults to `False`. - ``'mid_range'`` The average of the maximum Never - and the minimum of the - values. + The different behaviour when the groups have been defined + by one or more `Query` objects is necessary because a + `Query` object can only define the composition of a group, + and not its size (see the parameter examples below for how + to specify a group span in this case). - ``'median'`` The median of the values. Never + .. note:: Prior to version 3.1.0, the default value of + *group_span* was effectively `False`. - ``'range'`` The absolute difference Never - between the maximum and the - minimum of the values. + In general, the span of a group is the absolute difference + between the lower bound of its first element and the upper + bound of its last element. The only exception to this + occurs if *group_span* is (by default or by explicit + setting) an integer, in which case the span of a group is + the number of elements in the group. See also the + *group_contiguous* parameter for how to deal with groups + that have gaps in their coverage. - ``'sum'`` The sum of the values. Never + The *group_span* parameter is only applied to groups + defined by the *group*, *within_days* or *within_years* + parameters, and is otherwise ignored. - ``'sum_of_squares'`` The sum of the squares of Never - values. + The *group_span* parameter may be one of: - ``'sample_size'`` The sample size, i.e. the Never - number of non-missing - values. + ============== ========================================== + *group_span* Description + ============== ========================================== + `None` This is the default. Apply a value of + `True` or `False` depending on how the + groups have been defined. - ``'sum_of_weights'`` The sum of weights, as Never - would be used for other - calculations. + `True` Ignore groups whose span is not equal to + the size specified by the definition of + the groups. Only applicable if the groups + are defined by a `Data`, `TimeDuration` or + `int` object, and this is the default in + this case. - ``'sum_of_weights2'`` The sum of squares of Never - weights, as would be used - for other calculations. + `False` Collapse all groups, regardless of their + size. This is the default if the groups + are defined by one or more `Query` + objects. - ``'mean'`` The weighted or unweighted May be - mean of the values.
+ `Data` Ignore groups whose span is not equal to + the given size. If no units are specified + then the units of the coordinates are + assumed. - ``'mean_absolute_value'`` The mean of the absolute May be - values. + `TimeDuration` Ignore groups whose span is not equal to + the given time duration. - ``'mean_of_upper_decile'`` The mean of the upper group May be - of data values defined by - the upper tenth of their - distribution. + `int` Ignore groups that contain fewer than the + given number of elements. + ============== ========================================== - ``'variance'`` The weighted or unweighted May be - variance of the values, with - a given number of degrees of - freedom. + *Parameter example:* + To collapse into groups of 10km, ignoring any groups + that span less than that distance: ``group=cf.Data(10, + 'km'), group_span=True``. - ``'standard_deviation'`` The weighted or unweighted May be - standard deviation of the - values, with a given number - of degrees of freedom. + *Parameter example:* + To collapse a daily timeseries into monthly groups, + ignoring any groups that span less than 1 calendar + month: ``group=cf.M(), group_span=True`` + (see `cf.M`). - ``'root_mean_square'`` The square root of the May be - weighted or unweighted mean - of the squares of the - values. + *Parameter example:* + To collapse a timeseries into seasonal groups, ignoring + any groups that span less than three months: + ``group=cf.seasons(), group_span=cf.M(3)`` (see + `cf.seasons` and `cf.M`). - ``'integral'`` The integral of values. Always - ============================ ============================ ======== + group_contiguous: `int`, optional + Specify how to treat groups whose elements are not + contiguous or have overlapping cells. For example, when + creating December to February means, the + *group_contiguous* parameter can be used to allow groups + which have no data for January. - * Collapse methods that are "Never" weighted ignore the - *weights* parameter, even if it is set. + A group is considered to be contiguous unless it has + coordinates with bounds that do not coincide for adjacent + cells. The definition may be expanded to include groups + whose coordinate bounds overlap. - * Collapse methods that "May be" weighted will only be - weighted if the *weights* parameter is set. + By default *group_contiguous* is ``1``, meaning that + non-contiguous groups, and those whose coordinate bounds + overlap, are not collapsed. - * Collapse methods that are "Always" weighted require the - *weights* parameter to be set. + .. note:: Prior to version 3.1.0, the default value of + *group_contiguous* was ``0``. - An alternative form of providing the collapse method is to - provide a CF cell methods-like string. In this case an - ordered sequence of collapses may be defined and both the - collapse methods and their axes are provided. The axes are - interpreted as for the *axes* parameter, which must not - also be set. For example: + The *group_contiguous* parameter is only applied to groups + defined by the *group*, *within_days* or *within_years* + parameters, and is otherwise ignored. - >>> g = f.collapse( - ... 'time: max (interval 1 hr) X: Y: mean dim3: sd') + The *group_contiguous* parameter may be one of: - is equivalent to: + =================== ===================================== + *group_contiguous* Description + =================== ===================================== + ``0`` Allow non-contiguous groups, and + those containing overlapping cells.
- >>> g = f.collapse('max', axes='time') - >>> g = g.collapse('mean', axes=['X', 'Y']) - >>> g = g.collapse('sd', axes='dim3') + ``1`` This is the default. Ignore + non-contiguous groups, as well as + contiguous groups containing + overlapping cells. - Climatological collapses are carried out if a *method* - string contains any of the modifiers ``'within days'``, - ``'within years'``, ``'over days'`` or ``'over - years'``. For example, to collapse a time axis into - multiannual means of calendar monthly minima: + ``2`` Ignore non-contiguous groups, + allowing contiguous groups containing + overlapping cells. + =================== ===================================== - >>> g = f.collapse( - ... 'time: minimum within years T: mean over years', - ... within_years=cf.M() - ... ) + *Parameter example:* + To allow non-contiguous groups, and those containing + overlapping cells: ``group_contiguous=0``. - which is equivalent to: + within_days: optional + Define the groups for creating CF "within days" + climatological statistics. - >>> g = f.collapse( - ... 'time: minimum within years', within_years=cf.M()) - >>> g = g.collapse('mean over years', axes='T') + Each group contains elements whose coordinates span a time + interval of up to one day. The results of the collapses + are concatenated so that the output axis has a size equal + to the number of groups. - axes: (sequence of) `str`, optional - The axes to be collapsed, defined by those which would be - selected by passing each given axis description to a call - of the field construct's `domain_axis` method. For - example, for a value of ``'X'``, the domain axis construct - returned by ``f.domain_axis('X')`` is selected. If a - selected axis has size 1 then it is ignored. By default - all axes with size greater than 1 are collapsed. + .. note:: For CF compliance, a "within days" collapse + should be followed by an "over days" collapse. - *Parameter example:* - ``axes='X'`` + The *within_days* parameter defines how the elements are + partitioned into groups, and may be one of: - *Parameter example:* - ``axes=['X']`` - - *Parameter example:* - ``axes=['X', 'Y']`` - - *Parameter example:* - ``axes=['Z', 'time']`` + ============== ========================================== + *within_days* Description + ============== ========================================== + `TimeDuration` Defines the group size in terms of a time + interval of up to one day. The first group + starts at or before the first coordinate + bound of the first axis element (or its + coordinate if there are no bounds) and + spans the defined group size. Each + subsequent group immediately follows the + preceding one. By default each group + contains the consecutive run of elements + whose coordinate cells lie within the + group limits (see the *group_by* + parameter). - If the *axes* parameter has the special value ``'area'`` - then it is assumed that the X and Y axes are intended. + * Groups may contain different numbers of + elements. - *Parameter example:* - ``axes='area'`` is equivalent to ``axes=['X', 'Y']``. + * The start of the first group may be + before the first axis element, + depending on the offset defined by the + time duration. For example, if + ``group=cf.D(hour=12)`` then the first + group will start on the closest midday + to the first axis element. - *Parameter example:* - ``axes=['area', Z']`` is equivalent to ``axes=['X', 'Y', - 'Z']``. + `Query` Define groups from elements whose + coordinates satisfy the query + condition.
Multiple groups are created: + one for each maximally consecutive run + within the selected elements. - weights: optional - Specify the weights for the collapse axes. The weights - are, in general, those that would be returned by this call - of the field construct's `weights` method: - ``f.weights(weights, axes=axes, measure=measure, - scale=scale, radius=radius, great_circle=great_circle, - components=True)``. See the *axes*, *measure*, *scale*, - *radius* and *great_circle* parameters and - `cf.Field.weights` for details. + If a sequence of `Query` is provided then + groups are defined for each query. - .. note:: By default *weights* is `None`, resulting in - **unweighted calculations**. + * Groups may contain different numbers of + elements. - If the alternative form of providing the collapse method - and axes combined as a CF cell methods-like string via the - *method* parameter has been used, then the *axes* - parameter is ignored and the axes are derived from the - *method* parameter. For example, if *method* is ``'T: - area: minimum'`` then this defines axes of ``['T', - 'area']``. If *method* specifies multiple collapses, - e.g. ``'T: minimum area: mean'`` then this implies axes of - ``'T'`` for the first collapse, and axes of ``'area'`` for - the second collapse. + * If no units are specified then the units + of the coordinates are assumed. - .. note:: Setting *weights* to `True` is generally a good - way to ensure that all collapses are - appropriately weighted according to the field - construct's metadata. In this case, if it is not - possible to create weights for any axis then an - exception will be raised. + * If a coordinate does not satisfy any of + the conditions then its element will not + be in a group. - However, care needs to be taken if *weights* is - `True` when cell volume weights are desired. The - volume weights will be taken from a "volume" - cell measure construct if one exists, otherwise - the cell volumes will be calculated as being - proportional to the sizes of one-dimensional - vertical coordinate cells. In the latter case - **if the vertical dimension coordinates do not - define the actual height or depth thickness of - every cell in the domain then the weights will - be incorrect**. + * If an element is selected by two or more + queries then the latest one in the + sequence defines which group it will be + in. + ============== ========================================== *Parameter example:* - To specify weights based on the field construct's - metadata for all collapse axes use ``weights=True``. + To define groups of 6 hours, starting at 00:00, 06:00, + 12:00 and 18:00: ``within_days=cf.h(6)`` (see `cf.h`). *Parameter example:* - To specify weights based on cell areas use - ``weights='area'``. + To define groups of 1 day, starting at 06:00: + ``within_days=cf.D(1, hour=6)`` (see `cf.D`). *Parameter example:* - To specify weights based on cell areas and linearly in - time you could set ``weights=('area', 'T')``. + To define groups of 00:00 to 06:00 within each day, + ignoring the rest of each day: + ``within_days=cf.hour(cf.le(6))`` (see `cf.hour` and + `cf.le`). - measure: `bool`, optional - Create weights which are cell measures, i.e. which - describe actual cell sizes (e.g. cell area) with - appropriate units (e.g. metres squared). By default the - weights are normalized and have arbitrary units. 
+ *Parameter example:* + To define groups of 00:00 to 06:00 and 18:00 to 24:00 + within each day, ignoring the rest of each day: + ``within_days=[cf.hour(cf.le(6)), cf.hour(cf.gt(18))]`` + (see `cf.gt`, `cf.hour` and `cf.le`). - Cell measures can be created for any combination of - axes. For example, cell measures for a time axis are the - time span for each cell with canonical units of seconds; - cell measures for the combination of four axes - representing time and three dimensional space could have - canonical units of metres cubed seconds. + within_years: optional + Define the groups for creating CF "within years" + climatological statistics. - When collapsing with the ``'integral'`` method, *measure* - must be True, and the units of the weights are - incorporated into the units of the returned field - construct. + Each group contains elements whose coordinates span a time + interval of up to one calendar year. The results of the + collapses are concatenated so that the output axis has a + size equal to the number of groups. - .. note:: Specifying cell volume weights via - ``weights=['X', 'Y', 'Z']`` or - ``weights=['area', 'Z']`` (or other equivalents) - will produce **an incorrect result if the - vertical dimension coordinates do not define the - actual height or depth thickness of every cell - in the domain**. In this case, - ``weights='volume'`` should be used instead, - which requires the field construct to have a - "volume" cell measure construct. + .. note:: For CF compliance, a "within years" collapse + should be followed by an "over years" collapse. - If ``weights=True`` then care also needs to be - taken, as a "volume" cell measure construct will - be used if present, otherwise the cell volumes - will be calculated using the size of the - vertical coordinate cells. + The *within_years* parameter defines how the elements are + partitioned into groups, and may be one of: - .. versionadded:: 3.0.2 + ============== ========================================== + *within_years* Description + ============== ========================================== + `TimeDuration` Define the group size in terms of a time + interval of up to one calendar year. The + first group starts at or before the first + coordinate bound of the first axis element + (or its coordinate if there are no bounds) + and spans the defined group size. Each + subsequent group immediately follows the + preceding one. By default each group + contains the consecutive run of elements + whose coordinate cells lie within the + group limits (see the *group_by* + parameter). - scale: number, optional - If set to a positive number then scale the weights so that - they are less than or equal to that number. By default the - weights are scaled to lie between 0 and 1 (i.e. *scale* - is 1). + * Groups may contain different numbers of + elements. - *Parameter example:* - To scale all weights so that they lie between 0 and 0.5: - ``scale=0.5``. + * The start of the first group may be + before the first axis element, + depending on the offset defined by the + time duration. For example, if + ``group=cf.Y(month=12)`` then the first + group will start on the closest 1st + December to the first axis element. - .. versionadded:: 3.0.2 + `Query` Define groups from elements whose + coordinates satisfy the query + condition. Multiple groups are created: + one for each maximally consecutive run + within the selected elements.
- radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: - ``f.radius(radius)``. See the `cf.Field.radius` for - details. + If a sequence of `Query` is provided then + groups are defined for each query. - By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + * The first group may start outside of the + range of coordinates (the start of the + first group is controlled by parameters + of the `TimeDuration`). - .. versionadded:: 3.0.2 + * If group boundaries do not coincide with + coordinate bounds then some elements may + not be inside any group. - great_circle: `bool`, optional - If True then allow, if required, the derivation of i) area - weights from polygon geometry cells by assuming that each - cell part is a spherical polygon composed of great circle - segments; and ii) and the derivation of line-length - weights from line geometry cells by assuming that each - line part is composed of great circle segments. + * If the group size is sufficiently small + then some elements may not be inside any + group. - .. versionadded:: 3.2.0 + * Groups may contain different numbers of + elements. + ============== ========================================== - squeeze: `bool`, optional - If True then size 1 collapsed axes are removed from the - output data array. By default the axes which are collapsed - are retained in the result's data array. + *Parameter example:* + To define groups of 90 days: ``within_years=cf.D(90)`` + (see `cf.D`). - mtol: number, optional - Set the fraction of input data elements which is allowed - to contain missing data when contributing to an individual - output data element. Where this fraction exceeds *mtol*, - missing data is returned. The default is 1, meaning that a - missing datum in the output array occurs when its - contributing input array elements are all missing data. A - value of 0 means that a missing datum in the output array - occurs whenever any of its contributing input array - elements are missing data. Any intermediate value is - permitted. + *Parameter example:* + To define groups of 3 calendar months, starting on the + 15th of a month: ``within_years=cf.M(3, day=15)`` (see + `cf.M`). *Parameter example:* - To ensure that an output array element is a missing - datum if more than 25% of its input array elements are - missing data: ``mtol=0.25``. + To define groups for the season MAM within each year: + ``within_years=cf.mam()`` (see `cf.mam`). - ddof: number, optional - The delta degrees of freedom in the calculation of a - standard deviation or variance. The number of degrees of - freedom used in the calculation is (N-*ddof*) where N - represents the number of non-missing elements. By default - *ddof* is 1, meaning the standard deviation and variance - of the population is estimated according to the usual - formula with (N-1) in the denominator to avoid the bias - caused by the use of the sample mean (Bessel's - correction). - - coordinate: optional - Specify how the cell coordinate values for collapsed axes - are placed. This has no effect on the cell bounds for the - collapsed axes, which always represent the extrema of the - input coordinates. 
+ *Parameter example:* + To define groups for February and for November to + December within each year: ``within_years=[cf.month(2), + cf.month(cf.ge(11))]`` (see `cf.month` and `cf.ge`). - The *coordinate* parameter may be one of: + over_days: optional + Define the groups for creating CF "over days" + climatological statistics. - =============== ========================================= - *coordinate* Description - =============== ========================================= - `None` This is the default. + By default (or if *over_days* is `None`) each group + contains all elements for which the time coordinate cell + lower bounds have a common time of day but different + dates, and for which the time coordinate cell upper bounds + also have a common time of day but different dates. The + collapsed time axis will have a size equal to the number + of groups that were found. - If the collapse is a climatological time - collapse over years or over days then - assume a value of ``'min'``, otherwise - assume value of ``'mid_range'``. + For example, elements corresponding to the two time + coordinate cells - ``'mid_range'`` An output coordinate is the mean of - first and last input coordinate bounds - (or the first and last coordinates if - there are no bounds). This is the - default. + | ``1999-12-31 06:00:00/1999-12-31 18:00:00`` + | ``2000-01-01 06:00:00/2000-01-01 18:00:00`` - ``'minimum'`` An output coordinate is the minimum of - the input coordinates. + would be together in a group; and elements corresponding + to the two time coordinate cells - ``'maximum'`` An output coordinate is the maximum of - the input coordinates. - =============== ========================================= + | ``1999-12-31 00:00:00/2000-01-01 00:00:00`` + | ``2000-01-01 00:00:00/2000-01-02 00:00:00`` - *Parameter example:* - ``coordinate='minimum'`` + would also be together in a different group. - group: optional - A grouped collapse is one for which an axis is not - collapsed completely to size 1. Instead, the collapse axis - is partitioned into non-overlapping groups and each group - is collapsed to size 1, independently of the other - groups. The results of the collapses are concatenated so - that the output axis has a size equal to the number of - groups. + .. note:: For CF compliance, an "over days" collapse + should be preceded by a "within days" collapse. - An element of the collapse axis can not be a member of - more than one group, and may be a member of no - groups. Elements that are not selected by the *group* - parameter are excluded from the result. + The default groups may be split into smaller groups if the + *over_days* parameter is one of: - The *group* parameter defines how the axis elements are - partitioned into groups, and may be one of: + ============== ========================================== + *over_days* Description + ============== ========================================== + `TimeDuration` Split each default group into smaller + groups which span the given time duration, + which must be at least one day. - =============== ========================================= - *group* Description - =============== ========================================= - `Data` Define groups by coordinate values that - span the given range. The first group - starts at the first coordinate bound of - the first axis element (or its coordinate - if there are no bounds) and spans the - defined group size. Each subsequent - group immediately follows the preceding - one.
By default each group contains the - consective run of elements whose - coordinate values lie within the group - limits (see the *group_by* parameter). + * Groups may contain different numbers of + elements. - * By default each element will be in - exactly one group (see the *group_by*, - *group_span* and *group_contiguous* - parameters). + * The start of the first group may be + before the first first axis element, + depending on the offset defined by the + time duration. For example, if + ``group=cf.M(day=15)`` then the first + group will start on the closest 15th of + a month to the first axis element. - * By default groups may contain different - numbers of elements. + `Query` Split each default group into smaller + groups whose coordinate cells satisfy the + query condition. - * If no units are specified then the - units of the coordinates are assumed. + If a sequence of `Query` is provided then + groups are defined for each query. - `TimeDuration` Define groups by a time interval spanned - by the coordinates. The first group - starts at or before the first coordinate - bound of the first axis element (or its - coordinate if there are no bounds) and - spans the defined group size. Each - subsequent group immediately follows the - preceding one. By default each group - contains the consective run of elements - whose coordinate values lie within the - group limits (see the *group_by* - parameter). + * Groups may contain different numbers of + elements. - * By default each element will be in - exactly one group (see the *group_by*, - *group_span* and *group_contiguous* - parameters). + * If a coordinate does not satisfy any of + the conditions then its element will not + be in a group. - * By default groups may contain different - numbers of elements. + * If an element is selected by two or more + queries then the latest one in the + sequence defines which group it will be + in. + ============== ========================================== - * The start of the first group may be - before the first first axis element, - depending on the offset defined by the - time duration. For example, if - ``group=cf.Y(month=12)`` then the first - group will start on the closest 1st - December to the first axis element. + *Parameter example:* + To define groups for January and for June to December, + ignoring all other months: ``over_days=[cf.month(1), + cf.month(cf.wi(6, 12))]`` (see `cf.month` and `cf.wi`). - `Query` Define groups from elements whose - coordinates satisfy the query - condition. Multiple groups are created: - one for each maximally consecutive run - within the selected elements. + *Parameter example:* + To define groups spanning 90 days: + ``over_days=cf.D(90)`` or ``over_days=cf.h(2160)``. (see + `cf.D` and `cf.h`). - If a sequence of `Query` is provided then - groups are defined for each query. + *Parameter example:* + To define groups that each span 3 calendar months, + starting and ending at 06:00 in the first day of each + month: ``over_days=cf.M(3, hour=6)`` (see `cf.M`). - * If a coordinate does not satisfy any of - the query conditions then its element - will not be in a group. + *Parameter example:* + To define groups that each span a calendar month + ``over_days=cf.M()`` (see `cf.M`). - * By default groups may contain different - numbers of elements. + *Parameter example:* + To define groups for January and for June to December, + ignoring all other months: ``over_days=[cf.month(1), + cf.month(cf.wi(6, 12))]`` (see `cf.month` and `cf.wi`). 
- * If no units are specified then the - units of the coordinates are assumed. + over_years: optional + Define the groups for creating CF "over years" + climatological statistics. - * If an element is selected by two or - more queries then the latest one in the - sequence defines which group it will be - in. + By default (or if *over_years* is `None`) each group + contains all elements for which the time coordinate cell + lower bounds have a common date of the year but different + years, and for which the time coordinate cell upper bounds + also have a common date of the year but different + years. The collapsed time axis will have a size equal to + the number of groups that were found. - `int` Define groups that contain the given - number of elements. The first group - starts with the first axis element and - spans the defined number of consecutive - elements. Each subsequent group - immediately follows the preceding one. + For example, elements corresponding to the two time + coordinate cells - * By default each group has the defined - number of elements, apart from the last - group which may contain fewer elements - (see the *group_span* parameter). + | ``1999-12-01 00:00:00/2000-01-01 00:00:00`` + | ``2000-12-01 00:00:00/2001-01-01 00:00:00`` - `numpy.ndarray` Define groups by selecting elements that - map to the same value in the `numpy` - array. The array must contain integers - and have the same length as the axis to - be collapsed and its sequence of values - correspond to the axis elements. Each - group contains the elements which - correspond to a common non-negative - integer value in the numpy array. Upon - output, the collapsed axis is arranged in - order of increasing group number. See the - *regroup* parameter, which allows the - creation of such a `numpy.array` for a - given grouped collapse. + would be together in a group. - * The groups do not have to be in runs of - consective elements; they may be - scattered throughout the axis. + .. note:: For CF compliance, an "over years" collapse + should be preceded by a "within years" or "over + days" collapse. - * An element which corresponds to a - negative integer in the array will not - be in any group. - =============== ========================================= + The default groups may be split into smaller groups if the + *over_years* parameter is one of: - *Parameter example:* - To define groups of 10 kilometres: ``group=cf.Data(10, - 'km')``. + ============== ========================================== + *over_years* Description + ============== ========================================== + `TimeDuration` Split each default group into smaller + groups which span the given time duration, + which must be at least one day. - *Parameter example:* - To define groups of 5 days, starting and ending at - midnight on each day: ``group=cf.D(5)`` (see `cf.D`). + * Groups may contain different numbers of + elements. - *Parameter example:* - To define groups of 1 calendar month, starting and - ending at day 16 of each month: ``group=cf.M(day=16)`` - (see `cf.M`). + * The start of the first group may be + before the first axis element, + depending on the offset defined by the + time duration. For example, if + ``group=cf.Y(month=12)`` then the first + group will start on the closest 1st + December to the first axis element. + + `Query` Split each default group into smaller + groups whose coordinate cells satisfy the + query condition. + + If a sequence of `Query` is provided then + groups are defined for each query.
+ + * Groups may contain different numbers of + elements. + + * If a coordinate does not satisfy any of + the conditions then its element will not + be in a group. + + * If an element is selected by two or more + queries then the latest one in the + sequence defines which group it will be + in. + ============== ========================================== *Parameter example:* + An element with coordinate bounds {1999-06-01 06:00:00, + 1999-09-01 06:00:00} **matches** an element with + coordinate bounds {2000-06-01 06:00:00, 2000-09-01 + 06:00:00}. *Parameter example:* + An element with coordinate bounds {1999-12-01 00:00:00, + 2000-12-01 00:00:00} **matches** an element with + coordinate bounds {2000-12-01 00:00:00, 2001-12-01 + 00:00:00}. *Parameter example:* + To define groups spanning 10 calendar years: + ``over_years=cf.Y(10)`` or ``over_years=cf.M(120)`` (see + `cf.M` and `cf.Y`). *Parameter example:* + To define groups spanning 5 calendar years, starting and + ending at 06:00 on 01 December of each year: + ``over_years=cf.Y(5, month=12, hour=6)`` (see `cf.Y`). *Parameter example:* + To define one group spanning 1981 to 1990 and another + spanning 2001 to 2005: ``over_years=[cf.year(cf.wi(1981, + 1990)), cf.year(cf.wi(2001, 2005))]`` (see `cf.year` and + `cf.wi`). - regroup: `bool`, optional - If True then, for grouped collapses, do not collapse the - field construct, but instead return a `numpy.array` of - integers which identifies the groups defined by the - *group* parameter. Each group contains the elements which - correspond to a common non-negative integer value in the - numpy array. Elements corresponding to negative integers - are not in any group. The array may subsequently be used - as the value of the *group* parameter in a separate - collapse. + {{inplace: `bool`, optional}} - For example: + {{i: deprecated at version 3.0.0}} - >>> groups = f.collapse('time: mean', group=10, regroup=True) - >>> g = f.collapse('time: mean', group=groups) + kwargs: deprecated at version 3.0.0 - is equivalent to: + :Returns: - >>> g = f.collapse('time: mean', group=10) + `Field` or `numpy.ndarray` + The collapsed field construct. Alternatively, if the + *regroup* parameter is True then a `numpy` array is + returned. - group_by: optional - Specify how coordinates are assigned to the groups defined - by the *group*, *within_days* or *within_years* - parameters. Ignored unless one of these parameters is set - to a `Data` or `TimeDuration` object.
+ **Examples:** - The *group_by* parameter may be one of: + There are further worked examples in + https://ncas-cms.github.io/cf-python/analysis.html#statistical-collapses - ============ ============================================ - *group_by* Description - ============ ============================================ - `None` This is the default. + """ + if _debug: + _DEPRECATION_ERROR_KWARGS( + self, + "collapse", + {"_debug": _debug}, + "Use keyword 'verbose' instead.", + ) # pragma: no cover - If the groups are defined by the *group* - parameter (i.e. collapses other than - climatological time collapses) then assume a - value of ``'coords'``. + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "collapse", kwargs + ) # pragma: no cover - If the groups are defined by the - *within_days* or *within_years* parameter - (i.e. climatological time collapses) then - assume a value of ``'bounds'``. + if inplace: + f = self + else: + f = self.copy() - ``'coords'`` Each group contains the axis elements whose - coordinate values lie within the group - limits. Every element will be in a group. + # Whether or not to create null bounds for null + # collapses. I.e. if the collapse axis has size 1 and no + # bounds, whether or not to create upper and lower bounds to + # the coordinate value. If this occurs it's because the null + # collapse is part of a grouped collapse and so will be + # concatenated to other collapses for the final result: bounds + # will be made for the grouped collapse, so all elements need + # bounds. + # _create_zero_size_cell_bounds = kwargs.get( + # '_create_zero_size_cell_bounds', False) - ``'bounds'`` Each group contains the axis elements whose - upper and lower coordinate bounds both lie - within the group limits. Some elements may - not be inside any group, either because the - group limits do not coincide with coordinate - bounds or because the group size is - sufficiently small. - ============ ============================================ + # ------------------------------------------------------------ + # Parse the methods and axes + # ------------------------------------------------------------ + if ":" in method: + # Convert a cell methods string (such as 'area: mean dim3: + # dim2: max T: minimum height: variance') to a CellMethod + # construct + if axes is not None: + raise ValueError( + "Can't collapse: Can't set 'axes' when 'method' is " + "CF-like cell methods string" + ) - group_span: optional - Specify how to treat groups that may not span the desired - range. For example, when creating 3-month means, the - *group_span* parameter can be used to allow groups which - only contain 1 or 2 months of data. + all_methods = [] + all_axes = [] + all_within = [] + all_over = [] - By default, *group_span* is `None`. This means that only - groups whose span equals the size specified by the - definition of the groups are collapsed; unless the groups - have been defined by one or more `Query` objects, in which - case then the default behaviour is to collapse all groups, - regardless of their size. 
+ for cm in CellMethod.create(method): + all_methods.append(cm.get_method(None)) + all_axes.append(cm.get_axes(())) + all_within.append(cm.get_qualifier("within", None)) + all_over.append(cm.get_qualifier("over", None)) + else: + x = method.split(" within ") + if method == x[0]: + within = None + x = method.split(" over ") + if method == x[0]: + over = None + else: + method, over = x + else: + method, within = x - In effect, the *group_span* parameter defaults to `True` - unless the groups have been defined by one or more `Query` - objects, in which case *group_span* defaults to `False`. + if isinstance(axes, (str, int)): + axes = (axes,) - The different behaviour when the groups have been defined - by one or more `Query` objects is necessary because a - `Query` object can only define the composition of a group, - and not its size (see the parameter examples below for how - to specify a group span in this case). + all_methods = (method,) + all_within = (within,) + all_over = (over,) + all_axes = (axes,) - .. note:: Prior to version 3.1.0, the default value of - *group_span* was effectively `False`. + # ------------------------------------------------------------ + # Convert axes into domain axis construct keys + # ------------------------------------------------------------ + domain_axes = None - In general, the span of a group is the absolute difference - between the lower bound of its first element and the upper - bound of its last element. The only exception to this - occurs if *group_span* is (by default or by explicit - setting) an integer, in which case the span of a group is - the number of elements in the group. See also the - *group_contiguous* parameter for how to deal with groups - that have gaps in their coverage. + input_axes = all_axes + all_axes = [] + for axes in input_axes: + if axes is None: + domain_axes = self.domain_axes( + todict=False, cached=domain_axes + ) + all_axes.append(list(domain_axes)) + continue - The *group_span* parameter is only applied to groups - defined by the *group*, *within_days* or *within_years* - parameters, and is otherwise ignored. + axes2 = [] + for axis in axes: + msg = ( + "Must have '{}' axes for an '{}' collapse. Can't " + "find {{!r}} axis" + ) + if axis == "area": + iterate_over = ("X", "Y") + msg = msg.format("', '".join(iterate_over), axis) + elif axis == "volume": + iterate_over = ("X", "Y", "Z") + msg = msg.format("', '".join(iterate_over), axis) + else: + iterate_over = (axis,) + msg = "Can't find the collapse axis identified by {!r}" - The *group_span* parameter may be one of: + for x in iterate_over: + a = self.domain_axis(x, key=True, default=None) + if a is None: + raise ValueError(msg.format(x)) + axes2.append(a) - ============== ========================================== - *group_span* Description - ============== ========================================== - `None` This is the default. Apply a value of - `True` or `False` depending on how the - groups have been defined. + all_axes.append(axes2) - `True` Ignore groups whose span is not equal to - the size specified by the definition of - the groups. Only applicable if the groups - are defined by a `Data`, `TimeDuration` or - `int` object, and this is the default in - this case. + logger.info( + " all_methods, all_axes, all_within, all_over = " + "{} {} {} {}".format(all_methods, all_axes, all_within, all_over) + ) # pragma: no cover - `False` Collapse all groups, regardless of their - size. This is the default if the groups - are defined by one to more `Query` - objects. 
+ if group is not None and len(all_axes) > 1: + raise ValueError( + "Can't use the 'group' parameter for multiple collapses" + ) - `Data` Ignore groups whose span is not equal to - the given size. If no units are specified - then the units of the coordinates are - assumed. + # ------------------------------------------------------------ + # + # ------------------------------------------------------------ + domain_axes = f.domain_axes(todict=False, cached=domain_axes) + # auxiliary_coordinates = f.auxiliary_coordinates(view=True) + # dimension_coordinates = f.dimension_coordinates(view=True) - `TimeDuration` Ignore groups whose span is not equals to - the given time duration. + for method, axes, within, over, axes_in in zip( + all_methods, all_axes, all_within, all_over, input_axes + ): - `int` Ignore groups that contain fewer than the - given number of elements - ============== ========================================== + method2 = _collapse_methods.get(method, None) + if method2 is None: + raise ValueError( + "Unknown collapse method: {!r}".format(method) + ) - *Parameter example:* - To collapse into groups of 10km, ignoring any groups - that span less than that distance: ``group=cf.Data(10, - 'km'), group_span=True``. + method = method2 - *Parameter example:* - To collapse a daily timeseries into monthly groups, - ignoring any groups that span less than 1 calendar - month: monthly values: ``group=cf.M(), group_span=True`` - (see `cf.M`). + # collapse_axes_all_sizes = domain_axes.filter_by_key(*axes) + collapse_axes_all_sizes = f.domain_axes( + filter_by_key=axes, todict=False + ) - *Parameter example:* - To collapse a timeseries into seasonal groups, ignoring - any groups that span less than three months: - ``group=cf.seasons(), group_span=cf.M(3)`` (see - `cf.seasons` and `cf.M`). + logger.info( + " axes = {}".format(axes) + ) # pragma: no cover + logger.info( + " method = {}".format(method) + ) # pragma: no cover + logger.info( + " collapse_axes_all_sizes = {}".format( + collapse_axes_all_sizes + ) + ) # pragma: no cover - group_contiguous: `int`, optional - Specify how to treat groups whose elements are not - contiguous or have overlapping cells. For example, when - creating a December to February means, the - *group_contiguous* parameter can be used to allow groups - which have no data for January. + if not collapse_axes_all_sizes: + raise ValueError( + "Can't collapse: Can not identify collapse axes" + ) - A group is considered to be contiguous unless it has - coordinates with bounds that do not coincide for adjacent - cells. The definition may be expanded to include groups - whose coordinate bounds that overlap. + if method in ( + "sum_of_weights", + "sum_of_weights2", + "sample_size", + "integral", + "maximum_absolute_value", + "minimum_absolute_value", + "mean_absolute_value", + "range", + "root_mean_square", + "sum_of_squares", + ): + collapse_axes = collapse_axes_all_sizes.todict() # copy() + else: + collapse_axes = collapse_axes_all_sizes.filter_by_size( + gt(1), todict=True + ) - By default *group_contiguous* is ``1``, meaning that - non-contiguous groups, and those whose coordinate bounds - overlap, are not collapsed + logger.info( + " collapse_axes = {}".format(collapse_axes) + ) # pragma: no cover - .. note:: Prior to version 3.1.0, the default value of - *group_contiguous* was ``0``. 
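            As a sketch of the *group_contiguous* behaviour described
            above (assuming a field `f` with a daily reference-time
            axis), non-contiguous seasonal groups might be permitted
            with:

              >>> g = f.collapse('T: mean', group=cf.seasons(), group_contiguous=0)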
+ if not collapse_axes: + # Do nothing if there are no collapse axes + if _create_zero_size_cell_bounds: + # Create null bounds if requested + for axis in axes: + # dc = f.dimension_coordinates( + # filter_by_axis=(axis,), axis_mode="and", todict=Tru#e + # ).value(None) + dc = f.dimension_coordinate( + filter_by_axis=(axis,), + default=None, + ) + if dc is not None and not dc.has_bounds(): + dc.set_bounds(dc.create_bounds(cellsize=0)) - The *group_contiguous* parameter is only applied to groups - defined by the *group*, *within_days* or *within_years* - parameters, and is otherwise ignored. + continue - The *group_contiguous* parameter may be one of: + # Check that there are enough elements to collapse + collapse_axes_sizes = [ + da.get_size() for da in collapse_axes.values() + ] + size = reduce(operator_mul, collapse_axes_sizes, 1) - =================== ===================================== - *group_contiguous* Description - =================== ===================================== - ``0`` Allow non-contiguous groups, and - those containing overlapping cells. + logger.info( + " collapse_axes_sizes = {}".format(collapse_axes_sizes) + ) # pragma: no cover - ``1`` This is the default. Ignore - non-contiguous groups, as well as - contiguous groups containing - overlapping cells. + grouped_collapse = ( + within is not None or over is not None or group is not None + ) - ``2`` Ignore non-contiguous groups, - allowing contiguous groups containing - overlapping cells. - =================== ===================================== + # -------------------------------------------------------- + # Set the group_by parameter + # -------------------------------------------------------- + if group_by is None: + if within is None and over is None: + group_by = "coords" + else: + group_by = "bounds" + elif ( + within is not None or over is not None + ) and group_by == "coords": + raise ValueError( + "Can't collapse: group_by parameter can't be " + "'coords' for a climatological time collapse." + ) - *Parameter example:* - To allow non-contiguous groups, and those containing - overlapping cells: ``group_contiguous=0``. + # -------------------------------------------------------- + # Set the coordinate parameter + # -------------------------------------------------------- + if coordinate is None and over is None: + coordinate = "mid_range" - within_days: optional - Define the groups for creating CF "within days" - climatological statistics. + if grouped_collapse: + if len(collapse_axes) > 1: + raise ValueError( + "Can't do a grouped collapse on multiple axes " + "simultaneously" + ) - Each group contains elements whose coordinates span a time - interval of up to one day. The results of the collapses - are concatenated so that the output axis has a size equal - to the number of groups. + # ------------------------------------------------------------ + # Grouped collapse: Calculate weights + # ------------------------------------------------------------ + g_weights = weights + if method not in _collapse_weighted_methods: + g_weights = None + else: + # if isinstance(weights, (dict, self.__class__, Data)): + # if measure: + # raise ValueError( + # "TODO") + # + # if scale is not None: + # raise ValueError( + # "TODO") + if method == "integral": + if not measure: + raise ValueError( + "Must set measure=True for 'integral' " + "collapses." + ) - .. note:: For CF compliance, a "within days" collapse - should be followed by an "over days" collapse. 
+ if scale is not None: + raise ValueError( + "Can't set scale for 'integral' collapses." + ) + elif not measure and scale is None: + scale = 1.0 + elif measure and scale is not None: + raise ValueError("TODO") - The *within_days* parameter defines how the elements are - partitioned into groups, and may be one of: + # if weights is True: + # weights = tuple(collapse_axes.keys()) - ============== ========================================== - *within_days* Description - ============== ========================================== - `TimeDuration` Defines the group size in terms of a time - interval of up to one day. The first group - starts at or before the first coordinate - bound of the first axis element (or its - coordinate if there are no bounds) and - spans the defined group size. Each - subsequent group immediately follows the - preceding one. By default each group - contains the consective run of elements - whose coordinate cells lie within the - group limits (see the *group_by* - parameter). + g_weights = f.weights( + weights, + components=True, + axes=list(collapse_axes), # .keys()), + scale=scale, + measure=measure, + radius=radius, + great_circle=great_circle, + ) - * Groups may contain different numbers of - elements. + if not g_weights: + g_weights = None - * The start of the first group may be - before the first first axis element, - depending on the offset defined by the - time duration. For example, if - ``group=cf.D(hour=12)`` then the first - group will start on the closest midday - to the first axis element. - - `Query` Define groups from elements whose - coordinates satisfy the query - condition. Multiple groups are created: - one for each maximally consecutive run - within the selected elements. - - If a sequence of `Query` is provided then - groups are defined for each query. + # axis = collapse_axes.key() + axis = [a for a in collapse_axes][0] - * Groups may contain different numbers of - elements. + f = f._collapse_grouped( + method, + axis, + within=within, + over=over, + within_days=within_days, + within_years=within_years, + over_days=over_days, + over_years=over_years, + group=group, + group_span=group_span, + group_contiguous=group_contiguous, + regroup=regroup, + mtol=mtol, + ddof=ddof, + measure=measure, + weights=g_weights, + squeeze=squeeze, + coordinate=coordinate, + group_by=group_by, + axis_in=axes_in[0], + verbose=verbose, + ) - * If no units are specified then the units - of the coordinates are assumed. + if regroup: + # Grouped collapse: Return the numpy array + return f - * If a coordinate does not satisfy any of - the conditions then its element will not - be in a group. + # ---------------------------------------------------- + # Grouped collapse: Update the cell methods + # ---------------------------------------------------- + f._update_cell_methods( + method=method, + domain_axes=collapse_axes, + input_axes=axes_in, + within=within, + over=over, + verbose=verbose, + ) + continue - * If an element is selected by two or more - queries then the latest one in the - sequence defines which group it will be - in. - ============== ========================================== + elif regroup: + raise ValueError( + "Can't return an array of groups for a non-grouped " + "collapse" + ) - *Parameter example:* - To define groups of 6 hours, starting at 00:00, 06:00, - 12:00 and 18:00: ``within_days=cf.h(6)`` (see `cf.h`). 
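            As a sketch of the *within_days* usage described above
            (assuming a field `f` with a sub-daily reference-time
            axis), a CF "within days" collapse into 6-hour groups
            might be requested with:

              >>> g = f.collapse('T: mean within days', within_days=cf.h(6))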
+ data_axes = f.get_data_axes() + iaxes = [ + data_axes.index(axis) + for axis in collapse_axes + if axis in data_axes + ] - *Parameter example:* - To define groups of 1 day, starting at 06:00: - ``within_days=cf.D(1, hour=6)`` (see `cf.D`). + # ------------------------------------------------------------ + # Calculate weights + # ------------------------------------------------------------ + logger.info( + " Input weights = {!r}".format(weights) + ) # pragma: no cover - *Parameter example:* - To define groups of 00:00 to 06:00 within each day, - ignoring the rest of each day: - ``within_days=cf.hour(cf.le(6))`` (see `cf.hour` and - `cf.le`). + if method not in _collapse_weighted_methods: + weights = None - *Parameter example:* - To define groups of 00:00 to 06:00 and 18:00 to 24:00 - within each day, ignoring the rest of each day: - ``within_days=[cf.hour(cf.le(6)), cf.hour(cf.gt(18))]`` - (see `cf.gt`, `cf.hour` and `cf.le`). + d_kwargs = {} + if weights is not None: + # if isinstance(weights, (dict, self.__class__, Data)): + # if measure: + # raise ValueError("TODO") + # + # if scale is not None: + # raise ValueError("TODO") - within_years: optional - Define the groups for creating CF "within years" - climatological statistics. + if method == "integral": + if not measure: + raise ValueError( + f"Must set measure=True for {method!r} collapses" + ) - Each group contains elements whose coordinates span a time - interval of up to one calendar year. The results of the - collapses are concatenated so that the output axis has a - size equal to the number of groups. + if scale is not None: + raise ValueError( + "Can't set scale for 'integral' collapses." + ) + elif not measure and scale is None: + scale = 1.0 + elif measure and scale is not None: + raise ValueError("TODO") - .. note:: For CF compliance, a "within years" collapse - should be followed by an "over years" collapse. + d_weights = f.weights( + weights, + components=True, + axes=list(collapse_axes.keys()), + scale=scale, + measure=measure, + radius=radius, + great_circle=great_circle, + ) - The *within_years* parameter defines how the elements are - partitioned into groups, and may be one of: + if d_weights: + d_kwargs["weights"] = d_weights - ============== ========================================== - *within_years* Description - ============== ========================================== - `TimeDuration` Define the group size in terms of a time - interval of up to one calendar year. The - first group starts at or before the first - coordinate bound of the first axis element - (or its coordinate if there are no bounds) - and spans the defined group size. Each - subsequent group immediately follows the - preceding one. By default each group - contains the consective run of elements - whose coordinate cells lie within the - group limits (see the *group_by* - parameter). + logger.info( + f" Output weights = {d_weights!r}" + ) # pragma: no cover - * Groups may contain different numbers of - elements. + elif method == "integral": + raise ValueError( + f"Must set the 'weights' parameter for {method!r} " + "collapses" + ) - * The start of the first group may be - before the first first axis element, - depending on the offset defined by the - time duration. For example, if - ``group=cf.Y(month=12)`` then the first - group will start on the closest 1st - December to the first axis element. + if method in _collapse_ddof_methods: + d_kwargs["ddof"] = ddof - `Query` Define groups from elements whose - coordinates satisfy the query - condition. 
Multiple groups are created: - one for each maximally consecutive run - within the selected elements. + # ======================================================== + # Collapse the data array + # ======================================================== + logger.info( + " Before collapse of data:\n" + f" iaxes, d_kwargs = {iaxes} {d_kwargs}\n" + f" f.shape = {f.shape}\n" + f" f.dtype = {f.dtype}\n" + ) # pragma: no cover - If a sequence of `Query` is provided then - groups are defined for each query. + getattr(f.data, method)( + axes=iaxes, + squeeze=squeeze, + mtol=mtol, + inplace=True, + **d_kwargs, + ) - * The first group may start outside of the - range of coordinates (the start of the - first group is controlled by parameters - of the `TimeDuration`). + if squeeze: + # ---------------------------------------------------- + # Remove the collapsed axes from the field's list of + # data array axes + # ---------------------------------------------------- + f.set_data_axes( + [axis for axis in data_axes if axis not in collapse_axes] + ) - * If group boundaries do not coincide with - coordinate bounds then some elements may - not be inside any group. + logger.info( + " After collapse of data:\n" + f" f.shape = {f.shape}\n" + f" f.dtype = {f.dtype}\n", + f"collapse_axes = {collapse_axes}", + ) # pragma: no cover - * If the group size is sufficiently small - then some elements may not be inside any - group. + # --------------------------------------------------------- + # Update dimension coordinates, auxiliary coordinates, + # cell measures and domain ancillaries + # --------------------------------------------------------- + for axis, domain_axis in collapse_axes.items(): + # Ignore axes which are already size 1 + size = domain_axis.get_size() + if size == 1: + continue - * Groups may contain different numbers of - elements. - ============== ========================================== + # REMOVE all cell measures and domain ancillaries + # which span this axis + c = f.constructs.filter( + filter_by_type=("cell_measure", "domain_ancillary"), + filter_by_axis=(axis,), + axis_mode="or", + todict=True, + ) + for key, value in c.items(): + logger.info( + f" Removing {value.construct_type}" + ) # pragma: no cover - *Parameter example:* - To define groups of 90 days: ``within_years=cf.D(90)`` - (see `cf.D`). + f.del_construct(key) - *Parameter example:* - To define groups of 3 calendar months, starting on the - 15th of a month: ``within_years=cf.M(3, day=15)`` (see - `cf.M`). + # REMOVE all 2+ dimensional auxiliary coordinates + # which span this axis + # c = auxiliary_coordinates.filter_by_naxes(gt(1), view=True) + c = f.auxiliary_coordinates( + filter_by_naxes=( + gt( + 1, + ), + ), + filter_by_axis=(axis,), + axis_mode="or", + todict=True, + ) + for key, value in c.items(): + logger.info( + f" Removing {value.construct_type} {key!r}" + ) # pragma: no cover - *Parameter example:* - To define groups for the season MAM within each year: - ``within_years=cf.mam()`` (see `cf.mam`). + f.del_construct(key) - *Parameter example:* - To define groups for February and for November to - December within each year: ``within_years=[cf.month(2), - cf.month(cf.ge(11))]`` (see `cf.month` and `cf.ge`). + # REMOVE all 1 dimensional auxiliary coordinates which + # span this axis and have different values in their + # data array and bounds. + # + # KEEP, after changing their data arrays, all + # one-dimensional auxiliary coordinates which span + # this axis and have the same values in their data + # array and bounds. 
+ c = f.auxiliary_coordinates( + filter_by_axis=(axis,), axis_mode="exact", todict=True + ) + for key, aux in c.items(): + logger.info(f"key = {key}") # pragma: no cover - over_days: optional - Define the groups for creating CF "over days" - climatological statistics. + d = aux[0] - By default (or if *over_days* is `None`) each group - contains all elements for which the time coordinate cell - lower bounds have a common time of day but different - dates, and for which the time coordinate cell upper bounds - also have a common time of day but different dates. The - collapsed dime axis will have a size equal to the number - of groups that were found. + # TODODASK: remove once dask. For some reason, + # without this we now get LAMA related failures in + # Partition.nbytes ... + _ = aux.dtype - For example, elements corresponding to the two time - coordinate cells + if aux.has_bounds() or (aux[:-1] != aux[1:]).any(): + logger.info( + f" Removing {aux.construct_type} {key!r}" + ) # pragma: no cover - | ``1999-12-31 06:00:00/1999-12-31 18:00:00`` - | ``2000-01-01 06:00:00/2000-01-01 18:00:00`` + f.del_construct(key) + else: + # Change the data array for this auxiliary + # coordinate + aux.set_data(d.data, copy=False) + if d.has_bounds(): + aux.bounds.set_data(d.bounds.data, copy=False) - would be together in a group; and elements corresponding - to the two time coordinate cells + # Reset the axis size + f.domain_axes(todict=True)[axis].set_size(1) + logger.info( + f"Changing axis size to 1: {axis}" + ) # pragma: no cover - | ``1999-12-31 00:00:00/2000-01-01 00:00:00`` - | ``2000-01-01 00:00:00/2000-01-02 00:00:00`` + # dim = dimension_coordinates.filter_by_axis( + # axis, mode="exact", view=True + # ).value(None) + dim = f.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if dim is None: + continue - would also be together in a different group. + # Create a new dimension coordinate for this axis + if dim.has_bounds(): + bounds_data = [dim.bounds.datum(0), dim.bounds.datum(-1)] + else: + bounds_data = [dim.datum(0), dim.datum(-1)] - .. note:: For CF compliance, an "over days" collapse - should be preceded by a "within days" collapse. + units = dim.Units - The default groups may be split into smaller groups if the - *over_days* parameter is one of: + if coordinate == "min": + coordinate = "minimum" + print( + "WARNING: coordinate='min' has been deprecated. " + "Use coordinate='minimum' instead." + ) + elif coordinate == "max": + coordinate = "maximum" + print( + "WARNING: coordinate='max' has been deprecated. " + "Use coordinate='maximum' instead." + ) - ============== ========================================== - *over_days* Description - ============== ========================================== - `TimeDuration` Split each default group into smaller - groups which span the given time duration, - which must be at least one day. + if coordinate == "mid_range": + data = Data( + [(bounds_data[0] + bounds_data[1]) * 0.5], units=units + ) + elif coordinate == "minimum": + data = dim.data.min() + elif coordinate == "maximum": + data = dim.data.max() + else: + raise ValueError( + "Can't collapse: Bad parameter value: " + f"coordinate={coordinate!r}" + ) - * Groups may contain different numbers of - elements. + bounds = self._Bounds(data=Data([bounds_data], units=units)) - * The start of the first group may be - before the first first axis element, - depending on the offset defined by the - time duration. 
For example, if - ``group=cf.M(day=15)`` then the first - group will start on the closest 15th of - a month to the first axis element. + dim.set_data(data, copy=False) + dim.set_bounds(bounds, copy=False) - `Query` Split each default group into smaller - groups whose coordinate cells satisfy the - query condition. + # -------------------------------------------------------- + # Update the cell methods + # -------------------------------------------------------- + if _update_cell_methods: + f._update_cell_methods( + method, + domain_axes=collapse_axes, + input_axes=axes_in, + within=within, + over=over, + verbose=verbose, + ) - If a sequence of `Query` is provided then - groups are defined for each query. + # ------------------------------------------------------------ + # Return the collapsed field (or the classification array) + # ------------------------------------------------------------ + return f - * Groups may contain different numbers of - elements. + @_manage_log_level_via_verbosity + def _collapse_grouped( + self, + method, + axis, + within=None, + over=None, + within_days=None, + within_years=None, + over_days=None, + over_years=None, + group=None, + group_span=None, + group_contiguous=False, + mtol=None, + ddof=None, + regroup=None, + coordinate=None, + measure=False, + weights=None, + squeeze=None, + group_by=None, + axis_in=None, + verbose=None, + ): + """Implements a grouped collapse on a field. - * If a coordinate does not satisfy any of - the conditions then its element will not - be in a group. + A grouped collapse is one for which an axis is not collapsed + completely to size 1. - * If an element is selected by two or more - queries then the latest one in the - sequence defines which group it will be - in. - ============== ========================================== + :Parameters: - *Parameter example:* - To define groups for January and for June to December, - ignoring all other months: ``over_days=[cf.month(1), - cf.month(cf.wi(6, 12))]`` (see `cf.month` and `cf.wi`). + method: `str` + See `collapse` for details. - *Parameter example:* - To define groups spanning 90 days: - ``over_days=cf.D(90)`` or ``over_days=cf.h(2160)``. (see - `cf.D` and `cf.h`). + measure: `bool`, optional + See `collapse` for details. - *Parameter example:* - To define groups that each span 3 calendar months, - starting and ending at 06:00 in the first day of each - month: ``over_days=cf.M(3, hour=6)`` (see `cf.M`). + over: `str` + See `collapse` for details. - *Parameter example:* - To define groups that each span a calendar month - ``over_days=cf.M()`` (see `cf.M`). + within: `str` + See `collapse` for details. - *Parameter example:* - To define groups for January and for June to December, - ignoring all other months: ``over_days=[cf.month(1), - cf.month(cf.wi(6, 12))]`` (see `cf.month` and `cf.wi`). + """ - over_years: optional - Define the groups for creating CF "over years" - climatological statistics. + def _ddddd( + classification, + n, + lower, + upper, + increasing, + coord, + group_by_coords, + extra_condition, + ): + """Returns configuration for a general collapse. - By default (or if *over_years* is `None`) each group - contains all elements for which the time coordinate cell - lower bounds have a common date of the year but different - years, and for which the time coordinate cell upper bounds - also have a common date of the year but different - years. The collapsed dime axis will have a size equal to - the number of groups that were found. 
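            As a sketch of the climatological "within"/"over" pairings
            described above (assuming a field `f` of sub-daily data on
            a reference-time axis), a "within days" collapse might be
            followed by an "over days" collapse along these lines:

              >>> g = f.collapse('T: mean within days', within_days=cf.h(12))
              >>> g = g.collapse('T: mean over days', over_days=None)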
+ :Parameter: - For example, elements corresponding to the two time - coordinate cells + extra_condition: `Query` - | ``1999-12-01 00:00:00/2000-01-01 00:00:00`` - | ``2000-12-01 00:00:00/2001-01-01 00:00:00`` + :Returns: - would be together in a group. + `numpy.ndarray`, `int`, date-time, date-time - .. note:: For CF compliance, an "over years" collapse - should be preceded by a "within years" or "over - days" collapse. + """ + if group_by_coords: + q = ge(lower) & lt(upper) + else: + q = ge(lower, attr="lower_bounds") & le( + upper, attr="upper_bounds" + ) - The default groups may be split into smaller groups if the - *over_years* parameter is one of: + if extra_condition: + q &= extra_condition - ============== ========================================== - *over_years* Description - ============== ========================================== - `TimeDuration` Split each default group into smaller - groups which span the given time duration, - which must be at least one day. + index = q.evaluate(coord).array + classification[index] = n - * Groups may contain different numbers of - elements. + if increasing: + lower = upper + else: + upper = lower - * The start of the first group may be - before the first first axis element, - depending on the offset defined by the - time duration. For example, if - ``group=cf.Y(month=12)`` then the first - group will start on the closest 1st - December to the first axis element. + n += 1 - `Query` Split each default group into smaller - groups whose coordinate cells satisfy the - query condition. + return classification, n, lower, upper - If a sequence of `Query` is provided then - groups are defined for each query. + def _time_interval( + classification, + n, + coord, + interval, + lower, + upper, + lower_limit, + upper_limit, + group_by, + extra_condition=None, + ): + """Prepares for a collapse where the group is a + TimeDuration. - * Groups may contain different numbers of - elements. + :Parameters: - * If a coordinate does not satisfy any of - the conditions then its element will not - be in a group. + classification: `numpy.ndarray` - * If an element is selected by two or more - queries then the latest one in the - sequence defines which group it will be - in. - ============== ========================================== + n: `int` - *Parameter example:* - An element with coordinate bounds {1999-06-01 06:00:00, - 1999-09-01 06:00:00} **matches** an element with - coordinate bounds {2000-06-01 06:00:00, 2000-09-01 - 06:00:00}. + coord: `DimensionCoordinate` - *Parameter example:* - An element with coordinate bounds {1999-12-01 00:00:00, - 2000-12-01 00:00:00} **matches** an element with - coordinate bounds {2000-12-01 00:00:00, 2001-12-01 - 00:00:00}. + interval: `TimeDuration` - *Parameter example:* - To define groups spanning 10 calendar years: - ``over_years=cf.Y(10)`` or ``over_years=cf.M(120)`` (see - `cf.M` and `cf.Y`). + lower: date-time object - *Parameter example:* - To define groups spanning 5 calendar years, starting and - ending at 06:00 on 01 December of each year: - ``over_years=cf.Y(5, month=12, hour=6)`` (see `cf.Y`). + upper: date-time object - *Parameter example:* - To define one group spanning 1981 to 1990 and another - spanning 2001 to 2005: ``over_years=[cf.year(cf.wi(1981, - 1990), cf.year(cf.wi(2001, 2005)]`` (see `cf.year` and - `cf.wi`). 
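            As a sketch of the *over_years* examples above (assuming a
            field `f` that already holds "within years" statistics),
            decadal groups might be requested with:

              >>> g = f.collapse('T: mean over years', over_years=cf.Y(10))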
+ lower_limit: `datetime` - {{inplace: `bool`, optional}} + upper_limit: `datetime` - {{i: deprecated at version 3.0.0}} + group_by: `str` - kwargs: deprecated at version 3.0.0 + extra_condition: `Query`, optional - :Returns: + :Returns: - `Field` or `numpy.ndarray` - The collapsed field construct. Alternatively, if the - *regroup* parameter is True then a `numpy` array is - returned. + (`numpy.ndarray`, `int`) - **Examples:** + """ + group_by_coords = group_by == "coords" - There are further worked examples in - https://ncas-cms.github.io/cf-python/analysis.html#statistical-collapses + if coord.increasing: + # Increasing dimension coordinate + lower, upper = interval.bounds(lower) + while lower <= upper_limit: + lower, upper = interval.interval(lower) + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + True, + coord, + group_by_coords, + extra_condition, + ) + else: + # Decreasing dimension coordinate + lower, upper = interval.bounds(upper) + while upper >= lower_limit: + lower, upper = interval.interval(upper, end=True) + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + False, + coord, + group_by_coords, + extra_condition, + ) - """ - if _debug: - _DEPRECATION_ERROR_KWARGS( - self, - "collapse", - {"_debug": _debug}, - "Use keyword 'verbose' instead.", - ) # pragma: no cover + return classification, n - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "collapse", kwargs - ) # pragma: no cover + def _time_interval_over( + classification, + n, + coord, + interval, + lower, + upper, + lower_limit, + upper_limit, + group_by, + extra_condition=None, + ): + """Prepares for a collapse over some TimeDuration. - if inplace: - f = self - else: - f = self.copy() + :Parameters: - # Whether or not to create null bounds for null - # collapses. I.e. if the collapse axis has size 1 and no - # bounds, whether or not to create upper and lower bounds to - # the coordinate value. If this occurs it's because the null - # collapse is part of a grouped collapse and so will be - # concatenated to other collapses for the final result: bounds - # will be made for the grouped collapse, so all elements need - # bounds. 
- # _create_zero_size_cell_bounds = kwargs.get( - # '_create_zero_size_cell_bounds', False) + classification: `numpy.ndarray` - # ------------------------------------------------------------ - # Parse the methods and axes - # ------------------------------------------------------------ - if ":" in method: - # Convert a cell methods string (such as 'area: mean dim3: - # dim2: max T: minimum height: variance') to a CellMethod - # construct - if axes is not None: - raise ValueError( - "Can't collapse: Can't set 'axes' when 'method' is " - "CF-like cell methods string" - ) + n: `int` - all_methods = [] - all_axes = [] - all_within = [] - all_over = [] + coord: `DimensionCoordinate` - for cm in CellMethod.create(method): - all_methods.append(cm.get_method(None)) - all_axes.append(cm.get_axes(())) - all_within.append(cm.get_qualifier("within", None)) - all_over.append(cm.get_qualifier("over", None)) - else: - x = method.split(" within ") - if method == x[0]: - within = None - x = method.split(" over ") - if method == x[0]: - over = None - else: - method, over = x - else: - method, within = x + interval: `TimeDuration` - if isinstance(axes, (str, int)): - axes = (axes,) + lower: date-time - all_methods = (method,) - all_within = (within,) - all_over = (over,) - all_axes = (axes,) + upper: date-time - # ------------------------------------------------------------ - # Convert axes into domain axis construct keys - # ------------------------------------------------------------ - domain_axes = None + lower_limit: date-time - input_axes = all_axes - all_axes = [] - for axes in input_axes: - if axes is None: - domain_axes = self.domain_axes( - todict=False, cached=domain_axes - ) - all_axes.append(list(domain_axes)) - continue + upper_limit: date-time - axes2 = [] - for axis in axes: - msg = ( - "Must have '{}' axes for an '{}' collapse. 
Can't " - "find {{!r}} axis" - ) - if axis == "area": - iterate_over = ("X", "Y") - msg = msg.format("', '".join(iterate_over), axis) - elif axis == "volume": - iterate_over = ("X", "Y", "Z") - msg = msg.format("', '".join(iterate_over), axis) - else: - iterate_over = (axis,) - msg = "Can't find the collapse axis identified by {!r}" + group_by: `str` - for x in iterate_over: - a = self.domain_axis(x, key=True, default=None) - if a is None: - raise ValueError(msg.format(x)) - axes2.append(a) + extra_condition: `Query`, optional - all_axes.append(axes2) + :Returns: - logger.info( - " all_methods, all_axes, all_within, all_over = " - "{} {} {} {}".format(all_methods, all_axes, all_within, all_over) - ) # pragma: no cover + (`numpy.ndarray`, `int`) - if group is not None and len(all_axes) > 1: - raise ValueError( - "Can't use the 'group' parameter for multiple collapses" - ) + """ + group_by_coords = group_by == "coords" - # ------------------------------------------------------------ - # - # ------------------------------------------------------------ - domain_axes = f.domain_axes(todict=False, cached=domain_axes) - # auxiliary_coordinates = f.auxiliary_coordinates(view=True) - # dimension_coordinates = f.dimension_coordinates(view=True) + if coord.increasing: + # Increasing dimension coordinate + # lower, upper = interval.bounds(lower) + upper = interval.interval(upper)[1] + while lower <= upper_limit: + lower, upper = interval.interval(lower) + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + True, + coord, + group_by_coords, + extra_condition, + ) + else: + # Decreasing dimension coordinate + # lower, upper = interval.bounds(upper) + lower = interval.interval(upper, end=True)[0] + while upper >= lower_limit: + lower, upper = interval.interval(upper, end=True) + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + False, + coord, + group_by_coords, + extra_condition, + ) - for method, axes, within, over, axes_in in zip( - all_methods, all_axes, all_within, all_over, input_axes + return classification, n + + def _data_interval( + classification, + n, + coord, + interval, + lower, + upper, + lower_limit, + upper_limit, + group_by, + extra_condition=None, ): + """Prepares for a collapse where the group is a data + interval. 
- method2 = _collapse_methods.get(method, None) - if method2 is None: - raise ValueError( - "Unknown collapse method: {!r}".format(method) - ) + :Returns: - method = method2 + `numpy.ndarray`, `int` - # collapse_axes_all_sizes = domain_axes.filter_by_key(*axes) - collapse_axes_all_sizes = f.domain_axes( - filter_by_key=axes, todict=False - ) + """ + group_by_coords = group_by == "coords" - logger.info( - " axes = {}".format(axes) - ) # pragma: no cover - logger.info( - " method = {}".format(method) - ) # pragma: no cover - logger.info( - " collapse_axes_all_sizes = {}".format( - collapse_axes_all_sizes - ) - ) # pragma: no cover + if coord.increasing: + # Increasing dimension coordinate + lower = lower.squeeze() + while lower <= upper_limit: + upper = lower + interval + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + True, + coord, + group_by_coords, + extra_condition, + ) + else: + # Decreasing dimension coordinate + upper = upper.squeeze() + while upper >= lower_limit: + lower = upper - interval + classification, n, lower, upper = _ddddd( + classification, + n, + lower, + upper, + False, + coord, + group_by_coords, + extra_condition, + ) - if not collapse_axes_all_sizes: - raise ValueError( - "Can't collapse: Can not identify collapse axes" - ) + return classification, n - if method in ( - "sum_of_weights", - "sum_of_weights2", - "sample_size", - "integral", - "maximum_absolute_value", - "minimum_absolute_value", - "mean_absolute_value", - "range", - "root_mean_square", - "sum_of_squares", - ): - collapse_axes = collapse_axes_all_sizes.todict() # copy() - else: - collapse_axes = collapse_axes_all_sizes.filter_by_size( - gt(1), todict=True - ) + def _selection( + classification, + n, + coord, + selection, + parameter, + extra_condition=None, + group_span=None, + within=False, + ): + """Processes a group selection. - logger.info( - " collapse_axes = {}".format(collapse_axes) - ) # pragma: no cover + :Parameters: - if not collapse_axes: - # Do nothing if there are no collapse axes - if _create_zero_size_cell_bounds: - # Create null bounds if requested - for axis in axes: - # dc = f.dimension_coordinates( - # filter_by_axis=(axis,), axis_mode="and", todict=Tru#e - # ).value(None) - dc = f.dimension_coordinate( - filter_by_axis=(axis,), - default=None, - ) - if dc is not None and not dc.has_bounds(): - dc.set_bounds(dc.create_bounds(cellsize=0)) + classification: `numpy.ndarray` - continue + n: `int` - # Check that there are enough elements to collapse - collapse_axes_sizes = [ - da.get_size() for da in collapse_axes.values() - ] - size = reduce(operator_mul, collapse_axes_sizes, 1) + coord: `DimensionCoordinate` - logger.info( - " collapse_axes_sizes = {}".format(collapse_axes_sizes) - ) # pragma: no cover + selection: sequence of `Query` - grouped_collapse = ( - within is not None or over is not None or group is not None - ) + parameter: `str` + The name of the `cf.Field.collapse` parameter which + defined *selection*. This is used in error messages. 
- # -------------------------------------------------------- - # Set the group_by parameter - # -------------------------------------------------------- - if group_by is None: - if within is None and over is None: - group_by = "coords" - else: - group_by = "bounds" - elif ( - within is not None or over is not None - ) and group_by == "coords": + *Parameter example:* + ``parameter='within_years'`` + + extra_condition: `Query`, optional + + :Returns: + + `numpy.ndarray`, `int` + + """ + # Create an iterator for stepping through each Query in + # the selection sequence + try: + iterator = iter(selection) + except TypeError: raise ValueError( - "Can't collapse: group_by parameter can't be " - "'coords' for a climatological time collapse." + "Can't collapse: Bad parameter value: {}={!r}".format( + parameter, selection + ) ) - # -------------------------------------------------------- - # Set the coordinate parameter - # -------------------------------------------------------- - if coordinate is None and over is None: - coordinate = "mid_range" - - if grouped_collapse: - if len(collapse_axes) > 1: + for condition in iterator: + if not isinstance(condition, Query): raise ValueError( - "Can't do a grouped collapse on multiple axes " - "simultaneously" + "Can't collapse: {} sequence contains a non-{} " + "object: {!r}".format( + parameter, Query.__name__, condition + ) ) - # ------------------------------------------------------------ - # Grouped collapse: Calculate weights - # ------------------------------------------------------------ - g_weights = weights - if method not in _collapse_weighted_methods: - g_weights = None - else: - # if isinstance(weights, (dict, self.__class__, Data)): - # if measure: - # raise ValueError( - # "TODO") - # - # if scale is not None: - # raise ValueError( - # "TODO") - if method == "integral": - if not measure: - raise ValueError( - "Must set measure=True for 'integral' " - "collapses." - ) + if extra_condition is not None: + condition &= extra_condition - if scale is not None: - raise ValueError( - "Can't set scale for 'integral' collapses." - ) - elif not measure and scale is None: - scale = 1.0 - elif measure and scale is not None: - raise ValueError("TODO") + boolean_index = condition.evaluate(coord).array - # if weights is True: - # weights = tuple(collapse_axes.keys()) + classification[boolean_index] = n + n += 1 - g_weights = f.weights( - weights, - components=True, - axes=list(collapse_axes), # .keys()), - scale=scale, - measure=measure, - radius=radius, - great_circle=great_circle, - ) + # if group_span is not None: + # x = numpy_where(classification==n)[0] + # for i in range(1, max(1, int(float(len(x))/group_span))): + # n += 1 + # classification[x[i*group_span:(i + 1)*group_span]] = n + # n += 1 - if not g_weights: - g_weights = None + return classification, n - # axis = collapse_axes.key() - axis = [a for a in collapse_axes][0] + def _discern_runs(classification, within=False): + """Processes a group classification. 
- f = f._collapse_grouped( - method, - axis, - within=within, - over=over, - within_days=within_days, - within_years=within_years, - over_days=over_days, - over_years=over_years, - group=group, - group_span=group_span, - group_contiguous=group_contiguous, - regroup=regroup, - mtol=mtol, - ddof=ddof, - measure=measure, - weights=g_weights, - squeeze=squeeze, - coordinate=coordinate, - group_by=group_by, - axis_in=axes_in[0], - verbose=verbose, - ) + :Parameters: - if regroup: - # Grouped collapse: Return the numpy array - return f + classification: `numpy.ndarray` - # ---------------------------------------------------- - # Grouped collapse: Update the cell methods - # ---------------------------------------------------- - f._update_cell_methods( - method=method, - domain_axes=collapse_axes, - input_axes=axes_in, - within=within, - over=over, - verbose=verbose, - ) - continue + :Returns: - elif regroup: - raise ValueError( - "Can't return an array of groups for a non-grouped " - "collapse" - ) + `numpy.ndarray` - data_axes = f.get_data_axes() - iaxes = [ - data_axes.index(axis) - for axis in collapse_axes - if axis in data_axes - ] + """ + x = numpy_where(numpy_diff(classification))[0] + 1 + if not x.size: + if classification[0] >= 0: + classification[:] = 0 - # ------------------------------------------------------------ - # Calculate weights - # ------------------------------------------------------------ - logger.info( - " Input weights = {!r}".format(weights) - ) # pragma: no cover + return classification - if method not in _collapse_weighted_methods: - weights = None + if classification[0] >= 0: + classification[0 : x[0]] = 0 - d_kwargs = {} - if weights is not None: - # if isinstance(weights, (dict, self.__class__, Data)): - # if measure: - # raise ValueError("TODO") - # - # if scale is not None: - # raise ValueError("TODO") + n = 1 + for i, j in zip(x[:-1], x[1:]): + if classification[i] >= 0: + classification[i:j] = n + n += 1 - if method == "integral": - if not measure: - raise ValueError( - f"Must set measure=True for {method!r} collapses" - ) + if classification[x[-1]] >= 0: + classification[x[-1] :] = n + n += 1 - if scale is not None: - raise ValueError( - "Can't set scale for 'integral' collapses." 
- ) - elif not measure and scale is None: - scale = 1.0 - elif measure and scale is not None: - raise ValueError("TODO") + return classification - d_weights = f.weights( - weights, - components=True, - axes=list(collapse_axes.keys()), - scale=scale, - measure=measure, - radius=radius, - great_circle=great_circle, - ) + def _discern_runs_within(classification, coord): + """Processes group classification for a 'within' + collapse.""" + size = classification.size + if size < 2: + return classification - if d_weights: - d_kwargs["weights"] = d_weights + n = classification.max() + 1 - logger.info( - f" Output weights = {d_weights!r}" - ) # pragma: no cover + start = 0 + for i, c in enumerate(classification[: size - 1]): + if c < 0: + continue - elif method == "integral": - raise ValueError( - f"Must set the 'weights' parameter for {method!r} " - "collapses" - ) + if not coord[i : i + 2].contiguous(overlap=False): + classification[start : i + 1] = n + start = i + 1 + n += 1 - if method in _collapse_ddof_methods: - d_kwargs["ddof"] = ddof + return classification - # ======================================================== - # Collapse the data array - # ======================================================== - logger.info( - " Before collapse of data:\n" - f" iaxes, d_kwargs = {iaxes} {d_kwargs}\n" - f" f.shape = {f.shape}\n" - f" f.dtype = {f.dtype}\n" - ) # pragma: no cover + def _tyu(coord, group_by, time_interval): + """Returns bounding values and limits for a general + collapse. - getattr(f.data, method)( - axes=iaxes, - squeeze=squeeze, - mtol=mtol, - inplace=True, - **d_kwargs, - ) + :Parameters: - if squeeze: - # ---------------------------------------------------- - # Remove the collapsed axes from the field's list of - # data array axes - # ---------------------------------------------------- - f.set_data_axes( - [axis for axis in data_axes if axis not in collapse_axes] - ) + coord: `DimensionCoordinate` + The dimension coordinate construct associated with + the collapse. - logger.info( - " After collapse of data:\n" - f" f.shape = {f.shape}\n" - f" f.dtype = {f.dtype}\n", - f"collapse_axes = {collapse_axes}", - ) # pragma: no cover + group_by: `str` + As for the *group_by* parameter of the `collapse` method. - # --------------------------------------------------------- - # Update dimension coordinates, auxiliary coordinates, - # cell measures and domain ancillaries - # --------------------------------------------------------- - for axis, domain_axis in collapse_axes.items(): - # Ignore axes which are already size 1 - size = domain_axis.get_size() - if size == 1: - continue + time_interval: `bool` + If True then then return a tuple of date-time + objects. If False return a tuple of `Data` objects. 
- # REMOVE all cell measures and domain ancillaries - # which span this axis - c = f.constructs.filter( - filter_by_type=("cell_measure", "domain_ancillary"), - filter_by_axis=(axis,), - axis_mode="or", - todict=True, - ) - for key, value in c.items(): - logger.info( - f" Removing {value.construct_type}" - ) # pragma: no cover + :Returns: - f.del_construct(key) - - # REMOVE all 2+ dimensional auxiliary coordinates - # which span this axis - # c = auxiliary_coordinates.filter_by_naxes(gt(1), view=True) - c = f.auxiliary_coordinates( - filter_by_naxes=( - gt( - 1, - ), - ), - filter_by_axis=(axis,), - axis_mode="or", - todict=True, - ) - for key, value in c.items(): - logger.info( - f" Removing {value.construct_type} {key!r}" - ) # pragma: no cover - - f.del_construct(key) - - # REMOVE all 1 dimensional auxiliary coordinates which - # span this axis and have different values in their - # data array and bounds. - # - # KEEP, after changing their data arrays, all - # one-dimensional auxiliary coordinates which span - # this axis and have the same values in their data - # array and bounds. - c = f.auxiliary_coordinates( - filter_by_axis=(axis,), axis_mode="exact", todict=True - ) - for key, aux in c.items(): - logger.info(f"key = {key}") # pragma: no cover - - d = aux[0] - - # TODODASK: remove once dask. For some reason, - # without this we now get LAMA related failures in - # Partition.nbytes ... - _ = aux.dtype - - if aux.has_bounds() or (aux[:-1] != aux[1:]).any(): - logger.info( - f" Removing {aux.construct_type} {key!r}" - ) # pragma: no cover - - f.del_construct(key) - else: - # Change the data array for this auxiliary - # coordinate - aux.set_data(d.data, copy=False) - if d.has_bounds(): - aux.bounds.set_data(d.bounds.data, copy=False) - - # Reset the axis size - f.domain_axes(todict=True)[axis].set_size(1) - logger.info( - f"Changing axis size to 1: {axis}" - ) # pragma: no cover - - # dim = dimension_coordinates.filter_by_axis( - # axis, mode="exact", view=True - # ).value(None) - dim = f.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if dim is None: - continue + `tuple` + A tuple of 4 `Data` object or, if *time_interval* is + True, a tuple of 4 date-time objects. - # Create a new dimension coordinate for this axis - if dim.has_bounds(): - bounds_data = [dim.bounds.datum(0), dim.bounds.datum(-1)] + """ + bounds = coord.get_bounds(None) + if bounds is not None: + lower_bounds = coord.lower_bounds + upper_bounds = coord.upper_bounds + lower = lower_bounds[0] + upper = upper_bounds[0] + lower_limit = lower_bounds[-1] + upper_limit = upper_bounds[-1] + elif group_by == "coords": + if coord.increasing: + lower = coord.data[0] + upper = coord.data[-1] else: - bounds_data = [dim.datum(0), dim.datum(-1)] - - units = dim.Units + lower = coord.data[-1] + upper = coord.data[0] - if coordinate == "min": - coordinate = "minimum" - print( - "WARNING: coordinate='min' has been deprecated. " - "Use coordinate='minimum' instead." - ) - elif coordinate == "max": - coordinate = "maximum" - print( - "WARNING: coordinate='max' has been deprecated. " - "Use coordinate='maximum' instead." 
- ) + lower_limit = lower + upper_limit = upper + else: + raise ValueError( + "Can't collapse: {!r} coordinate bounds are required " + "with group_by={!r}".format(coord.identity(), group_by) + ) - if coordinate == "mid_range": - data = Data( - [(bounds_data[0] + bounds_data[1]) * 0.5], units=units - ) - elif coordinate == "minimum": - data = dim.data.min() - elif coordinate == "maximum": - data = dim.data.max() - else: + if time_interval: + units = coord.Units + if units.isreftime: + lower = lower.datetime_array[0] + upper = upper.datetime_array[0] + lower_limit = lower_limit.datetime_array[0] + upper_limit = upper_limit.datetime_array[0] + elif not units.istime: raise ValueError( - "Can't collapse: Bad parameter value: " - f"coordinate={coordinate!r}" + "Can't group by {} when coordinates have units " + "{!r}".format( + TimeDuration.__class__.__name__, coord.Units + ) ) - bounds = self._Bounds(data=Data([bounds_data], units=units)) - - dim.set_data(data, copy=False) - dim.set_bounds(bounds, copy=False) - - # -------------------------------------------------------- - # Update the cell methods - # -------------------------------------------------------- - if _update_cell_methods: - f._update_cell_methods( - method, - domain_axes=collapse_axes, - input_axes=axes_in, - within=within, - over=over, - verbose=verbose, - ) - - # ------------------------------------------------------------ - # Return the collapsed field (or the classification array) - # ------------------------------------------------------------ - return f - - @_manage_log_level_via_verbosity - def _collapse_grouped( - self, - method, - axis, - within=None, - over=None, - within_days=None, - within_years=None, - over_days=None, - over_years=None, - group=None, - group_span=None, - group_contiguous=False, - mtol=None, - ddof=None, - regroup=None, - coordinate=None, - measure=False, - weights=None, - squeeze=None, - group_by=None, - axis_in=None, - verbose=None, - ): - """Implements a grouped collapse on a field. + return (lower, upper, lower_limit, upper_limit) - A grouped collapse is one for which an axis is not collapsed - completely to size 1. + def _group_weights(weights, iaxis, index): + """Subspaces weights components. - :Parameters: + :Parameters: - method: `str` - See `collapse` for details. + weights: `dict` or `None` - measure: `bool`, optional - See `collapse` for details. + iaxis: `int` - over: `str` - See `collapse` for details. + index: `list` - within: `str` - See `collapse` for details. + :Returns: - """ + `dict` or `None` - def _ddddd( - classification, - n, - lower, - upper, - increasing, - coord, - group_by_coords, - extra_condition, - ): - """Returns configuration for a general collapse. 
+ **Examples:** - :Parameter: + >>> print(weights) + None + >>> print(_group_weights(weights, 2, [2, 3, 40])) + None + >>> print(_group_weights(weights, 1, slice(2, 56))) + None - extra_condition: `Query` + >>> weights - :Returns: + >>> _group_weights(weights, 2, [2, 3, 40]) - `numpy.ndarray`, `int`, date-time, date-time + >>> _group_weights(weights, 1, slice(2, 56)) """ - if group_by_coords: - q = ge(lower) & lt(upper) - else: - q = ge(lower, attr="lower_bounds") & le( - upper, attr="upper_bounds" - ) + if not isinstance(weights, dict): + return weights - if extra_condition: - q &= extra_condition + weights = weights.copy() + for iaxes, value in weights.items(): + if iaxis in iaxes: + indices = [slice(None)] * len(iaxes) + indices[iaxes.index(iaxis)] = index + weights[iaxes] = value[tuple(indices)] + break - index = q.evaluate(coord).array - classification[index] = n + return weights - if increasing: - lower = upper - else: - upper = lower + # START OF MAIN CODE - n += 1 - - return classification, n, lower, upper - - def _time_interval( - classification, - n, - coord, - interval, - lower, - upper, - lower_limit, - upper_limit, - group_by, - extra_condition=None, - ): - """Prepares for a collapse where the group is a - TimeDuration. - - :Parameters: - - classification: `numpy.ndarray` - - n: `int` - - coord: `DimensionCoordinate` + logger.info(" Grouped collapse:") # pragma: no cover + logger.info( + " method = {!r}".format(method) + ) # pragma: no cover + logger.info( + " axis_in = {!r}".format(axis_in) + ) # pragma: no cover + logger.info( + " axis = {!r}".format(axis) + ) # pragma: no cover + logger.info( + " over = {!r}".format(over) + ) # pragma: no cover + logger.info( + " over_days = {!r}".format(over_days) + ) # pragma: no cover + logger.info( + " over_years = {!r}".format(over_years) + ) # pragma: no cover + logger.info( + " within = {!r}".format(within) + ) # pragma: no cover + logger.info( + " within_days = {!r}".format(within_days) + ) # pragma: no cover + logger.info( + " within_years = {!r}".format(within_years) + ) # pragma: no cover + logger.info( + " regroup = {!r}".format(regroup) + ) # pragma: no cover + logger.info( + " group = {!r}".format(group) + ) # pragma: no cover + logger.info( + " group_span = {!r}".format(group_span) + ) # pragma: no cover + logger.info( + " group_contiguous = {!r}".format(group_contiguous) + ) # pragma: no cover - interval: `TimeDuration` + # Size of uncollapsed axis + axis_size = self.domain_axes(todict=True)[axis].get_size() + # Integer position of collapse axis + iaxis = self.get_data_axes().index(axis) - lower: date-time object + fl = [] - upper: date-time object + # If group, rolling window, classification, etc, do something + # special for size one axes - either return unchanged + # (possibly mofiying cell methods with , e.g, within_days', or + # raising an exception for 'can't match', I suppose. 
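        For reference, the classification logic that follows accepts a
        ready-made integer array as the group definition, and
        regroup=True returns such an array instead of a collapsed
        field. A sketch (assuming a field `f` whose collapse axis size
        matches the returned array length):

          >>> classification = f.collapse('T: mean', group=cf.M(3), regroup=True)
          >>> g = f.collapse('T: mean', group=classification)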
- lower_limit: `datetime` + classification = None - upper_limit: `datetime` + if group is not None: + if within is not None or over is not None: + raise ValueError( + "Can't set 'group' parameter for a climatological " + "collapse" + ) - group_by: `str` + if isinstance(group, numpy_ndarray): + classification = numpy_squeeze(group.copy()) - extra_condition: `Query`, optional + if classification.dtype.kind != "i": + raise ValueError( + "Can't group by numpy array of type {}".format( + classification.dtype.name + ) + ) + elif classification.shape != (axis_size,): + raise ValueError( + "Can't group by numpy array with incorrect " + "shape: {}".format(classification.shape) + ) - :Returns: + # Set group to None + group = None - (`numpy.ndarray`, `int`) + if group is not None: + if isinstance(group, Query): + group = (group,) - """ - group_by_coords = group_by == "coords" + if isinstance(group, int): + # ---------------------------------------------------- + # E.g. group=3 + # ---------------------------------------------------- + coord = None + classification = numpy_empty((axis_size,), int) - if coord.increasing: - # Increasing dimension coordinate - lower, upper = interval.bounds(lower) - while lower <= upper_limit: - lower, upper = interval.interval(lower) - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - True, - coord, - group_by_coords, - extra_condition, - ) - else: - # Decreasing dimension coordinate - lower, upper = interval.bounds(upper) - while upper >= lower_limit: - lower, upper = interval.interval(upper, end=True) - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - False, - coord, - group_by_coords, - extra_condition, - ) + start = 0 + end = group + n = 0 + while start < axis_size: + classification[start:end] = n + start = end + end += group + n += 1 - return classification, n + if group_span is True or group_span is None: + # Use the group definition as the group span + group_span = group - def _time_interval_over( - classification, - n, - coord, - interval, - lower, - upper, - lower_limit, - upper_limit, - group_by, - extra_condition=None, - ): - """Prepares for a collapse over some TimeDuration. + elif isinstance(group, TimeDuration): + # ---------------------------------------------------- + # E.g. 
group=cf.M() + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if coord is None: + raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") - :Parameters: + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - classification: `numpy.ndarray` + lower, upper, lower_limit, upper_limit = _tyu( + coord, group_by, True + ) - n: `int` + classification, n = _time_interval( + classification, + 0, + coord=coord, + interval=group, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by=group_by, + ) - coord: `DimensionCoordinate` + if group_span is True or group_span is None: + # Use the group definition as the group span + group_span = group - interval: `TimeDuration` + elif isinstance(group, Data): + # ---------------------------------------------------- + # Chunks of + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if coord is None: + raise ValueError("TODO asdas 4444444 dhfdui ") - lower: date-time + if coord.Units.isreftime: + raise ValueError( + "Can't group a reference-time axis with {!r}. Use " + "a TimeDuration instance instead.".format(group) + ) - upper: date-time + if group.size != 1: + raise ValueError( + "Group must have only one element: " + "{!r}".format(group) + ) - lower_limit: date-time + if group.Units and not group.Units.equivalent(coord.Units): + raise ValueError( + "Can't group by {!r} when coordinates have " + "non-equivalent units {!r}".format(group, coord.Units) + ) - upper_limit: date-time + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - group_by: `str` + group = group.squeeze() - extra_condition: `Query`, optional + lower, upper, lower_limit, upper_limit = _tyu( + coord, group_by, False + ) - :Returns: + classification, n = _data_interval( + classification, + 0, + coord=coord, + interval=group, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by=group_by, + ) - (`numpy.ndarray`, `int`) + if group_span is True or group_span is None: + # Use the group definition as the group span + group_span = group - """ - group_by_coords = group_by == "coords" - - if coord.increasing: - # Increasing dimension coordinate - # lower, upper = interval.bounds(lower) - upper = interval.interval(upper)[1] - while lower <= upper_limit: - lower, upper = interval.interval(lower) - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - True, - coord, - group_by_coords, - extra_condition, - ) else: - # Decreasing dimension coordinate - # lower, upper = interval.bounds(upper) - lower = interval.interval(upper, end=True)[0] - while upper >= lower_limit: - lower, upper = interval.interval(upper, end=True) - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - False, - coord, - group_by_coords, - extra_condition, + # ---------------------------------------------------- + # E.g. 
group=[cf.month(4), cf.month(cf.wi(9, 11))] + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if coord is None: + coord = self.auxiliary_coordinate( + filter_by_axis=(axis,), axis_mode="exact", default=None ) + if coord is None: + raise ValueError("asdad8777787 TODO") - return classification, n - - def _data_interval( - classification, - n, - coord, - interval, - lower, - upper, - lower_limit, - upper_limit, - group_by, - extra_condition=None, - ): - """Prepares for a collapse where the group is a data - interval. - - :Returns: + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - `numpy.ndarray`, `int` + classification, n = _selection( + classification, + 0, + coord=coord, + selection=group, + parameter="group", + ) - """ - group_by_coords = group_by == "coords" + classification = _discern_runs(classification) - if coord.increasing: - # Increasing dimension coordinate - lower = lower.squeeze() - while lower <= upper_limit: - upper = lower + interval - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - True, - coord, - group_by_coords, - extra_condition, - ) - else: - # Decreasing dimension coordinate - upper = upper.squeeze() - while upper >= lower_limit: - lower = upper - interval - classification, n, lower, upper = _ddddd( - classification, - n, - lower, - upper, - False, - coord, - group_by_coords, - extra_condition, + if group_span is None: + group_span = False + elif group_span is True: + raise ValueError( + "Can't collapse: Can't set group_span=True when " + f"group={group!r}" ) - return classification, n - - def _selection( - classification, - n, - coord, - selection, - parameter, - extra_condition=None, - group_span=None, - within=False, - ): - """Processes a group selection. - - :Parameters: - - classification: `numpy.ndarray` - - n: `int` - - coord: `DimensionCoordinate` - - selection: sequence of `Query` - - parameter: `str` - The name of the `cf.Field.collapse` parameter which - defined *selection*. This is used in error messages. 
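For reference, the classification step that `_selection` implements can be sketched with plain numpy, using hypothetical month values in place of a coordinate construct and boolean arrays in place of cf `Query` conditions. Each condition labels the cells it matches with the next group number, and unmatched cells keep the -1 fill value:

import numpy as np

# Hypothetical coordinate values (the month of each cell along the axis)
months = np.array([12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])

# Each boolean array stands in for one Query in the selection sequence,
# e.g. a cf.month(...) condition evaluated against the coordinate
conditions = [np.isin(months, (12, 1, 2)), np.isin(months, (6, 7, 8))]

classification = np.full(months.shape, -1, dtype=int)
for n, matched in enumerate(conditions):
    classification[matched] = n

print(classification)  # [ 0  0  0 -1 -1 -1  1  1  1 -1 -1 -1]

Cells left at -1 are never collapsed, which is why the real code fills the classification array with -1 before applying the selection.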
- - *Parameter example:* - ``parameter='within_years'`` - - extra_condition: `Query`, optional - - :Returns: - - `numpy.ndarray`, `int` - - """ - # Create an iterator for stepping through each Query in - # the selection sequence - try: - iterator = iter(selection) - except TypeError: - raise ValueError( - "Can't collapse: Bad parameter value: {}={!r}".format( - parameter, selection - ) + if classification is None: + if over == "days": + # ---------------------------------------------------- + # Over days + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) - - for condition in iterator: - if not isinstance(condition, Query): + if coord is None or not coord.Units.isreftime: raise ValueError( - "Can't collapse: {} sequence contains a non-{} " - "object: {!r}".format( - parameter, Query.__name__, condition - ) + "Reference-time dimension coordinates are required " + "for an 'over days' collapse" ) - if extra_condition is not None: - condition &= extra_condition - - boolean_index = condition.evaluate(coord).array + if not coord.has_bounds(): + raise ValueError( + "Reference-time dimension coordinate bounds are " + "required for an 'over days' collapse" + ) - classification[boolean_index] = n - n += 1 + cell_methods = self.cell_methods().ordered() + w = [ + cm.get_qualifier("within", None) + for cm in cell_methods.values() + ] + if "days" not in w: + raise ValueError( + "An 'over days' collapse must come after a " + "'within days' cell method" + ) - # if group_span is not None: - # x = numpy_where(classification==n)[0] - # for i in range(1, max(1, int(float(len(x))/group_span))): - # n += 1 - # classification[x[i*group_span:(i + 1)*group_span]] = n - # n += 1 + # Parse the over_days parameter + if isinstance(over_days, Query): + over_days = (over_days,) + elif isinstance(over_days, TimeDuration): + if over_days.Units.istime and over_days < Data(1, "day"): + raise ValueError( + f"Bad parameter value: over_days={over_days!r}" + ) - return classification, n + coordinate = "minimum" - def _discern_runs(classification, within=False): - """Processes a group classification. + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - :Parameters: + if isinstance(over_days, TimeDuration): + _, _, lower_limit, upper_limit = _tyu( + coord, "bounds", True + ) - classification: `numpy.ndarray` + bounds = coord.bounds + lower_bounds = coord.lower_bounds.datetime_array + upper_bounds = coord.upper_bounds.datetime_array - :Returns: + HMS0 = None - `numpy.ndarray` + n = 0 + for lower, upper in zip(lower_bounds, upper_bounds): + HMS_l = ( + eq(lower.hour, attr="hour") + & eq(lower.minute, attr="minute") + & eq(lower.second, attr="second") + ).addattr("lower_bounds") + HMS_u = ( + eq(upper.hour, attr="hour") + & eq(upper.minute, attr="minute") + & eq(upper.second, attr="second") + ).addattr("upper_bounds") + HMS = HMS_l & HMS_u - """ - x = numpy_where(numpy_diff(classification))[0] + 1 - if not x.size: - if classification[0] >= 0: - classification[:] = 0 + if not HMS0: + HMS0 = HMS + elif HMS.equals(HMS0): + # We've got repeat of the first cell, which + # means that we must have now classified all + # cells. Therefore we can stop. 
+ break - return classification + logger.info( + " HMS = {!r}".format(HMS) + ) # pragma: no cover - if classification[0] >= 0: - classification[0 : x[0]] = 0 - - n = 1 - for i, j in zip(x[:-1], x[1:]): - if classification[i] >= 0: - classification[i:j] = n - n += 1 - - if classification[x[-1]] >= 0: - classification[x[-1] :] = n - n += 1 - - return classification - - def _discern_runs_within(classification, coord): - """Processes group classification for a 'within' - collapse.""" - size = classification.size - if size < 2: - return classification - - n = classification.max() + 1 - - start = 0 - for i, c in enumerate(classification[: size - 1]): - if c < 0: - continue - - if not coord[i : i + 2].contiguous(overlap=False): - classification[start : i + 1] = n - start = i + 1 - n += 1 - - return classification - - def _tyu(coord, group_by, time_interval): - """Returns bounding values and limits for a general - collapse. - - :Parameters: - - coord: `DimensionCoordinate` - The dimension coordinate construct associated with - the collapse. - - group_by: `str` - As for the *group_by* parameter of the `collapse` method. - - time_interval: `bool` - If True then then return a tuple of date-time - objects. If False return a tuple of `Data` objects. - - :Returns: - - `tuple` - A tuple of 4 `Data` object or, if *time_interval* is - True, a tuple of 4 date-time objects. - - """ - bounds = coord.get_bounds(None) - if bounds is not None: - lower_bounds = coord.lower_bounds - upper_bounds = coord.upper_bounds - lower = lower_bounds[0] - upper = upper_bounds[0] - lower_limit = lower_bounds[-1] - upper_limit = upper_bounds[-1] - elif group_by == "coords": - if coord.increasing: - lower = coord.data[0] - upper = coord.data[-1] - else: - lower = coord.data[-1] - upper = coord.data[0] + if over_days is None: + # -------------------------------------------- + # over_days=None + # -------------------------------------------- + # Over all days + index = HMS.evaluate(coord).array + classification[index] = n + n += 1 + elif isinstance(over_days, TimeDuration): + # -------------------------------------------- + # E.g. over_days=cf.M() + # -------------------------------------------- + classification, n = _time_interval_over( + classification, + n, + coord=coord, + interval=over_days, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by="bounds", + extra_condition=HMS, + ) + else: + # -------------------------------------------- + # E.g. 
over_days=[cf.month(cf.wi(4, 9))] + # -------------------------------------------- + classification, n = _selection( + classification, + n, + coord=coord, + selection=over_days, + parameter="over_days", + extra_condition=HMS, + ) - lower_limit = lower - upper_limit = upper - else: - raise ValueError( - "Can't collapse: {!r} coordinate bounds are required " - "with group_by={!r}".format(coord.identity(), group_by) + elif over == "years": + # ---------------------------------------------------- + # Over years + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) - - if time_interval: - units = coord.Units - if units.isreftime: - lower = lower.datetime_array[0] - upper = upper.datetime_array[0] - lower_limit = lower_limit.datetime_array[0] - upper_limit = upper_limit.datetime_array[0] - elif not units.istime: + if coord is None or not coord.Units.isreftime: raise ValueError( - "Can't group by {} when coordinates have units " - "{!r}".format( - TimeDuration.__class__.__name__, coord.Units - ) + "Reference-time dimension coordinates are required " + "for an 'over years' collapse" ) - return (lower, upper, lower_limit, upper_limit) - - def _group_weights(weights, iaxis, index): - """Subspaces weights components. - - :Parameters: - - weights: `dict` or `None` - - iaxis: `int` - - index: `list` - - :Returns: - - `dict` or `None` + bounds = coord.get_bounds(None) + if bounds is None: + raise ValueError( + "Reference-time dimension coordinate bounds are " + "required for an 'over years' collapse" + ) - **Examples:** + cell_methods = self.cell_methods().ordered() + w = [ + cm.get_qualifier("within", None) + for cm in cell_methods.values() + ] + o = [ + cm.get_qualifier("over", None) + for cm in cell_methods.values() + ] + if "years" not in w and "days" not in o: + raise ValueError( + "An 'over years' collapse must come after a " + "'within years' or 'over days' cell method" + ) - >>> print(weights) - None - >>> print(_group_weights(weights, 2, [2, 3, 40])) - None - >>> print(_group_weights(weights, 1, slice(2, 56))) - None + # Parse the over_years parameter + if isinstance(over_years, Query): + over_years = (over_years,) + elif isinstance(over_years, TimeDuration): + if over_years.Units.iscalendartime: + over_years.Units = Units("calendar_years") + if not over_years.isint or over_years < 1: + raise ValueError( + "over_years is not a whole number of " + "calendar years: {!r}".format(over_years) + ) + else: + raise ValueError( + "over_years is not a whole number of calendar " + f"years: {over_years!r}" + ) - >>> weights + coordinate = "minimum" - >>> _group_weights(weights, 2, [2, 3, 40]) + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - >>> _group_weights(weights, 1, slice(2, 56)) + if isinstance(over_years, TimeDuration): + _, _, lower_limit, upper_limit = _tyu( + coord, "bounds", True + ) - """ - if not isinstance(weights, dict): - return weights + lower_bounds = coord.lower_bounds.datetime_array + upper_bounds = coord.upper_bounds.datetime_array + mdHMS0 = None - weights = weights.copy() - for iaxes, value in weights.items(): - if iaxis in iaxes: - indices = [slice(None)] * len(iaxes) - indices[iaxes.index(iaxis)] = index - weights[iaxes] = value[tuple(indices)] - break + n = 0 + for lower, upper in zip(lower_bounds, upper_bounds): + mdHMS_l = ( + eq(lower.month, attr="month") + & eq(lower.day, attr="day") + & eq(lower.hour, attr="hour") + & eq(lower.minute, attr="minute") + & 
eq(lower.second, attr="second") + ).addattr("lower_bounds") + mdHMS_u = ( + eq(upper.month, attr="month") + & eq(upper.day, attr="day") + & eq(upper.hour, attr="hour") + & eq(upper.minute, attr="minute") + & eq(upper.second, attr="second") + ).addattr("upper_bounds") + mdHMS = mdHMS_l & mdHMS_u - return weights + if not mdHMS0: + # Keep a record of the first cell + mdHMS0 = mdHMS + logger.info( + f" mdHMS0 = {mdHMS0!r}" + ) # pragma: no cover + elif mdHMS.equals(mdHMS0): + # We've got repeat of the first cell, which + # means that we must have now classified all + # cells. Therefore we can stop. + break - # START OF MAIN CODE + logger.info( + f" mdHMS = {mdHMS!r}" + ) # pragma: no cover - logger.info(" Grouped collapse:") # pragma: no cover - logger.info( - " method = {!r}".format(method) - ) # pragma: no cover - logger.info( - " axis_in = {!r}".format(axis_in) - ) # pragma: no cover - logger.info( - " axis = {!r}".format(axis) - ) # pragma: no cover - logger.info( - " over = {!r}".format(over) - ) # pragma: no cover - logger.info( - " over_days = {!r}".format(over_days) - ) # pragma: no cover - logger.info( - " over_years = {!r}".format(over_years) - ) # pragma: no cover - logger.info( - " within = {!r}".format(within) - ) # pragma: no cover - logger.info( - " within_days = {!r}".format(within_days) - ) # pragma: no cover - logger.info( - " within_years = {!r}".format(within_years) - ) # pragma: no cover - logger.info( - " regroup = {!r}".format(regroup) - ) # pragma: no cover - logger.info( - " group = {!r}".format(group) - ) # pragma: no cover - logger.info( - " group_span = {!r}".format(group_span) - ) # pragma: no cover - logger.info( - " group_contiguous = {!r}".format(group_contiguous) - ) # pragma: no cover - - # Size of uncollapsed axis - axis_size = self.domain_axes(todict=True)[axis].get_size() - # Integer position of collapse axis - iaxis = self.get_data_axes().index(axis) - - fl = [] - - # If group, rolling window, classification, etc, do something - # special for size one axes - either return unchanged - # (possibly mofiying cell methods with , e.g, within_days', or - # raising an exception for 'can't match', I suppose. - - classification = None - - if group is not None: - if within is not None or over is not None: - raise ValueError( - "Can't set 'group' parameter for a climatological " - "collapse" - ) - - if isinstance(group, numpy_ndarray): - classification = numpy_squeeze(group.copy()) - - if classification.dtype.kind != "i": - raise ValueError( - "Can't group by numpy array of type {}".format( - classification.dtype.name - ) - ) - elif classification.shape != (axis_size,): - raise ValueError( - "Can't group by numpy array with incorrect " - "shape: {}".format(classification.shape) - ) - - # Set group to None - group = None - - if group is not None: - if isinstance(group, Query): - group = (group,) - - if isinstance(group, int): - # ---------------------------------------------------- - # E.g. group=3 - # ---------------------------------------------------- - coord = None - classification = numpy_empty((axis_size,), int) - - start = 0 - end = group - n = 0 - while start < axis_size: - classification[start:end] = n - start = end - end += group - n += 1 - - if group_span is True or group_span is None: - # Use the group definition as the group span - group_span = group - - elif isinstance(group, TimeDuration): - # ---------------------------------------------------- - # E.g. 
group=cf.M() - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None: - raise ValueError("dddddd siduhfsuildfhsuil dhfdui TODO") - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - lower, upper, lower_limit, upper_limit = _tyu( - coord, group_by, True - ) - - classification, n = _time_interval( - classification, - 0, - coord=coord, - interval=group, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by=group_by, - ) - - if group_span is True or group_span is None: - # Use the group definition as the group span - group_span = group - - elif isinstance(group, Data): - # ---------------------------------------------------- - # Chunks of - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None: - raise ValueError("TODO asdas 4444444 dhfdui ") - - if coord.Units.isreftime: - raise ValueError( - "Can't group a reference-time axis with {!r}. Use " - "a TimeDuration instance instead.".format(group) - ) - - if group.size != 1: - raise ValueError( - "Group must have only one element: " - "{!r}".format(group) - ) - - if group.Units and not group.Units.equivalent(coord.Units): - raise ValueError( - "Can't group by {!r} when coordinates have " - "non-equivalent units {!r}".format(group, coord.Units) - ) - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - group = group.squeeze() - - lower, upper, lower_limit, upper_limit = _tyu( - coord, group_by, False - ) - - classification, n = _data_interval( - classification, - 0, - coord=coord, - interval=group, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by=group_by, - ) - - if group_span is True or group_span is None: - # Use the group definition as the group span - group_span = group - - else: - # ---------------------------------------------------- - # E.g. 
group=[cf.month(4), cf.month(cf.wi(9, 11))] - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None: - coord = self.auxiliary_coordinate( - filter_by_axis=(axis,), axis_mode="exact", default=None - ) - if coord is None: - raise ValueError("asdad8777787 TODO") - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - classification, n = _selection( - classification, - 0, - coord=coord, - selection=group, - parameter="group", - ) - - classification = _discern_runs(classification) - - if group_span is None: - group_span = False - elif group_span is True: - raise ValueError( - "Can't collapse: Can't set group_span=True when " - f"group={group!r}" - ) - - if classification is None: - if over == "days": - # ---------------------------------------------------- - # Over days - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None or not coord.Units.isreftime: - raise ValueError( - "Reference-time dimension coordinates are required " - "for an 'over days' collapse" - ) - - if not coord.has_bounds(): - raise ValueError( - "Reference-time dimension coordinate bounds are " - "required for an 'over days' collapse" - ) - - cell_methods = self.cell_methods().ordered() - w = [ - cm.get_qualifier("within", None) - for cm in cell_methods.values() - ] - if "days" not in w: - raise ValueError( - "An 'over days' collapse must come after a " - "'within days' cell method" - ) - - # Parse the over_days parameter - if isinstance(over_days, Query): - over_days = (over_days,) - elif isinstance(over_days, TimeDuration): - if over_days.Units.istime and over_days < Data(1, "day"): - raise ValueError( - f"Bad parameter value: over_days={over_days!r}" - ) - - coordinate = "minimum" - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - if isinstance(over_days, TimeDuration): - _, _, lower_limit, upper_limit = _tyu( - coord, "bounds", True - ) - - bounds = coord.bounds - lower_bounds = coord.lower_bounds.datetime_array - upper_bounds = coord.upper_bounds.datetime_array - - HMS0 = None - - n = 0 - for lower, upper in zip(lower_bounds, upper_bounds): - HMS_l = ( - eq(lower.hour, attr="hour") - & eq(lower.minute, attr="minute") - & eq(lower.second, attr="second") - ).addattr("lower_bounds") - HMS_u = ( - eq(upper.hour, attr="hour") - & eq(upper.minute, attr="minute") - & eq(upper.second, attr="second") - ).addattr("upper_bounds") - HMS = HMS_l & HMS_u - - if not HMS0: - HMS0 = HMS - elif HMS.equals(HMS0): - # We've got repeat of the first cell, which - # means that we must have now classified all - # cells. Therefore we can stop. - break - - logger.info( - " HMS = {!r}".format(HMS) - ) # pragma: no cover - - if over_days is None: - # -------------------------------------------- - # over_days=None - # -------------------------------------------- - # Over all days - index = HMS.evaluate(coord).array - classification[index] = n - n += 1 - elif isinstance(over_days, TimeDuration): - # -------------------------------------------- - # E.g. 
over_days=cf.M() - # -------------------------------------------- - classification, n = _time_interval_over( - classification, - n, - coord=coord, - interval=over_days, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by="bounds", - extra_condition=HMS, - ) - else: - # -------------------------------------------- - # E.g. over_days=[cf.month(cf.wi(4, 9))] - # -------------------------------------------- - classification, n = _selection( - classification, - n, - coord=coord, - selection=over_days, - parameter="over_days", - extra_condition=HMS, - ) - - elif over == "years": - # ---------------------------------------------------- - # Over years - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None or not coord.Units.isreftime: - raise ValueError( - "Reference-time dimension coordinates are required " - "for an 'over years' collapse" - ) - - bounds = coord.get_bounds(None) - if bounds is None: - raise ValueError( - "Reference-time dimension coordinate bounds are " - "required for an 'over years' collapse" - ) - - cell_methods = self.cell_methods().ordered() - w = [ - cm.get_qualifier("within", None) - for cm in cell_methods.values() - ] - o = [ - cm.get_qualifier("over", None) - for cm in cell_methods.values() - ] - if "years" not in w and "days" not in o: - raise ValueError( - "An 'over years' collapse must come after a " - "'within years' or 'over days' cell method" - ) - - # Parse the over_years parameter - if isinstance(over_years, Query): - over_years = (over_years,) - elif isinstance(over_years, TimeDuration): - if over_years.Units.iscalendartime: - over_years.Units = Units("calendar_years") - if not over_years.isint or over_years < 1: - raise ValueError( - "over_years is not a whole number of " - "calendar years: {!r}".format(over_years) - ) - else: - raise ValueError( - "over_years is not a whole number of calendar " - f"years: {over_years!r}" - ) - - coordinate = "minimum" - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - if isinstance(over_years, TimeDuration): - _, _, lower_limit, upper_limit = _tyu( - coord, "bounds", True - ) - - lower_bounds = coord.lower_bounds.datetime_array - upper_bounds = coord.upper_bounds.datetime_array - mdHMS0 = None - - n = 0 - for lower, upper in zip(lower_bounds, upper_bounds): - mdHMS_l = ( - eq(lower.month, attr="month") - & eq(lower.day, attr="day") - & eq(lower.hour, attr="hour") - & eq(lower.minute, attr="minute") - & eq(lower.second, attr="second") - ).addattr("lower_bounds") - mdHMS_u = ( - eq(upper.month, attr="month") - & eq(upper.day, attr="day") - & eq(upper.hour, attr="hour") - & eq(upper.minute, attr="minute") - & eq(upper.second, attr="second") - ).addattr("upper_bounds") - mdHMS = mdHMS_l & mdHMS_u - - if not mdHMS0: - # Keep a record of the first cell - mdHMS0 = mdHMS - logger.info( - f" mdHMS0 = {mdHMS0!r}" - ) # pragma: no cover - elif mdHMS.equals(mdHMS0): - # We've got repeat of the first cell, which - # means that we must have now classified all - # cells. Therefore we can stop. 
- break - - logger.info( - f" mdHMS = {mdHMS!r}" - ) # pragma: no cover - - if over_years is None: - # -------------------------------------------- - # over_years=None - # -------------------------------------------- - # Over all years - index = mdHMS.evaluate(coord).array - classification[index] = n - n += 1 - elif isinstance(over_years, TimeDuration): - # -------------------------------------------- - # E.g. over_years=cf.Y(2) - # -------------------------------------------- - classification, n = _time_interval_over( - classification, - n, - coord=coord, - interval=over_years, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by="bounds", - extra_condition=mdHMS, + if over_years is None: + # -------------------------------------------- + # over_years=None + # -------------------------------------------- + # Over all years + index = mdHMS.evaluate(coord).array + classification[index] = n + n += 1 + elif isinstance(over_years, TimeDuration): + # -------------------------------------------- + # E.g. over_years=cf.Y(2) + # -------------------------------------------- + classification, n = _time_interval_over( + classification, + n, + coord=coord, + interval=over_years, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by="bounds", + extra_condition=mdHMS, ) else: # -------------------------------------------- # E.g. over_years=cf.year(cf.lt(2000)) # -------------------------------------------- classification, n = _selection( - classification, - n, - coord=coord, - selection=over_years, - parameter="over_years", - extra_condition=mdHMS, - ) - - elif within == "days": - # ---------------------------------------------------- - # Within days - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None or not coord.Units.isreftime: - raise ValueError( - "Reference-time dimension coordinates are required " - "for an 'over years' collapse" - ) - - bounds = coord.get_bounds(None) - if bounds is None: - raise ValueError( - "Reference-time dimension coordinate bounds are " - "required for a 'within days' collapse" - ) - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - # Parse the within_days parameter - if isinstance(within_days, Query): - within_days = (within_days,) - elif isinstance(within_days, TimeDuration): - if ( - within_days.Units.istime - and TimeDuration(24, "hours") % within_days - ): - # % Data(1, 'day'): # % within_days: - raise ValueError( - f"Can't collapse: within_days={within_days!r} " - "is not an exact factor of 1 day" - ) - - if isinstance(within_days, TimeDuration): - # ------------------------------------------------ - # E.g. within_days=cf.h(6) - # ------------------------------------------------ - lower, upper, lower_limit, upper_limit = _tyu( - coord, "bounds", True - ) - - classification, n = _time_interval( - classification, - 0, - coord=coord, - interval=within_days, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by=group_by, - ) - - if group_span is True or group_span is None: - # Use the within_days definition as the group - # span - group_span = within_days - - else: - # ------------------------------------------------ - # E.g. 
within_days=cf.hour(cf.lt(12)) - # ------------------------------------------------ - classification, n = _selection( - classification, - 0, - coord=coord, - selection=within_days, - parameter="within_days", - ) - - classification = _discern_runs(classification) - - classification = _discern_runs_within( - classification, coord - ) - - if group_span is None: - group_span = False - elif group_span is True: - raise ValueError( - "Can't collapse: Can't set group_span=True when " - f"within_days={within_days!r}" - ) - - elif within == "years": - # ---------------------------------------------------- - # Within years - # ---------------------------------------------------- - coord = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if coord is None or not coord.Units.isreftime: - raise ValueError( - "Can't collapse: Reference-time dimension " - 'coordinates are required for a "within years" ' - "collapse" - ) - - if not coord.has_bounds(): - raise ValueError( - "Can't collapse: Reference-time dimension coordinate " - 'bounds are required for a "within years" collapse' - ) - - classification = numpy_empty((axis_size,), int) - classification.fill(-1) - - # Parse within_years - if isinstance(within_years, Query): - within_years = (within_years,) - elif within_years is None: - raise ValueError( - "Must set the within_years parameter for a " - '"within years" climatalogical time collapse' - ) - - if isinstance(within_years, TimeDuration): - # ------------------------------------------------ - # E.g. within_years=cf.M() - # ------------------------------------------------ - lower, upper, lower_limit, upper_limit = _tyu( - coord, "bounds", True - ) - - classification, n = _time_interval( - classification, - 0, - coord=coord, - interval=within_years, - lower=lower, - upper=upper, - lower_limit=lower_limit, - upper_limit=upper_limit, - group_by=group_by, - ) - - if group_span is True or group_span is None: - # Use the within_years definition as the group - # span - group_span = within_years - - else: - # ------------------------------------------------ - # E.g. 
within_years=cf.season() - # ------------------------------------------------ - classification, n = _selection( - classification, - 0, - coord=coord, - selection=within_years, - parameter="within_years", - within=True, - ) - - classification = _discern_runs(classification, within=True) - - classification = _discern_runs_within( - classification, coord - ) - - if group_span is None: - group_span = False - elif group_span is True: - raise ValueError( - "Can't collapse: Can't set group_span=True when " - "within_years={!r}".format(within_years) - ) - - elif over is not None: - raise ValueError( - f"Can't collapse: Bad 'over' syntax: {over!r}" - ) - - elif within is not None: - raise ValueError( - f"Can't collapse: Bad 'within' syntax: {within!r}" - ) - - if classification is not None: - # --------------------------------------------------------- - # Collapse each group - # --------------------------------------------------------- - logger.info( - f" classification = {classification}" - ) # pragma: no cover - - unique = numpy_unique(classification) - unique = unique[numpy_where(unique >= 0)[0]] - unique.sort() - - ignore_n = -1 - for u in unique: - index = numpy_where(classification == u)[0].tolist() - - pc = self.subspace(**{axis: index}) - - # ---------------------------------------------------- - # Ignore groups that don't meet the specified criteria - # ---------------------------------------------------- - if over is None: - coord = pc.coordinate(axis_in, default=None) - - if group_span is not False: - if isinstance(group_span, int): - if ( - pc.domain_axes(todict=True)[axis].get_size() - != group_span - ): - classification[index] = ignore_n - ignore_n -= 1 - continue - else: - if coord is None: - raise ValueError( - "Can't collapse: Need an unambiguous 1-d " - "coordinate construct when " - f"group_span={group_span!r}" - ) - - bounds = coord.get_bounds(None) - if bounds is None: - raise ValueError( - "Can't collapse: Need unambiguous 1-d " - "coordinate cell bounds when " - f"group_span={group_span!r}" - ) - - lb = bounds[0, 0].get_data(_fill_value=False) - ub = bounds[-1, 1].get_data(_fill_value=False) - if coord.T: - lb = lb.datetime_array.item() - ub = ub.datetime_array.item() - - if not coord.increasing: - lb, ub = ub, lb - - if group_span + lb != ub: - # The span of this group is not the - # same as group_span, so don't - # collapse it. - classification[index] = ignore_n - ignore_n -= 1 - continue - - if ( - group_contiguous - and coord is not None - and coord.has_bounds() - and not coord.bounds.contiguous( - overlap=(group_contiguous == 2) - ) - ): - # This group is not contiguous, so don't - # collapse it. - classification[index] = ignore_n - ignore_n -= 1 - continue - - if regroup: - continue - - # ---------------------------------------------------- - # Still here? Then collapse the group - # ---------------------------------------------------- - w = _group_weights(weights, iaxis, index) - logger.info( - f" Collapsing group {u}:" - ) # pragma: no cover - - fl.append( - pc.collapse( - method, - axis, - weights=w, - measure=measure, - mtol=mtol, - ddof=ddof, - coordinate=coordinate, - squeeze=False, - inplace=True, - _create_zero_size_cell_bounds=True, - _update_cell_methods=False, - ) - ) - - if regroup: - # return the numpy array - return classification - - elif regroup: - raise ValueError("Can't return classification 2453456 ") - - # Still here? 
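A usage sketch of the climatological grouping that this block sets up, assuming cf is importable and that cf.example_field(2) still provides a multi-year monthly time series as in the cf documentation:

import cf

f = cf.example_field(2)  # assumed: a field with a multi-year monthly time axis

# Seasonal means within each year, then averaged over all years
g = f.collapse(
    "T: mean within years T: mean over years",
    within_years=cf.seasons(),
)
print(g)

Passing a TimeDuration such as within_years=cf.M(3) instead should group by fixed three-month intervals rather than by the named seasons.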
- if not fl: - c = "contiguous " if group_contiguous else "" - s = f" spanning {group_span}" if group_span is not False else "" - if within is not None: - s = f" within {within}{s}" - - raise ValueError( - f"Can't collapse: No {c}groups{s} were identified" - ) - - if len(fl) == 1: - f = fl[0] - else: - # Hack to fix missing bounds! - for g in fl: - try: - c = g.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) - if not c.has_bounds(): - c.set_bounds(c.create_bounds()) - except Exception: - pass - - # -------------------------------------------------------- - # Sort the list of collapsed fields - # -------------------------------------------------------- - if ( - coord is not None - and coord.construct_type == "dimension_coordinate" - ): - fl.sort( - key=lambda g: g.dimension_coordinate( - filter_by_axis=(axis,) - ).datum(0), - reverse=coord.decreasing, - ) - - # -------------------------------------------------------- - # Concatenate the partial collapses - # -------------------------------------------------------- - try: - f = self.concatenate(fl, axis=iaxis, _preserve=False) - except ValueError as error: - raise ValueError(f"Can't collapse: {error}") - - if squeeze and f.domain_axes(todict=True)[axis].get_size() == 1: - # Remove a totally collapsed axis from the field's - # data array - f.squeeze(axis, inplace=True) - - # ------------------------------------------------------------ - # Return the collapsed field - # ------------------------------------------------------------ - self.__dict__ = f.__dict__ - logger.info(" End of grouped collapse") # pragma: no cover - - return self - - def _update_cell_methods( - self, - method=None, - domain_axes=None, - input_axes=None, - within=None, - over=None, - verbose=None, - ): - """Update the cell methods. 
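A minimal sketch of the behaviour this helper maintains, assuming cf is importable; a collapse records an additional cell method on the collapsed axis:

import cf

f = cf.example_field(0)
print(f.cell_methods())  # e.g. one existing "area: mean" cell method

g = f.collapse("X: mean")
print(g.cell_methods())  # an extra "mean" cell method over the X axis is recorded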
- - :Parameters: - - method: `str` - - domain_axes: `Constructs` or `dict` - - {{verbose: `int` or `str` or `None`, optional}} - - :Returns: - - `None` - - """ - original_cell_methods = self.cell_methods().ordered() - logger.info(" Update cell methods:") # pragma: no cover - logger.info( - " Original cell methods = {}".format(original_cell_methods) - ) # pragma: no cover - logger.info( - " method = {!r}".format(method) - ) # pragma: no cover - logger.info( - " within = {!r}".format(within) - ) # pragma: no cover - logger.info( - " over = {!r}".format(over) - ) # pragma: no cover - - if input_axes and tuple(input_axes) == ("area",): - axes = ("area",) - else: - axes = tuple(domain_axes) - - comment = None - - method = _collapse_cell_methods.get(method, method) - - cell_method = CellMethod(axes=axes, method=method) - if within: - cell_method.set_qualifier("within", within) - elif over: - cell_method.set_qualifier("over", over) - - if comment: - cell_method.set_qualifier("comment", comment) - - if original_cell_methods: - # There are already some cell methods - if len(domain_axes) == 1: - # Only one axis has been collapsed - key, original_domain_axis = tuple(domain_axes.items())[0] - - lastcm = tuple(original_cell_methods.values())[-1] - lastcm_method = _collapse_cell_methods.get( - lastcm.get_method(None), lastcm.get_method(None) - ) - - if ( - original_domain_axis.get_size() - == self.domain_axes(todict=True)[key].get_size() - ): - if ( - lastcm.get_axes(None) == axes - and lastcm_method == method - and lastcm_method - in ( - "mean", - "maximum", - "minimum", - "point", - "sum", - "median", - "mode", - "minimum_absolute_value", - "maximum_absolute_value", - ) - and not lastcm.get_qualifier("within", None) - and not lastcm.get_qualifier("over", None) - ): - # It was a null collapse (i.e. the method is - # the same as the last one and the size of the - # collapsed axis hasn't changed). - if within: - lastcm.within = within - elif over: - lastcm.over = over - - cell_method = None - - if cell_method is not None: - self.set_construct(cell_method) - - logger.info( - f" Modified cell methods = {self.cell_methods().ordered()}" - ) # pragma: no cover - - @_deprecated_kwarg_check("axes") - def direction(self, identity, axes=None, **kwargs): - """Whether or not a domain axis is increasing. - - An domain axis is considered to be increasing if its dimension - coordinate values are increasing in index space or if it has no - dimension coordinate. - - .. seealso:: `directions` - - :Parameters: - - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - The *identity* parameter selects the domain axis as - returned by this call of the field construct's - `domain_axis` method: ``f.domain_axis(identity)``. - - axes: deprecated at version 3.0.0 - Use the *identity* parameter instead. - - size: deprecated at version 3.0.0 - - kwargs: deprecated at version 3.0.0 - - :Returns: - - `bool` - Whether or not the domain axis is increasing. 
- - **Examples:** - - >>> print(f.dimension_coordinate('X').array) - array([ 0 30 60]) - >>> f.direction('X') - True - >>> g = f.flip('X') - >>> g.direction('X') - False - - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "direction", kwargs - ) # pragma: no cover - - axis = self.domain_axis(identity, key=True, default=None) - if axis is None: - return True - - for key, coord in self.dimension_coordinates(todict=True).items(): - if axis == self.get_data_axes(key)[0]: - return coord.direction() - - return True - - def directions(self): - """Return a dictionary mapping all domain axes to their - directions. - - .. seealso:: `direction` - - :Returns: - - `dict` - A dictionary whose key/value pairs are domain axis keys - and their directions. - - **Examples:** - - >>> d.directions() - {'dim1': True, 'dim0': False} - - """ - out = {key: True for key in self.domain_axes(todict=True)} - - for key, dc in self.dimension_coordinates(todict=True).items(): - direction = dc.direction() - if not direction: - axis = self.get_data_axes(key)[0] - out[axis] = dc.direction() - - return out - - @_inplace_enabled(default=False) - def insert_dimension(self, axis, position=0, inplace=False): - """Insert a size 1 axis into the data array. - - .. versionadded:: 3.0.0 - - .. seealso:: `domain_axis`, `flatten`, `flip`, `squeeze`, - `transpose`, `unsqueeze` - - :Parameters: - - axis: - Select the domain axis to insert, generally defined by that - which would be selected by passing the given axis description - to a call of the field construct's `domain_axis` method. For - example, for a value of ``'X'``, the domain axis construct - returned by ``f.domain_axis('X')`` is selected. - - If *axis* is `None` then a new domain axis construct will - created for the inserted dimension. - - position: `int`, optional - Specify the position that the new axis will have in the - data array. By default the new axis has position 0, the - slowest varying position. - - {{inplace: `bool`, optional}} - - :Returns: - - `Field` or `None` - The field construct with expanded data, or `None` if the - operation was in-place. 
- - **Examples:** - - >>> f = cf.example_field(0) - >>> print(f) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - >>> g = f.insert_dimension('T', 0) - >>> print(g) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(time(1), latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - - A previously non-existent size 1 axis must be created prior to - insertion: - - >>> f.insert_dimension(None, 1, inplace=True) - >>> print(f) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(time(1), key%domainaxis3(1), latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - - """ - f = _inplace_enabled_define_and_cleanup(self) - - if axis is None: - axis = f.set_construct(self._DomainAxis(1)) - else: - axis = f.domain_axis( - axis, - key=True, - default=ValueError("Can't identify a unique axis to insert"), - ) - - # Expand the dims in the field construct's data array - super(Field, f).insert_dimension( - axis=axis, position=position, inplace=True - ) - - return f - - def indices(self, *mode, **kwargs): - """Create indices that define a subspace of the field construct. - - The subspace is defined by identifying indices based on the - metadata constructs. - - Metadata constructs are selected conditions are specified on their - data. Indices for subspacing are then automatically inferred from - where the conditions are met. - - The returned tuple of indices may be used to created a subspace by - indexing the original field construct with them. - - Metadata constructs and the conditions on their data are defined - by keyword parameters. - - * Any domain axes that have not been identified remain unchanged. - - * Multiple domain axes may be subspaced simultaneously, and it - doesn't matter which order they are specified in. - - * Subspace criteria may be provided for size 1 domain axes that - are not spanned by the field construct's data. - - * Explicit indices may also be assigned to a domain axis - identified by a metadata construct, with either a Python `slice` - object, or a sequence of integers or booleans. - - * For a dimension that is cyclic, a subspace defined by a slice or - by a `Query` instance is assumed to "wrap" around the edges of - the data. - - * Conditions may also be applied to multi-dimensional metadata - constructs. The "compress" mode is still the default mode (see - the positional arguments), but because the indices may not be - acting along orthogonal dimensions, some missing data may still - need to be inserted into the field construct's data. - - **Auxiliary masks** - - When creating an actual subspace with the indices, if the first - element of the tuple of indices is ``'mask'`` then the extent of - the subspace is defined only by the values of elements three and - onwards. 
In this case the second element contains an "auxiliary" - data mask that is applied to the subspace after its initial - creation, in order to set unselected locations to missing data. - - .. seealso:: `subspace`, `where`, `__getitem__`, `__setitem__` - - :Parameters: - - mode: `str`, *optional* - There are three modes of operation, each of which provides - indices for a different type of subspace: - - ============== ========================================== - *mode* Description - ============== ========================================== - ``'compress'`` This is the default mode. Unselected - locations are removed to create the - returned subspace. Note that if a - multi-dimensional metadata construct is - being used to define the indices then some - missing data may still be inserted at - unselected locations. - - ``'envelope'`` The returned subspace is the smallest that - contains all of the selected - indices. Missing data is inserted at - unselected locations within the envelope. - - ``'full'`` The returned subspace has the same domain - as the original field construct. Missing - data is inserted at unselected locations. - ============== ========================================== - - kwargs: *optional* - A keyword name is an identity of a metadata construct, and - the keyword value provides a condition for inferring - indices that apply to the dimension (or dimensions) - spanned by the metadata construct's data. Indices are - created that select every location for which the metadata - construct's data satisfies the condition. - - :Returns: - - `tuple` - The indices meeting the conditions. - - **Examples:** - - >>> q = cf.example_field(0) - >>> print(q) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - >>> indices = q.indices(X=112.5) - >>> print(indices) - (slice(0, 5, 1), slice(2, 3, 1)) - >>> q[indices] - - >>> q.indices(X=112.5, latitude=cf.gt(-60)) - (slice(1, 5, 1), slice(2, 3, 1)) - >>> q.indices(latitude=cf.eq(-45) | cf.ge(20)) - (array([1, 3, 4]), slice(0, 8, 1)) - >>> q.indices(X=[1, 2, 4], Y=slice(None, None, -1)) - (slice(4, None, -1), array([1, 2, 4])) - >>> q.indices(X=cf.wi(-100, 200)) - (slice(0, 5, 1), slice(-2, 4, 1)) - >>> q.indices(X=slice(-2, 4)) - (slice(0, 5, 1), slice(-2, 4, 1)) - >>> q.indices('compress', X=[1, 2, 4, 6]) - (slice(0, 5, 1), array([1, 2, 4, 6])) - >>> q.indices(Y=[True, False, True, True, False]) - (array([0, 2, 3]), slice(0, 8, 1)) - >>> q.indices('envelope', X=[1, 2, 4, 6]) - ('mask', [], slice(0, 5, 1), slice(1, 7, 1)) - >>> indices = q.indices('full', X=[1, 2, 4, 6]) - ('mask', [], slice(0, 5, 1), slice(0, 8, 1)) - >>> print(indices) - >>> print(q) - + classification, + n, + coord=coord, + selection=over_years, + parameter="over_years", + extra_condition=mdHMS, + ) - >>> print(a) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(120), latitude(5), longitude(8)) K - Cell methods : area: mean - Dimension coords: time(120) = [1959-12-16 12:00:00, ..., 1969-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> a.indices(T=410.5) - (slice(2, 3, 1), slice(0, 5, 
1), slice(0, 8, 1)) - >>> a.indices(T=cf.dt('1960-04-16')) - (slice(4, 5, 1), slice(0, 5, 1), slice(0, 8, 1)) - >>> indices = a.indices(T=cf.wi(cf.dt('1962-11-01'), - ... cf.dt('1967-03-17 07:30'))) - >>> print(indices) - (slice(35, 88, 1), slice(0, 5, 1), slice(0, 8, 1)) - >>> a[indices] - + elif within == "days": + # ---------------------------------------------------- + # Within days + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if coord is None or not coord.Units.isreftime: + raise ValueError( + "Reference-time dimension coordinates are required " + "for an 'over years' collapse" + ) - >>> print(t) - Field: air_temperature (ncvar%ta) - --------------------------------- - Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K - Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum - Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K - Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] - : grid_latitude(10) = [2.2, ..., -1.76] degrees - : grid_longitude(9) = [-4.7, ..., -1.18] degrees - : time(1) = [2019-01-01 00:00:00] - Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N - : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E - : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] - Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 - Coord references: grid_mapping_name:rotated_latitude_longitude - : standard_name:atmosphere_hybrid_height_coordinate - Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m - : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] - : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m - >>> indices = t.indices(latitude=cf.wi(51, 53)) - >>> print(indices) - ('mask', [], slice(0, 1, 1), slice(3, 8, 1), slice(0, 9, 1)) - >>> t[indices] - + bounds = coord.get_bounds(None) + if bounds is None: + raise ValueError( + "Reference-time dimension coordinate bounds are " + "required for a 'within days' collapse" + ) - """ - if "exact" in mode: - _DEPRECATION_ERROR_ARG( - self, - "indices", - "exact", - "Keywords are now never interpreted as regular expressions.", - ) # pragma: no cover + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - if len(mode) > 1: - raise ValueError( - "Can't provide more than one positional argument." - ) + # Parse the within_days parameter + if isinstance(within_days, Query): + within_days = (within_days,) + elif isinstance(within_days, TimeDuration): + if ( + within_days.Units.istime + and TimeDuration(24, "hours") % within_days + ): + # % Data(1, 'day'): # % within_days: + raise ValueError( + f"Can't collapse: within_days={within_days!r} " + "is not an exact factor of 1 day" + ) - envelope = "envelope" in mode - full = "full" in mode - compress = "compress" in mode or not (envelope or full) + if isinstance(within_days, TimeDuration): + # ------------------------------------------------ + # E.g. 
within_days=cf.h(6) + # ------------------------------------------------ + lower, upper, lower_limit, upper_limit = _tyu( + coord, "bounds", True + ) - logger.debug("Field.indices:") # pragma: no cover - logger.debug( - " envelope, full, compress = {} {} {}".format( - envelope, full, compress - ) - ) # pragma: no cover + classification, n = _time_interval( + classification, + 0, + coord=coord, + interval=within_days, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by=group_by, + ) - auxiliary_mask = [] + if group_span is True or group_span is None: + # Use the within_days definition as the group + # span + group_span = within_days - data_axes = self.get_data_axes() + else: + # ------------------------------------------------ + # E.g. within_days=cf.hour(cf.lt(12)) + # ------------------------------------------------ + classification, n = _selection( + classification, + 0, + coord=coord, + selection=within_days, + parameter="within_days", + ) - # Initialize indices - indices = [slice(None)] * self.ndim + classification = _discern_runs(classification) - domain_axes = self.domain_axes(todict=True) + classification = _discern_runs_within( + classification, coord + ) - parsed = {} - unique_axes = set() - n_axes = 0 - for identity, value in kwargs.items(): - if identity in domain_axes: - axes = (identity,) - key = None - construct = None - else: - key, construct = self.construct( - identity, - filter_by_data=True, - item=True, - default=(None, None), + if group_span is None: + group_span = False + elif group_span is True: + raise ValueError( + "Can't collapse: Can't set group_span=True when " + f"within_days={within_days!r}" + ) + + elif within == "years": + # ---------------------------------------------------- + # Within years + # ---------------------------------------------------- + coord = self.dimension_coordinate( + filter_by_axis=(axis,), default=None ) - if construct is None: + if coord is None or not coord.Units.isreftime: raise ValueError( - "Can't find indices: Ambiguous axis or axes: " - f"{identity!r}" + "Can't collapse: Reference-time dimension " + 'coordinates are required for a "within years" ' + "collapse" ) - axes = self.get_data_axes(key) + if not coord.has_bounds(): + raise ValueError( + "Can't collapse: Reference-time dimension coordinate " + 'bounds are required for a "within years" collapse' + ) - sorted_axes = tuple(sorted(axes)) - if sorted_axes not in parsed: - n_axes += len(sorted_axes) + classification = numpy_empty((axis_size,), int) + classification.fill(-1) - parsed.setdefault(sorted_axes, []).append( - (axes, key, construct, value) - ) + # Parse within_years + if isinstance(within_years, Query): + within_years = (within_years,) + elif within_years is None: + raise ValueError( + "Must set the within_years parameter for a " + '"within years" climatalogical time collapse' + ) - unique_axes.update(sorted_axes) + if isinstance(within_years, TimeDuration): + # ------------------------------------------------ + # E.g. 
within_years=cf.M() + # ------------------------------------------------ + lower, upper, lower_limit, upper_limit = _tyu( + coord, "bounds", True + ) - if len(unique_axes) < n_axes: - raise ValueError( - "Can't find indices: Multiple constructs with incompatible " - "domain axes" - ) + classification, n = _time_interval( + classification, + 0, + coord=coord, + interval=within_years, + lower=lower, + upper=upper, + lower_limit=lower_limit, + upper_limit=upper_limit, + group_by=group_by, + ) - for sorted_axes, axes_key_construct_value in parsed.items(): - axes, keys, constructs, points = list( - zip(*axes_key_construct_value) - ) - n_items = len(constructs) - n_axes = len(sorted_axes) + if group_span is True or group_span is None: + # Use the within_years definition as the group + # span + group_span = within_years + + else: + # ------------------------------------------------ + # E.g. within_years=cf.season() + # ------------------------------------------------ + classification, n = _selection( + classification, + 0, + coord=coord, + selection=within_years, + parameter="within_years", + within=True, + ) + + classification = _discern_runs(classification, within=True) + + classification = _discern_runs_within( + classification, coord + ) - if n_items > n_axes: - if n_axes == 1: - a = "axis" - else: - a = "axes" + if group_span is None: + group_span = False + elif group_span is True: + raise ValueError( + "Can't collapse: Can't set group_span=True when " + "within_years={!r}".format(within_years) + ) + elif over is not None: raise ValueError( - "Error: Can't specify {} conditions for {} {}: {}".format( - n_items, n_axes, a, points - ) + f"Can't collapse: Bad 'over' syntax: {over!r}" ) - create_mask = False - - item_axes = axes[0] + elif within is not None: + raise ValueError( + f"Can't collapse: Bad 'within' syntax: {within!r}" + ) - logger.debug( - " item_axes = {!r}".format(item_axes) - ) # pragma: no cover - logger.debug( - " keys = {!r}".format(keys) + if classification is not None: + # --------------------------------------------------------- + # Collapse each group + # --------------------------------------------------------- + logger.info( + f" classification = {classification}" ) # pragma: no cover - if n_axes == 1: - # ---------------------------------------------------- - # 1-d construct - # ---------------------------------------------------- - ind = None + unique = numpy_unique(classification) + unique = unique[numpy_where(unique >= 0)[0]] + unique.sort() - logger.debug( - " {} 1-d constructs: {!r}".format(n_items, constructs) - ) # pragma: no cover + ignore_n = -1 + for u in unique: + index = numpy_where(classification == u)[0].tolist() - axis = item_axes[0] - item = constructs[0] - value = points[0] + pc = self.subspace(**{axis: index}) - logger.debug( - " axis = {!r}".format(axis) - ) # pragma: no cover - logger.debug( - " value = {!r}".format(value) - ) # pragma: no cover + # ---------------------------------------------------- + # Ignore groups that don't meet the specified criteria + # ---------------------------------------------------- + if over is None: + coord = pc.coordinate(axis_in, default=None) - if isinstance(value, (list, slice, tuple, numpy_ndarray)): - # ------------------------------------------------ - # 1-dimensional CASE 1: Value is already an index, - # e.g. 
[0], (0,3), - # slice(0,4,2), - # numpy.array([2,4,7]), - # [True, False, True] - # ------------------------------------------------- - logger.debug(" 1-d CASE 1: ") # pragma: no cover + if group_span is not False: + if isinstance(group_span, int): + if ( + pc.domain_axes(todict=True)[axis].get_size() + != group_span + ): + classification[index] = ignore_n + ignore_n -= 1 + continue + else: + if coord is None: + raise ValueError( + "Can't collapse: Need an unambiguous 1-d " + "coordinate construct when " + f"group_span={group_span!r}" + ) - index = value + bounds = coord.get_bounds(None) + if bounds is None: + raise ValueError( + "Can't collapse: Need unambiguous 1-d " + "coordinate cell bounds when " + f"group_span={group_span!r}" + ) - if envelope or full: - size = self.constructs[axis].get_size() - d = Data(list(range(size))) - ind = (d[value].array,) - index = slice(None) + lb = bounds[0, 0].get_data(_fill_value=False) + ub = bounds[-1, 1].get_data(_fill_value=False) + if coord.T: + lb = lb.datetime_array.item() + ub = ub.datetime_array.item() - elif ( - item is not None - and isinstance(value, Query) - and value.operator in ("wi", "wo") - and item.construct_type == "dimension_coordinate" - and self.iscyclic(axis) - ): - # self.iscyclic(sorted_axes)): - # ------------------------------------------------ - # 1-dimensional CASE 2: Axis is cyclic and - # subspace criterion is a - # 'within' or 'without' - # Query instance - # ------------------------------------------------- - logger.debug(" 1-d CASE 2: ") # pragma: no cover + if not coord.increasing: + lb, ub = ub, lb - if item.increasing: - anchor0 = value.value[0] - anchor1 = value.value[1] - else: - anchor0 = value.value[1] - anchor1 = value.value[0] + if group_span + lb != ub: + # The span of this group is not the + # same as group_span, so don't + # collapse it. + classification[index] = ignore_n + ignore_n -= 1 + continue - a = self.anchor(axis, anchor0, dry_run=True)["roll"] - b = self.flip(axis).anchor(axis, anchor1, dry_run=True)[ - "roll" - ] + if ( + group_contiguous + and coord is not None + and coord.has_bounds() + and not coord.bounds.contiguous( + overlap=(group_contiguous == 2) + ) + ): + # This group is not contiguous, so don't + # collapse it. + classification[index] = ignore_n + ignore_n -= 1 + continue - size = item.size - if abs(anchor1 - anchor0) >= item.period(): - if value.operator == "wo": - set_start_stop = 0 - else: - set_start_stop = -a + if regroup: + continue - start = set_start_stop - stop = set_start_stop - elif a + b == size: - b = self.anchor(axis, anchor1, dry_run=True)["roll"] - if (b == a and value.operator == "wo") or not ( - b == a or value.operator == "wo" - ): - set_start_stop = -a - else: - set_start_stop = 0 + # ---------------------------------------------------- + # Still here? 
Then collapse the group + # ---------------------------------------------------- + w = _group_weights(weights, iaxis, index) + logger.info( + f" Collapsing group {u}:" + ) # pragma: no cover - start = set_start_stop - stop = set_start_stop - else: - if value.operator == "wo": - start = b - size - stop = -a + size - else: - start = -a - stop = b - size + fl.append( + pc.collapse( + method, + axis, + weights=w, + measure=measure, + mtol=mtol, + ddof=ddof, + coordinate=coordinate, + squeeze=False, + inplace=True, + _create_zero_size_cell_bounds=True, + _update_cell_methods=False, + ) + ) - index = slice(start, stop, 1) + if regroup: + # return the numpy array + return classification - if full: - # index = slice(start, start+size, 1) - d = Data(list(range(size))) - d.cyclic(0) - ind = (d[index].array,) + elif regroup: + raise ValueError("Can't return classification 2453456 ") - index = slice(None) + # Still here? + if not fl: + c = "contiguous " if group_contiguous else "" + s = f" spanning {group_span}" if group_span is not False else "" + if within is not None: + s = f" within {within}{s}" - elif item is not None: - # ------------------------------------------------- - # 1-dimensional CASE 3: All other 1-d cases - # ------------------------------------------------- - logger.debug(" 1-d CASE 3:") # pragma: no cover + raise ValueError( + f"Can't collapse: No {c}groups{s} were identified" + ) - item_match = value == item + if len(fl) == 1: + f = fl[0] + else: + # Hack to fix missing bounds! + for g in fl: + try: + c = g.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if not c.has_bounds(): + c.set_bounds(c.create_bounds()) + except Exception: + pass - if not item_match.any(): - raise ValueError( - "No {!r} axis indices found from: {}".format( - identity, value - ) - ) + # -------------------------------------------------------- + # Sort the list of collapsed fields + # -------------------------------------------------------- + if ( + coord is not None + and coord.construct_type == "dimension_coordinate" + ): + fl.sort( + key=lambda g: g.dimension_coordinate( + filter_by_axis=(axis,) + ).datum(0), + reverse=coord.decreasing, + ) - index = numpy_asanyarray(item_match) + # -------------------------------------------------------- + # Concatenate the partial collapses + # -------------------------------------------------------- + try: + f = self.concatenate(fl, axis=iaxis, _preserve=False) + except ValueError as error: + raise ValueError(f"Can't collapse: {error}") - if envelope or full: - if numpy_ma_isMA(index): - ind = numpy_ma_where(index) - else: - ind = numpy_where(index) + if squeeze and f.domain_axes(todict=True)[axis].get_size() == 1: + # Remove a totally collapsed axis from the field's + # data array + f.squeeze(axis, inplace=True) - index = slice(None) + # ------------------------------------------------------------ + # Return the collapsed field + # ------------------------------------------------------------ + self.__dict__ = f.__dict__ + logger.info(" End of grouped collapse") # pragma: no cover - else: - raise ValueError( - "Must specify a domain axis construct or a construct " - "with data for which to create indices" - ) + return self - logger.debug( - " index = {}".format(index) - ) # pragma: no cover + def _update_cell_methods( + self, + method=None, + domain_axes=None, + input_axes=None, + within=None, + over=None, + verbose=None, + ): + """Update the cell methods. - # Put the index into the correct place in the list of - # indices. 
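The grouped collapse assembled here is driven from the public ``collapse`` method. A minimal sketch of calls that exercise this code path, assuming ``f`` is a field construct with a multi-year monthly time axis (``cf.M`` and the within/over climatological syntax are taken from the ``collapse`` documentation; ``cf.seasons()`` is assumed to be available for the selection branch):

>>> f.collapse('T: mean', group=cf.M(3), group_span=cf.M(3))
>>> f.collapse('T: mean within years T: mean over years',
...            within_years=cf.seasons())

Groups that do not span ``group_span``, or that are not contiguous when ``group_contiguous`` is set, are given a negative classification value above and are therefore left uncollapsed.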
- # - # Note that we might overwrite it later if there's an - # auxiliary mask for this axis. - if axis in data_axes: - indices[data_axes.index(axis)] = index + :Parameters: - else: - # ----------------------------------------------------- - # N-dimensional constructs - # ----------------------------------------------------- - logger.debug( - " {} N-d constructs: {!r}".format(n_items, constructs) - ) # pragma: no cover - logger.debug( - " {} points : {!r}".format(len(points), points) - ) # pragma: no cover - logger.debug( - " field.shape : {}".format(self.shape) - ) # pragma: no cover + method: `str` - # Make sure that each N-d item has the same relative - # axis order as the field's data array. - # - # For example, if the data array of the field is - # ordered T Z Y X and the item is ordered Y T then the - # item is transposed so that it is ordered T Y. For - # example, if the field's data array is ordered Z Y X - # and the item is ordered X Y T (T is size 1) then - # transpose the item so that it is ordered Y X T. - g = self.transpose(data_axes, constructs=True) + domain_axes: `Constructs` or `dict` - # g = self - # data_axes = .get_data_axes(default=None) - # for item_axes2 in axes: - # if item_axes2 != data_axes: - # g = self.transpose(data_axes, constructs=True) - # break + {{verbose: `int` or `str` or `None`, optional}} - item_axes = g.get_data_axes(keys[0]) + :Returns: - constructs = [g.constructs[key] for key in keys] - logger.debug( - " transposed N-d constructs: {!r}".format(constructs) - ) # pragma: no cover + `None` - item_matches = [ - (value == construct).data - for value, construct in zip(points, constructs) - ] + """ + original_cell_methods = self.cell_methods().ordered() + logger.info(" Update cell methods:") # pragma: no cover + logger.info( + " Original cell methods = {}".format(original_cell_methods) + ) # pragma: no cover + logger.info( + " method = {!r}".format(method) + ) # pragma: no cover + logger.info( + " within = {!r}".format(within) + ) # pragma: no cover + logger.info( + " over = {!r}".format(over) + ) # pragma: no cover - item_match = item_matches.pop() + if input_axes and tuple(input_axes) == ("area",): + axes = ("area",) + else: + axes = tuple(domain_axes) - for m in item_matches: - item_match &= m + comment = None - item_match = item_match.array # LAMA alert + method = _collapse_cell_methods.get(method, method) - if numpy_ma_isMA: - ind = numpy_ma_where(item_match) - else: - ind = numpy_where(item_match) + cell_method = CellMethod(axes=axes, method=method) + if within: + cell_method.set_qualifier("within", within) + elif over: + cell_method.set_qualifier("over", over) - logger.debug( - " item_match = {}".format(item_match) - ) # pragma: no cover - logger.debug( - " ind = {}".format(ind) - ) # pragma: no cover + if comment: + cell_method.set_qualifier("comment", comment) - bounds = [ - item.bounds.array[ind] - for item in constructs - if item.has_bounds() - ] + if original_cell_methods: + # There are already some cell methods + if len(domain_axes) == 1: + # Only one axis has been collapsed + key, original_domain_axis = tuple(domain_axes.items())[0] - contains = False - if bounds: - points2 = [] - for v, construct in zip(points, constructs): - if isinstance(v, Query): - if v.operator == "contains": - contains = True - v = v.value - elif v.operator == "eq": - v = v.value - else: - contains = False - break + lastcm = tuple(original_cell_methods.values())[-1] + lastcm_method = _collapse_cell_methods.get( + lastcm.get_method(None), lastcm.get_method(None) + ) - 
v = Data.asdata(v) - if v.Units: - v.Units = construct.Units + if ( + original_domain_axis.get_size() + == self.domain_axes(todict=True)[key].get_size() + ): + if ( + lastcm.get_axes(None) == axes + and lastcm_method == method + and lastcm_method + in ( + "mean", + "maximum", + "minimum", + "point", + "sum", + "median", + "mode", + "minimum_absolute_value", + "maximum_absolute_value", + ) + and not lastcm.get_qualifier("within", None) + and not lastcm.get_qualifier("over", None) + ): + # It was a null collapse (i.e. the method is + # the same as the last one and the size of the + # collapsed axis hasn't changed). + if within: + lastcm.within = within + elif over: + lastcm.over = over - points2.append(v.datum()) + cell_method = None - if contains: - # The coordinates have bounds and the condition is - # a 'contains' Query object. Check each - # potentially matching cell for actually including - # the point. - try: - Path - except NameError: - raise ImportError( - "Must install matplotlib to create indices based " - "on {}-d constructs and a 'contains' Query " - "object".format(constructs[0].ndim) - ) + if cell_method is not None: + self.set_construct(cell_method) - if n_items != 2: - raise ValueError( - "Can't index for cell from {}-d coordinate " - "objects".format(n_axes) - ) + logger.info( + f" Modified cell methods = {self.cell_methods().ordered()}" + ) # pragma: no cover - if 0 < len(bounds) < n_items: - raise ValueError("bounds alskdaskds TODO") + @_deprecated_kwarg_check("axes") + def direction(self, identity, axes=None, **kwargs): + """Whether or not a domain axis is increasing. - # Remove grid cells if, upon closer inspection, - # they do actually contain the point. - delete = [ - n - for n, vertices in enumerate(zip(*zip(*bounds))) - if not Path(zip(*vertices)).contains_point(points2) - ] + An domain axis is considered to be increasing if its dimension + coordinate values are increasing in index space or if it has no + dimension coordinate. - if delete: - ind = [numpy_delete(ind_1d, delete) for ind_1d in ind] + .. seealso:: `directions` - if ind is not None: - mask_shape = [None] * self.ndim - masked_subspace_size = 1 - ind = numpy_array(ind) - logger.debug(" ind = {}".format(ind)) # pragma: no cover + :Parameters: - for i, (axis, start, stop) in enumerate( - zip(item_axes, ind.min(axis=1), ind.max(axis=1)) - ): - if axis not in data_axes: - continue + identity: + Select the domain axis construct by one of: - position = data_axes.index(axis) + * An identity or key of a 1-d coordinate construct that + whose data spans the domain axis construct. - if indices[position] == slice(None): - if compress: - # Create a compressed index for this axis - size = stop - start + 1 - index = sorted(set(ind[i])) - elif envelope: - # Create an envelope index for this axis - stop += 1 - size = stop - start - index = slice(start, stop) - elif full: - # Create a full index for this axis - start = 0 - # stop = self.axis_size(axis) - stop = domain_axes[axis].get_size() - size = stop - start - index = slice(start, stop) - else: - raise ValueError( - "Must have full, envelope or compress" - ) # pragma: no cover + * A domain axis construct identity or key. - indices[position] = index + * The position of the domain axis construct in the field + construct's data. - mask_shape[position] = size - masked_subspace_size *= size - ind[i] -= start + The *identity* parameter selects the domain axis as + returned by this call of the field construct's + `domain_axis` method: ``f.domain_axis(identity)``. 
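For illustration, each of the following calls is intended to select the same domain axis through ``f.domain_axis``, assuming a field ``f`` whose 'X' coordinate spans the second data dimension and whose corresponding domain axis key is ``'domainaxis1'`` (both values are hypothetical):

>>> f.direction('X')
>>> f.direction('domainaxis1')
>>> f.direction(1)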
- create_mask = ind.shape[1] < masked_subspace_size - else: - create_mask = False + axes: deprecated at version 3.0.0 + Use the *identity* parameter instead. - # -------------------------------------------------------- - # Create an auxiliary mask for these axes - # -------------------------------------------------------- - logger.debug( - " create_mask = {}".format(create_mask) - ) # pragma: no cover + size: deprecated at version 3.0.0 - if create_mask: - logger.debug( - " mask_shape = {}".format(mask_shape) - ) # pragma: no cover + kwargs: deprecated at version 3.0.0 - mask = self.data._create_auxiliary_mask_component( - mask_shape, ind, compress - ) - auxiliary_mask.append(mask) - logger.debug( - " mask_shape = {}".format(mask_shape) - ) # pragma: no cover - logger.debug( - " mask.shape = {}".format(mask.shape) - ) # pragma: no cover - # --- End: for + :Returns: - indices = tuple(parse_indices(self.shape, tuple(indices))) + `bool` + Whether or not the domain axis is increasing. - if auxiliary_mask: - indices = ("mask", auxiliary_mask) + indices + **Examples:** - logger.debug( - " Final indices = {}".format(indices) + >>> print(f.dimension_coordinate('X').array) + array([ 0 30 60]) + >>> f.direction('X') + True + >>> g = f.flip('X') + >>> g.direction('X') + False + + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "direction", kwargs ) # pragma: no cover - # Return the tuple of indices and the auxiliary mask (which - # may be None) - return indices + axis = self.domain_axis(identity, key=True, default=None) + if axis is None: + return True - @_inplace_enabled(default=True) - def set_data( - self, data, axes=None, set_axes=True, copy=True, inplace=True - ): - """Set the field construct data. + for key, coord in self.dimension_coordinates(todict=True).items(): + if axis == self.get_data_axes(key)[0]: + return coord.direction() - .. versionadded:: 3.0.0 + return True - .. seealso:: `data`, `del_data`, `get_data`, `has_data`, - `set_construct` + def directions(self): + """Return a dictionary mapping all domain axes to their + directions. - :Parameters: + .. seealso:: `direction` - data: `Data` - The data to be inserted. + :Returns: - {{data_like}} + `dict` + A dictionary whose key/value pairs are domain axis keys + and their directions. - axes: (sequence of) `str` or `int`, optional - Set the domain axes constructs that are spanned by the - data. If unset, and the *set_axes* parameter is True, then - an attempt will be made to assign existing domain axis - constructs to the data. + **Examples:** - The contents of the *axes* parameter is mapped to domain - axis constructs by translating each element into a domain - axis construct key via the `domain_axis` method. + >>> d.directions() + {'dim1': True, 'dim0': False} - *Parameter example:* - ``axes='domainaxis1'`` + """ + out = {key: True for key in self.domain_axes(todict=True)} - *Parameter example:* - ``axes='X'`` + for key, dc in self.dimension_coordinates(todict=True).items(): + direction = dc.direction() + if not direction: + axis = self.get_data_axes(key)[0] + out[axis] = dc.direction() - *Parameter example:* - ``axes=['latitude']`` + return out - *Parameter example:* - ``axes=['X', 'longitude']`` + @_inplace_enabled(default=False) + def insert_dimension(self, axis, position=0, inplace=False): + """Insert a size 1 axis into the data array. - *Parameter example:* - ``axes=[1, 0]`` + .. 
versionadded:: 3.0.0 - set_axes: `bool`, optional - If False then do not set the domain axes constructs that - are spanned by the data, even if the *axes* parameter has - been set. By default the axes are set either according to - the *axes* parameter, or if any domain axis constructs - exist then an attempt will be made to assign existing - domain axis constructs to the data. + .. seealso:: `domain_axis`, `flatten`, `flip`, `squeeze`, + `transpose`, `unsqueeze` - If the *axes* parameter is `None` and no domain axis - constructs exist then no attempt is made to assign domain - axes constructs to the data, regardless of the value of - *set_axes*. + :Parameters: - copy: `bool`, optional - If True then set a copy of the data. By default the data - are copied. + axis: + Select the domain axis to insert, generally defined by that + which would be selected by passing the given axis description + to a call of the field construct's `domain_axis` method. For + example, for a value of ``'X'``, the domain axis construct + returned by ``f.domain_axis('X')`` is selected. - {{inplace: `bool`, optional (default True)}} + If *axis* is `None` then a new domain axis construct will + created for the inserted dimension. - .. versionadded:: 3.7.0 + position: `int`, optional + Specify the position that the new axis will have in the + data array. By default the new axis has position 0, the + slowest varying position. + + {{inplace: `bool`, optional}} :Returns: - `None` or `Field` - If the operation was in-place then `None` is returned, - otherwise return a new `Field` instance containing the new - data. + `Field` or `None` + The field construct with expanded data, or `None` if the + operation was in-place. **Examples:** - >>> f = cf.Field() - >>> f.set_data([1, 2, 3]) - >>> f.has_data() - True - >>> f.get_data() - - >>> f.data - - >>> f.del_data() - - >>> g = f.set_data([4, 5, 6], inplace=False) - >>> g.data - - >>> f.has_data() - False - >>> print(f.get_data(None)) - None - >>> print(f.del_data(None)) - None - - """ - data = self._Data(data, copy=False) + >>> f = cf.example_field(0) + >>> print(f) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> g = f.insert_dimension('T', 0) + >>> print(g) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(time(1), latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] - # Construct new field - f = _inplace_enabled_define_and_cleanup(self) + A previously non-existent size 1 axis must be created prior to + insertion: - domain_axes = f.domain_axes(todict=True) - if axes is None and not domain_axes: - set_axes = False + >>> f.insert_dimension(None, 1, inplace=True) + >>> print(f) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(time(1), key%domainaxis3(1), latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] - if not set_axes: - if not data.Units: - units = getattr(f, "Units", None) - if units is not None: - 
if copy: - copy = False - data = data.override_units(units, inplace=False) - else: - data.override_units(units, inplace=True) + """ + f = _inplace_enabled_define_and_cleanup(self) - super(cfdm.Field, f).set_data( - data, axes=None, copy=copy, inplace=True + if axis is None: + axis = f.set_construct(self._DomainAxis(1)) + else: + axis = f.domain_axis( + axis, + key=True, + default=ValueError("Can't identify a unique axis to insert"), ) - return f + # Expand the dims in the field construct's data array + super(Field, f).insert_dimension( + axis=axis, position=position, inplace=True + ) - if data.isscalar: - # -------------------------------------------------------- - # The data array is scalar - # -------------------------------------------------------- - if axes or axes == 0: - raise ValueError( - "Can't set data: Wrong number of axes for scalar data " - f"array: axes={axes}" - ) + return f - axes = [] + def indices(self, *mode, **kwargs): + """Create indices that define a subspace of the field construct. - elif axes is not None: - # -------------------------------------------------------- - # Axes have been set - # -------------------------------------------------------- - if isinstance(axes, (str, int, slice)): - axes = (axes,) + The subspace is defined by identifying indices based on the + metadata constructs. - axes = [f.domain_axis(axis, key=True) for axis in axes] + Metadata constructs are selected conditions are specified on their + data. Indices for subspacing are then automatically inferred from + where the conditions are met. - if len(axes) != data.ndim: - raise ValueError( - "Can't set data: {} axes provided, but {} needed".format( - len(axes), data.ndim - ) - ) + The returned tuple of indices may be used to created a subspace by + indexing the original field construct with them. - for axis, size in zip(axes, data.shape): - axis_size = domain_axes[axis].get_size(None) - if size != axis_size: - axes_shape = tuple( - domain_axes[axis].get_size(None) for axis in axes - ) - raise ValueError( - f"Can't set data: Data shape {data.shape} differs " - f"from shape implied by axes {axes}: {axes_shape}" - ) + Metadata constructs and the conditions on their data are defined + by keyword parameters. - elif f.get_data_axes(default=None) is None: - # -------------------------------------------------------- - # The data is not scalar and axes have not been set and - # the domain does not have data axes defined - # - # => infer the axes - # -------------------------------------------------------- - data_shape = data.shape - if len(data_shape) != len(set(data_shape)): - raise ValueError( - f"Can't insert data: Ambiguous data shape: {data_shape}. " - "Consider setting the axes parameter." - ) + * Any domain axes that have not been identified remain unchanged. - if not domain_axes: - raise ValueError("Can't set data: No domain axes exist") + * Multiple domain axes may be subspaced simultaneously, and it + doesn't matter which order they are specified in. - axes = [] - for n in data_shape: - da_key = f.domain_axis( - filter_by_size=(n,), key=True, default=None - ) - if da_key is None: - raise ValueError( - "Can't insert data: Ambiguous data shape: " - f"{data_shape}. Consider setting the axes parameter." - ) + * Subspace criteria may be provided for size 1 domain axes that + are not spanned by the field construct's data. 
- axes.append(da_key) + * Explicit indices may also be assigned to a domain axis + identified by a metadata construct, with either a Python `slice` + object, or a sequence of integers or booleans. - else: - # -------------------------------------------------------- - # The data is not scalar and axes have not been set, but - # there are data axes defined on the field. - # -------------------------------------------------------- - axes = f.get_data_axes() - if len(axes) != data.ndim: - raise ValueError( - f"Wrong number of axes for data array: {axes!r}" - ) + * For a dimension that is cyclic, a subspace defined by a slice or + by a `Query` instance is assumed to "wrap" around the edges of + the data. - for axis, size in zip(axes, data.shape): - if domain_axes[axis].get_size(None) != size: - raise ValueError( - "Can't insert data: Incompatible size for axis " - f"{axis!r}: {size}" - ) + * Conditions may also be applied to multi-dimensional metadata + constructs. The "compress" mode is still the default mode (see + the positional arguments), but because the indices may not be + acting along orthogonal dimensions, some missing data may still + need to be inserted into the field construct's data. - if not data.Units: - units = getattr(f, "Units", None) - if units is not None: - if copy: - copy = False - data = data.override_units(units, inplace=False) - else: - data.override_units(units, inplace=True) + **Auxiliary masks** - super(cfdm.Field, f).set_data(data, axes=axes, copy=copy, inplace=True) + When creating an actual subspace with the indices, if the first + element of the tuple of indices is ``'mask'`` then the extent of + the subspace is defined only by the values of elements three and + onwards. In this case the second element contains an "auxiliary" + data mask that is applied to the subspace after its initial + creation, in order to set unselected locations to missing data. - # Apply cyclic axes - if axes: - cyclic = self._cyclic - if cyclic: - cyclic_axes = [ - axes.index(axis) for axis in cyclic if axis in axes - ] - if cyclic_axes: - data.cyclic(cyclic_axes, True) + .. seealso:: `subspace`, `where`, `__getitem__`, `__setitem__` - return f + :Parameters: - def domain_mask(self, **kwargs): - """Return a boolean field that is True where criteria are met. + mode: `str`, *optional* + There are three modes of operation, each of which provides + indices for a different type of subspace: - .. versionadded:: 1.1 + ============== ========================================== + *mode* Description + ============== ========================================== + ``'compress'`` This is the default mode. Unselected + locations are removed to create the + returned subspace. Note that if a + multi-dimensional metadata construct is + being used to define the indices then some + missing data may still be inserted at + unselected locations. - .. seealso:: `indices`, `mask`, `subspace` + ``'envelope'`` The returned subspace is the smallest that + contains all of the selected + indices. Missing data is inserted at + unselected locations within the envelope. - :Parameters: + ``'full'`` The returned subspace has the same domain + as the original field construct. Missing + data is inserted at unselected locations. + ============== ========================================== - kwargs: optional - A dictionary of keyword arguments to pass to the `indices` - method to define the criteria to meet for a element to be - set as `True`. 
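A sketch of how 'envelope' indices, including their auxiliary mask, might be applied, assuming ``q`` is the specific_humidity example field used in the examples below:

>>> indices = q.indices('envelope', X=[1, 2, 4, 6])
>>> indices[0]
'mask'
>>> g = q[indices]

Unselected longitudes that lie inside the envelope are set to missing data in ``g`` when the auxiliary mask is applied.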
+ kwargs: *optional* + A keyword name is an identity of a metadata construct, and + the keyword value provides a condition for inferring + indices that apply to the dimension (or dimensions) + spanned by the metadata construct's data. Indices are + created that select every location for which the metadata + construct's data satisfies the condition. :Returns: - `Field` - The domain mask. + `tuple` + The indices meeting the conditions. **Examples:** - Create a domain mask which is masked at all between between -30 - and 30 degrees of latitude: + >>> q = cf.example_field(0) + >>> print(q) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> indices = q.indices(X=112.5) + >>> print(indices) + (slice(0, 5, 1), slice(2, 3, 1)) + >>> q[indices] + + >>> q.indices(X=112.5, latitude=cf.gt(-60)) + (slice(1, 5, 1), slice(2, 3, 1)) + >>> q.indices(latitude=cf.eq(-45) | cf.ge(20)) + (array([1, 3, 4]), slice(0, 8, 1)) + >>> q.indices(X=[1, 2, 4], Y=slice(None, None, -1)) + (slice(4, None, -1), array([1, 2, 4])) + >>> q.indices(X=cf.wi(-100, 200)) + (slice(0, 5, 1), slice(-2, 4, 1)) + >>> q.indices(X=slice(-2, 4)) + (slice(0, 5, 1), slice(-2, 4, 1)) + >>> q.indices('compress', X=[1, 2, 4, 6]) + (slice(0, 5, 1), array([1, 2, 4, 6])) + >>> q.indices(Y=[True, False, True, True, False]) + (array([0, 2, 3]), slice(0, 8, 1)) + >>> q.indices('envelope', X=[1, 2, 4, 6]) + ('mask', [], slice(0, 5, 1), slice(1, 7, 1)) + >>> indices = q.indices('full', X=[1, 2, 4, 6]) + ('mask', [], slice(0, 5, 1), slice(0, 8, 1)) + >>> print(indices) + >>> print(q) + + + >>> print(a) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(120), latitude(5), longitude(8)) K + Cell methods : area: mean + Dimension coords: time(120) = [1959-12-16 12:00:00, ..., 1969-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> a.indices(T=410.5) + (slice(2, 3, 1), slice(0, 5, 1), slice(0, 8, 1)) + >>> a.indices(T=cf.dt('1960-04-16')) + (slice(4, 5, 1), slice(0, 5, 1), slice(0, 8, 1)) + >>> indices = a.indices(T=cf.wi(cf.dt('1962-11-01'), + ... 
cf.dt('1967-03-17 07:30'))) + >>> print(indices) + (slice(35, 88, 1), slice(0, 5, 1), slice(0, 8, 1)) + >>> a[indices] + + + >>> print(t) + Field: air_temperature (ncvar%ta) + --------------------------------- + Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K + Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum + Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K + Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] + : grid_latitude(10) = [2.2, ..., -1.76] degrees + : grid_longitude(9) = [-4.7, ..., -1.18] degrees + : time(1) = [2019-01-01 00:00:00] + Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N + : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E + : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] + Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 + Coord references: grid_mapping_name:rotated_latitude_longitude + : standard_name:atmosphere_hybrid_height_coordinate + Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m + : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] + : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m + >>> indices = t.indices(latitude=cf.wi(51, 53)) + >>> print(indices) + ('mask', [], slice(0, 1, 1), slice(3, 8, 1), slice(0, 9, 1)) + >>> t[indices] + + + """ + if "exact" in mode: + _DEPRECATION_ERROR_ARG( + self, + "indices", + "exact", + "Keywords are now never interpreted as regular expressions.", + ) # pragma: no cover - >>> m = f.domain_mask(latitude=cf.wi(-30, 30)) + if len(mode) > 1: + raise ValueError( + "Can't provide more than one positional argument." + ) - """ - mask = self.copy() + envelope = "envelope" in mode + full = "full" in mode + compress = "compress" in mode or not (envelope or full) - mask.clear_properties() - mask.nc_del_variable(None) + logger.debug("Field.indices:") # pragma: no cover + logger.debug( + " envelope, full, compress = {} {} {}".format( + envelope, full, compress + ) + ) # pragma: no cover - for key in self.constructs.filter_by_type( - "cell_method", "field_ancillary", todict=True - ): - mask.del_construct(key) + auxiliary_mask = [] - false_everywhere = Data.zeros(self.shape, dtype=bool) + data_axes = self.get_data_axes() - mask.set_data(false_everywhere, axes=self.get_data_axes(), copy=False) + # Initialize indices + indices = [slice(None)] * self.ndim - mask.subspace[mask.indices(**kwargs)] = True + domain_axes = self.domain_axes(todict=True) - mask.long_name = "domain mask" + parsed = {} + unique_axes = set() + n_axes = 0 + for identity, value in kwargs.items(): + if identity in domain_axes: + axes = (identity,) + key = None + construct = None + else: + key, construct = self.construct( + identity, + filter_by_data=True, + item=True, + default=(None, None), + ) + if construct is None: + raise ValueError( + "Can't find indices: Ambiguous axis or axes: " + f"{identity!r}" + ) - return mask + axes = self.get_data_axes(key) - @_inplace_enabled(default=False) - @_manage_log_level_via_verbosity - def compute_vertical_coordinates( - self, default_to_zero=True, strict=True, inplace=False, verbose=None - ): - """Compute non-parametric vertical coordinates. 
+ sorted_axes = tuple(sorted(axes)) + if sorted_axes not in parsed: + n_axes += len(sorted_axes) - When vertical coordinates are a function of horizontal location as - well as parameters which depend on vertical location, they cannot - be stored in a vertical dimension coordinate construct. In such - cases a parametric vertical dimension coordinate construct is - stored and a coordinate reference construct contains the formula - for computing the required non-parametric vertical coordinates. + parsed.setdefault(sorted_axes, []).append( + (axes, key, construct, value) + ) - {{formula terms links}} + unique_axes.update(sorted_axes) - For example, multi-dimensional non-parametric parametric ocean - altitude coordinates can be computed from one-dimensional - parametric ocean sigma coordinates. + if len(unique_axes) < n_axes: + raise ValueError( + "Can't find indices: Multiple constructs with incompatible " + "domain axes" + ) - Coordinate reference systems based on parametric vertical - coordinates are identified from the coordinate reference - constructs and, if possible, the corresponding non-parametric - vertical coordinates are computed and stored in a new auxiliary - coordinate construct. + for sorted_axes, axes_key_construct_value in parsed.items(): + axes, keys, constructs, points = list( + zip(*axes_key_construct_value) + ) + n_items = len(constructs) + n_axes = len(sorted_axes) - If there are no appropriate coordinate reference constructs then - the field construct is unchanged. + if n_items > n_axes: + if n_axes == 1: + a = "axis" + else: + a = "axes" - .. versionadded:: 3.8.0 + raise ValueError( + "Error: Can't specify {} conditions for {} {}: {}".format( + n_items, n_axes, a, points + ) + ) - .. seealso:: `CoordinateReference` + create_mask = False - :Parameters: + item_axes = axes[0] - {{default_to_zero: `bool`, optional}} + logger.debug( + " item_axes = {!r}".format(item_axes) + ) # pragma: no cover + logger.debug( + " keys = {!r}".format(keys) + ) # pragma: no cover - strict: `bool` - If False then allow the computation to occur when + if n_axes == 1: + # ---------------------------------------------------- + # 1-d construct + # ---------------------------------------------------- + ind = None - * A domain ancillary construct has no standard name, but - the corresponding term has a standard name that is - prescribed + logger.debug( + " {} 1-d constructs: {!r}".format(n_items, constructs) + ) # pragma: no cover - * When the computed standard name can not be found by - inference from the standard names of the domain - ancillary constructs, nor from the - ``computed_standard_name`` parameter of the relevant - coordinate reference construct. + axis = item_axes[0] + item = constructs[0] + value = points[0] - By default an exception is raised in these cases. + logger.debug( + " axis = {!r}".format(axis) + ) # pragma: no cover + logger.debug( + " value = {!r}".format(value) + ) # pragma: no cover - If a domain ancillary construct does have a standard name, - but one that is inconsistent with any prescribed standard - names, then an exception is raised regardless of the value - of *strict*. + if isinstance(value, (list, slice, tuple, numpy_ndarray)): + # ------------------------------------------------ + # 1-dimensional CASE 1: Value is already an index, + # e.g. 
[0], (0,3), + # slice(0,4,2), + # numpy.array([2,4,7]), + # [True, False, True] + # ------------------------------------------------- + logger.debug(" 1-d CASE 1: ") # pragma: no cover - {{inplace: `bool`, optional}} + index = value - {{verbose: `int` or `str` or `None`, optional}} + if envelope or full: + size = self.constructs[axis].get_size() + d = Data(list(range(size))) + ind = (d[value].array,) + index = slice(None) - :Returns: + elif ( + item is not None + and isinstance(value, Query) + and value.operator in ("wi", "wo") + and item.construct_type == "dimension_coordinate" + and self.iscyclic(axis) + ): + # self.iscyclic(sorted_axes)): + # ------------------------------------------------ + # 1-dimensional CASE 2: Axis is cyclic and + # subspace criterion is a + # 'within' or 'without' + # Query instance + # ------------------------------------------------- + logger.debug(" 1-d CASE 2: ") # pragma: no cover - `Field` or `None` - The field construct with the new non-parametric vertical - coordinates, or `None` if the operation was in-place. + if item.increasing: + anchor0 = value.value[0] + anchor1 = value.value[1] + else: + anchor0 = value.value[1] + anchor1 = value.value[0] - **Examples** + a = self.anchor(axis, anchor0, dry_run=True)["roll"] + b = self.flip(axis).anchor(axis, anchor1, dry_run=True)[ + "roll" + ] - >>> f = cf.example_field(1) - >>> print(f) - Field: air_temperature (ncvar%ta) - --------------------------------- - Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K - Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum - Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K - Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] - : grid_latitude(10) = [2.2, ..., -1.76] degrees - : grid_longitude(9) = [-4.7, ..., -1.18] degrees - : time(1) = [2019-01-01 00:00:00] - Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N - : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E - : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] - Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 - Coord references: grid_mapping_name:rotated_latitude_longitude - : standard_name:atmosphere_hybrid_height_coordinate - Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m - : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] - : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m - >>> print(f.auxiliary_coordinate('altitude', default=None)) - None - >>> g = f.compute_vertical_coordinates() - >>> print(g.auxiliary_coordinates) - Constructs: - {'auxiliarycoordinate0': , - 'auxiliarycoordinate1': , - 'auxiliarycoordinate2': , - 'auxiliarycoordinate3': } - >>> g.auxiliary_coordinate('altitude').dump() - Auxiliary coordinate: altitude - long_name = 'Computed from parametric atmosphere_hybrid_height_coordinate - vertical coordinates' - standard_name = 'altitude' - units = 'm' - Data(1, 10, 9) = [[[10.0, ..., 5410.0]]] m - Bounds:units = 'm' - Bounds:Data(1, 10, 9, 2) = [[[[5.0, ..., 5415.0]]]] m + size = item.size + if abs(anchor1 - anchor0) >= item.period(): + if value.operator == "wo": + set_start_stop = 0 + else: + set_start_stop = -a - """ - f = _inplace_enabled_define_and_cleanup(self) + start = set_start_stop + stop = 
set_start_stop + elif a + b == size: + b = self.anchor(axis, anchor1, dry_run=True)["roll"] + if (b == a and value.operator == "wo") or not ( + b == a or value.operator == "wo" + ): + set_start_stop = -a + else: + set_start_stop = 0 - for cr in f.coordinate_references(todict=True).values(): - # -------------------------------------------------------- - # Compute the non-parametric vertical coordinates, if - # possible. - # -------------------------------------------------------- - ( - standard_name, - computed_standard_name, - computed, - computed_axes, - k_axis, - ) = FormulaTerms.formula(f, cr, default_to_zero, strict) + start = set_start_stop + stop = set_start_stop + else: + if value.operator == "wo": + start = b - size + stop = -a + size + else: + start = -a + stop = b - size - if computed is None: - # No non-parametric vertical coordinates were - # computed - continue + index = slice(start, stop, 1) - # -------------------------------------------------------- - # Convert the computed domain ancillary construct to an - # auxiliary coordinate construct, and insert it into the - # field construct. - # -------------------------------------------------------- - c = f._AuxiliaryCoordinate(source=computed, copy=False) - c.clear_properties() - c.long_name = ( - "Computed from parametric {} " - "vertical coordinates".format(standard_name) - ) - if computed_standard_name: - c.standard_name = computed_standard_name + if full: + # index = slice(start, start+size, 1) + d = Data(list(range(size))) + d.cyclic(0) + ind = (d[index].array,) - logger.detail( - "Non-parametric coordinates:\n{}".format( - c.dump(display=False, _level=1) - ) - ) # pragma: no cover + index = slice(None) - key = f.set_construct(c, axes=computed_axes, copy=False) + elif item is not None: + # ------------------------------------------------- + # 1-dimensional CASE 3: All other 1-d cases + # ------------------------------------------------- + logger.debug(" 1-d CASE 3:") # pragma: no cover - # Reference the new coordinates from the coordinate - # reference construct - cr.set_coordinate(key) + item_match = value == item - logger.debug( - "Non-parametric coordinates construct key: {!r}\n" - "Updated coordinate reference construct:\n{}".format( - key, cr.dump(display=False, _level=1) - ) - ) # pragma: no cover + if not item_match.any(): + raise ValueError( + "No {!r} axis indices found from: {}".format( + identity, value + ) + ) - return f + index = numpy_asanyarray(item_match) - def match_by_construct(self, *identities, OR=False, **conditions): - """Whether or not there are particular metadata constructs. + if envelope or full: + if numpy_ma_isMA(index): + ind = numpy_ma_where(index) + else: + ind = numpy_where(index) - .. note:: The API changed at version 3.1.0 + index = slice(None) - .. versionadded:: 3.0.0 + else: + raise ValueError( + "Must specify a domain axis construct or a construct " + "with data for which to create indices" + ) - .. seealso:: `match`, `match_by_property`, `match_by_rank`, - `match_by_identity`, `match_by_ncvar`, - `match_by_units` + logger.debug( + " index = {}".format(index) + ) # pragma: no cover - :Parameters: + # Put the index into the correct place in the list of + # indices. + # + # Note that we might overwrite it later if there's an + # auxiliary mask for this axis. + if axis in data_axes: + indices[data_axes.index(axis)] = index - identities: optional - Identify the metadata constructs that have any of the - given identities or construct keys. 
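The cyclic handling above also covers 'without' conditions. A sketch, assuming the field's 'X' axis has been marked as cyclic and that ``cf.wo`` is used to build the query:

>>> q.iscyclic('X')
True
>>> indices = q.indices(X=cf.wo(45, 315))
>>> g = q[indices]

The returned slice may have a negative start or stop so that the subspace wraps across the boundary of the cyclic axis.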
+ else: + # ----------------------------------------------------- + # N-dimensional constructs + # ----------------------------------------------------- + logger.debug( + " {} N-d constructs: {!r}".format(n_items, constructs) + ) # pragma: no cover + logger.debug( + " {} points : {!r}".format(len(points), points) + ) # pragma: no cover + logger.debug( + " field.shape : {}".format(self.shape) + ) # pragma: no cover - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + # Make sure that each N-d item has the same relative + # axis order as the field's data array. + # + # For example, if the data array of the field is + # ordered T Z Y X and the item is ordered Y T then the + # item is transposed so that it is ordered T Y. For + # example, if the field's data array is ordered Z Y X + # and the item is ordered X Y T (T is size 1) then + # transpose the item so that it is ordered Y X T. + g = self.transpose(data_axes, constructs=True) - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + # g = self + # data_axes = .get_data_axes(default=None) + # for item_axes2 in axes: + # if item_axes2 != data_axes: + # g = self.transpose(data_axes, constructs=True) + # break - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + item_axes = g.get_data_axes(keys[0]) - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + constructs = [g.constructs[key] for key in keys] + logger.debug( + " transposed N-d constructs: {!r}".format(constructs) + ) # pragma: no cover - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + item_matches = [ + (value == construct).data + for value, construct in zip(points, constructs) + ] - If a cell method construct identity is given (such as - ``'method:mean'``) then it will only be compared with the - most recently applied cell method operation. + item_match = item_matches.pop() - Alternatively, one or more cell method constucts may be - identified in a single string with a CF-netCDF cell - methods-like syntax for describing both the collapse - dimensions, the collapse method, and any cell method - construct qualifiers. If N cell methods are described in - this way then they will collectively identify the N most - recently applied cell method operations. For example, - ``'T: maximum within years T: mean over years'`` will be - compared with the most two most recently applied cell - method operations. 
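A sketch of matching against recently applied cell methods with the CF-netCDF-like syntax described above, assuming ``f`` was produced by a maximum-within-years collapse followed by a mean-over-years collapse:

>>> f.match_by_construct('T: maximum within years T: mean over years')
>>> f.match_by_construct('method:mean')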
+ for m in item_matches: + item_match &= m - *Parameter example:* - ``'measure:area'`` + item_match = item_match.array # LAMA alert - *Parameter example:* - ``'latitude'`` + if numpy_ma_isMA: + ind = numpy_ma_where(item_match) + else: + ind = numpy_where(item_match) - *Parameter example:* - ``'long_name=Longitude'`` + logger.debug( + " item_match = {}".format(item_match) + ) # pragma: no cover + logger.debug( + " ind = {}".format(ind) + ) # pragma: no cover - *Parameter example:* - ``'domainancillary2', 'ncvar%areacello'`` + bounds = [ + item.bounds.array[ind] + for item in constructs + if item.has_bounds() + ] - conditions: optional - Identify the metadata constructs that have any of the - given identities or construct keys, and whose data satisfy - conditions. + contains = False + if bounds: + points2 = [] + for v, construct in zip(points, constructs): + if isinstance(v, Query): + if v.operator == "contains": + contains = True + v = v.value + elif v.operator == "eq": + v = v.value + else: + contains = False + break - A construct identity or construct key (as defined by the - *identities* parameter) is given as a keyword name and a - condition on its data is given as the keyword value. + v = Data.asdata(v) + if v.Units: + v.Units = construct.Units - The condition is satisfied if any of its data values - equals the value provided. + points2.append(v.datum()) + + if contains: + # The coordinates have bounds and the condition is + # a 'contains' Query object. Check each + # potentially matching cell for actually including + # the point. + try: + Path + except NameError: + raise ImportError( + "Must install matplotlib to create indices based " + "on {}-d constructs and a 'contains' Query " + "object".format(constructs[0].ndim) + ) - *Parameter example:* - ``longitude=180.0`` + if n_items != 2: + raise ValueError( + "Can't index for cell from {}-d coordinate " + "objects".format(n_axes) + ) - *Parameter example:* - ``time=cf.dt('1959-12-16')`` + if 0 < len(bounds) < n_items: + raise ValueError("bounds alskdaskds TODO") - *Parameter example:* - ``latitude=cf.ge(0)`` + # Remove grid cells if, upon closer inspection, + # they do actually contain the point. + delete = [ + n + for n, vertices in enumerate(zip(*zip(*bounds))) + if not Path(zip(*vertices)).contains_point(points2) + ] - *Parameter example:* - ``latitude=cf.ge(0), air_pressure=500`` + if delete: + ind = [numpy_delete(ind_1d, delete) for ind_1d in ind] - *Parameter example:* - ``**{'latitude': cf.ge(0), 'long_name=soil_level': 4}`` + if ind is not None: + mask_shape = [None] * self.ndim + masked_subspace_size = 1 + ind = numpy_array(ind) + logger.debug(" ind = {}".format(ind)) # pragma: no cover - OR: `bool`, optional - If True then return `True` if at least one metadata - construct matches at least one of the criteria given by - the *identities* or *conditions* arguments. By default - `True` is only returned if the field constructs matches - each of the given criteria. + for i, (axis, start, stop) in enumerate( + zip(item_axes, ind.min(axis=1), ind.max(axis=1)) + ): + if axis not in data_axes: + continue - mode: deprecated at version 3.1.0 - Use the *OR* parameter instead. 
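The bounds test above is what allows point-in-cell selection on two-dimensional coordinate constructs. A sketch, assuming ``t`` is the rotated-pole example field shown earlier and that matplotlib is installed (it is required for the containment test), with hypothetical point values:

>>> indices = t.indices(latitude=cf.contains(52.5), longitude=cf.contains(4.0))
>>> t[indices]

Conditions on both two-dimensional coordinates are needed, since the containment test is only made when exactly two constructs span the selected axes.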
+ position = data_axes.index(axis) - constructs: deprecated at version 3.1.0 + if indices[position] == slice(None): + if compress: + # Create a compressed index for this axis + size = stop - start + 1 + index = sorted(set(ind[i])) + elif envelope: + # Create an envelope index for this axis + stop += 1 + size = stop - start + index = slice(start, stop) + elif full: + # Create a full index for this axis + start = 0 + # stop = self.axis_size(axis) + stop = domain_axes[axis].get_size() + size = stop - start + index = slice(start, stop) + else: + raise ValueError( + "Must have full, envelope or compress" + ) # pragma: no cover - :Returns: + indices[position] = index - `bool` - Whether or not the field construct contains the specified - metadata constructs. + mask_shape[position] = size + masked_subspace_size *= size + ind[i] -= start - **Examples:** + create_mask = ind.shape[1] < masked_subspace_size + else: + create_mask = False - TODO + # -------------------------------------------------------- + # Create an auxiliary mask for these axes + # -------------------------------------------------------- + logger.debug( + " create_mask = {}".format(create_mask) + ) # pragma: no cover - """ - if identities: - if identities[0] == "or": - _DEPRECATION_ERROR_ARG( - self, - "match_by_construct", - "or", - message="Use 'OR=True' instead.", - version="3.1.0", + if create_mask: + logger.debug( + " mask_shape = {}".format(mask_shape) ) # pragma: no cover - if identities[0] == "and": - _DEPRECATION_ERROR_ARG( - self, - "match_by_construct", - "and", - message="Use 'OR=False' instead.", - version="3.1.0", + mask = self.data._create_auxiliary_mask_component( + mask_shape, ind, compress + ) + auxiliary_mask.append(mask) + logger.debug( + " mask_shape = {}".format(mask_shape) + ) # pragma: no cover + logger.debug( + " mask.shape = {}".format(mask.shape) ) # pragma: no cover + # --- End: for - if not identities and not conditions: - return True + indices = tuple(parse_indices(self.shape, tuple(indices))) - constructs = self.constructs + if auxiliary_mask: + indices = ("mask", auxiliary_mask) + indices - if not constructs: - return False + logger.debug( + " Final indices = {}".format(indices) + ) # pragma: no cover - n = 0 + # Return the tuple of indices and the auxiliary mask (which + # may be None) + return indices - self_cell_methods = self.cell_methods() # TODO + @_inplace_enabled(default=True) + def set_data( + self, data, axes=None, set_axes=True, copy=True, inplace=True + ): + """Set the field construct data. - for identity in identities: - cms = False - try: - cms = ": " in identity - except TypeError: - cms = False + .. versionadded:: 3.0.0 - if cms: - cms = CellMethod.create(identity) - for cm in cms: - axes = [ - self.domain_axis(axis, key=True, default=axis) - for axis in cm.get_axes(()) - ] - if axes: - cm.set_axes(axes) + .. seealso:: `data`, `del_data`, `get_data`, `has_data`, + `set_construct` - if not cms: - filtered = constructs(identity) - if filtered: - # Check for cell methods - if set(filtered.construct_types().values()) == { - "cell_method" - }: - key = tuple(self_cell_methods.ordered())[-1] - filtered = self_cell_methods(key)(identity) # TODO - if not filtered: - if not OR: - return False + :Parameters: - n -= 1 + data: `Data` + The data to be inserted. 
- n += 1 - elif not OR: - return False - else: - cell_methods = tuple(self_cell_methods.ordered().values())[ - -len(cms) : - ] - for cm0, cm1 in zip(cms, cell_methods): - if cm0.has_axes() and set(cm0.get_axes()) != set( - cm1.get_axes(()) - ): - if not OR: - return False + {{data_like}} - n -= 1 - break + axes: (sequence of) `str` or `int`, optional + Set the domain axes constructs that are spanned by the + data. If unset, and the *set_axes* parameter is True, then + an attempt will be made to assign existing domain axis + constructs to the data. - if cm0.has_method() and ( - cm0.get_method() != cm1.get_method(None) - ): - if not OR: - return False + The contents of the *axes* parameter is mapped to domain + axis constructs by translating each element into a domain + axis construct key via the `domain_axis` method. - n -= 1 - break + *Parameter example:* + ``axes='domainaxis1'`` - ok = True - for key, value in cm0.qualifiers(): - if value != cm1.get_qualifier(key, None): - if not OR: - return False + *Parameter example:* + ``axes='X'`` - ok = False - break + *Parameter example:* + ``axes=['latitude']`` - if not ok: - n -= 1 - break + *Parameter example:* + ``axes=['X', 'longitude']`` - n += 1 + *Parameter example:* + ``axes=[1, 0]`` - if conditions: - for identity, value in conditions.items(): - if self.subspace("test", **{identity: value}): - n += 1 - elif not OR: - return False + set_axes: `bool`, optional + If False then do not set the domain axes constructs that + are spanned by the data, even if the *axes* parameter has + been set. By default the axes are set either according to + the *axes* parameter, or if any domain axis constructs + exist then an attempt will be made to assign existing + domain axis constructs to the data. - if OR: - return bool(n) + If the *axes* parameter is `None` and no domain axis + constructs exist then no attempt is made to assign domain + axes constructs to the data, regardless of the value of + *set_axes*. - return True + copy: `bool`, optional + If True then set a copy of the data. By default the data + are copied. - def match_by_rank(self, *ranks): - """Whether or not the number of domain axis constructs satisfies - conditions. + {{inplace: `bool`, optional (default True)}} + + .. versionadded:: 3.7.0 - .. versionadded:: 3.0.0 + :Returns: - .. seealso:: `match`, `match_by_property`, `match_by_identity`, - `match_by_ncvar`, `match_by_units`, - `match_by_construct` + `None` or `Field` + If the operation was in-place then `None` is returned, + otherwise return a new `Field` instance containing the new + data. - :Parameters: + **Examples:** - ranks: optional - Define conditions on the number of domain axis constructs. + >>> f = cf.Field() + >>> f.set_data([1, 2, 3]) + >>> f.has_data() + True + >>> f.get_data() + + >>> f.data + + >>> f.del_data() + + >>> g = f.set_data([4, 5, 6], inplace=False) + >>> g.data + + >>> f.has_data() + False + >>> print(f.get_data(None)) + None + >>> print(f.del_data(None)) + None - A condition is one of: + """ + data = self._Data(data, copy=False) - * `int` - * a `Query` object + # Construct new field + f = _inplace_enabled_define_and_cleanup(self) - The condition is satisfied if the number of domain axis - constructs equals the condition value. 
+ domain_axes = f.domain_axes(todict=True) + if axes is None and not domain_axes: + set_axes = False - *Parameter example:* - To see if the field construct has 4 domain axis - constructs: ``4`` + if not set_axes: + if not data.Units: + units = getattr(f, "Units", None) + if units is not None: + if copy: + copy = False + data = data.override_units(units, inplace=False) + else: + data.override_units(units, inplace=True) - *Parameter example:* - To see if the field construct has at least 3 domain axis - constructs: ``cf.ge(3)`` + super(cfdm.Field, f).set_data( + data, axes=None, copy=copy, inplace=True + ) - :Returns: + return f - `bool` - Whether or not at least one of the conditions are met. + if data.isscalar: + # -------------------------------------------------------- + # The data array is scalar + # -------------------------------------------------------- + if axes or axes == 0: + raise ValueError( + "Can't set data: Wrong number of axes for scalar data " + f"array: axes={axes}" + ) - **Examples:** + axes = [] - >>> f.match_by_rank(3, 4) + elif axes is not None: + # -------------------------------------------------------- + # Axes have been set + # -------------------------------------------------------- + if isinstance(axes, (str, int, slice)): + axes = (axes,) - >>> f.match_by_rank(cf.wi(2, 4)) + axes = [f.domain_axis(axis, key=True) for axis in axes] - >>> f.match_by_rank(1, cf.gt(3)) + if len(axes) != data.ndim: + raise ValueError( + "Can't set data: {} axes provided, but {} needed".format( + len(axes), data.ndim + ) + ) - """ - if not ranks: - return True + for axis, size in zip(axes, data.shape): + axis_size = domain_axes[axis].get_size(None) + if size != axis_size: + axes_shape = tuple( + domain_axes[axis].get_size(None) for axis in axes + ) + raise ValueError( + f"Can't set data: Data shape {data.shape} differs " + f"from shape implied by axes {axes}: {axes_shape}" + ) - n_domain_axes = len(self.domain_axes(todict=True)) - for rank in ranks: - ok = rank == n_domain_axes - if ok: - return True + elif f.get_data_axes(default=None) is None: + # -------------------------------------------------------- + # The data is not scalar and axes have not been set and + # the domain does not have data axes defined + # + # => infer the axes + # -------------------------------------------------------- + data_shape = data.shape + if len(data_shape) != len(set(data_shape)): + raise ValueError( + f"Can't insert data: Ambiguous data shape: {data_shape}. " + "Consider setting the axes parameter." + ) - return False + if not domain_axes: + raise ValueError("Can't set data: No domain axes exist") - @_inplace_enabled(default=False) - def moving_window( - self, - method, - window_size=None, - axis=None, - weights=None, - mode=None, - cval=None, - origin=0, - scale=None, - radius="earth", - great_circle=False, - inplace=False, - ): - """Perform moving window calculations along an axis. + axes = [] + for n in data_shape: + da_key = f.domain_axis( + filter_by_size=(n,), key=True, default=None + ) + if da_key is None: + raise ValueError( + "Can't insert data: Ambiguous data shape: " + f"{data_shape}. Consider setting the axes parameter." + ) - Moving mean, sum, and integral calculations are possible. + axes.append(da_key) - By default moving means are unweighted, but weights based on - the axis cell sizes (or custom weights) may applied to the - calculation via the *weights* parameter. 
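A sketch of a weighted moving mean, assuming ``f`` has a time axis whose coordinates have bounds so that cell-size weights can be constructed:

>>> g = f.moving_window('mean', window_size=3, axis='T', weights=True)

With ``weights=True`` each window is weighted by weights derived from the field's metadata for the selected axis; omitting *weights* gives the unweighted mean described above.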
+ else: + # -------------------------------------------------------- + # The data is not scalar and axes have not been set, but + # there are data axes defined on the field. + # -------------------------------------------------------- + axes = f.get_data_axes() + if len(axes) != data.ndim: + raise ValueError( + f"Wrong number of axes for data array: {axes!r}" + ) - By default moving integrals must be weighted. + for axis, size in zip(axes, data.shape): + if domain_axes[axis].get_size(None) != size: + raise ValueError( + "Can't insert data: Incompatible size for axis " + f"{axis!r}: {size}" + ) - When appropriate, a new cell method construct is created to - describe the calculation. + if not data.Units: + units = getattr(f, "Units", None) + if units is not None: + if copy: + copy = False + data = data.override_units(units, inplace=False) + else: + data.override_units(units, inplace=True) - .. note:: The `moving_window` method can not, in general, be - emulated by the `convolution_filter` method, as the - latter i) can not change the window weights as the - filter passes through the axis; and ii) does not - update the cell method constructs. + super(cfdm.Field, f).set_data(data, axes=axes, copy=copy, inplace=True) - .. versionadded:: 3.3.0 + # Apply cyclic axes + if axes: + cyclic = self._cyclic + if cyclic: + cyclic_axes = [ + axes.index(axis) for axis in cyclic if axis in axes + ] + if cyclic_axes: + data.cyclic(cyclic_axes, True) - .. seealso:: `bin`, `collapse`, `convolution_filter`, `radius`, - `weights` + return f - :Parameters: + def domain_mask(self, **kwargs): + """Return a boolean field that is True where criteria are met. - method: `str` - Define the moving window method. The method is given - by one of the following strings (see - https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods - for precise definitions): + .. versionadded:: 1.1 - ================== ============================ ======== - *method* Description Weighted - ================== ============================ ======== - ``'sum'`` The sum of the values. Never + .. seealso:: `indices`, `mask`, `subspace` - ``'mean'`` The weighted or unweighted May be - mean of the values. + :Parameters: - ``'integral'`` The integral of values. Always - ================== ============================ ======== + kwargs: optional + A dictionary of keyword arguments to pass to the `indices` + method to define the criteria to meet for a element to be + set as `True`. - * Methods that are "Never" weighted ignore the - *weights* parameter, even if it is set. + :Returns: - * Methods that "May be" weighted will only be weighted - if the *weights* parameter is set. + `Field` + The domain mask. - * Methods that are "Always" weighted require the - *weights* parameter to be set. + **Examples:** - window_size: `int` - Specify the size of the window used to calculate the - moving window. + Create a domain mask which is masked at all between between -30 + and 30 degrees of latitude: - *Parameter example:* - A 5-point moving window is set with - ``window_size=5``. + >>> m = f.domain_mask(latitude=cf.wi(-30, 30)) - axis: `str` or `int` - Select the domain axis over which the filter is to be - applied, defined by that which would be selected by - passing the given axis description to a call of the - field construct's `domain_axis` method. For example, - for a value of ``'X'``, the domain axis construct - returned by ``f.domain_axis('X')`` is selected. 
+ """ + mask = self.copy() - weights: optional - Specify the weights for the moving window. The weights - are, those that would be returned by this call of the - field construct's `weights` method: - ``f.weights(weights, axes=axis, radius=radius, - great_circle=great_circle, data=True)``. See the - *axis*, *radius* and *great_circle* parameters and - `cf.Field.weights` for details. + mask.clear_properties() + mask.nc_del_variable(None) - .. note:: By default *weights* is `None`, resulting in - **unweighted calculations**. + for key in self.constructs.filter_by_type( + "cell_method", "field_ancillary", todict=True + ): + mask.del_construct(key) - .. note:: Setting *weights* to `True` is generally a - good way to ensure that the moving window - calculations are appropriately weighted - according to the field construct's - metadata. In this case, if it is not - possible to create weights for the selected - *axis* then an exception will be raised. + false_everywhere = Data.zeros(self.shape, dtype=bool) - *Parameter example:* - To specify weights on the cell sizes of the selected - axis: ``weights=True``. + mask.set_data(false_everywhere, axes=self.get_data_axes(), copy=False) - mode: `str`, optional - The *mode* parameter determines how the input array is - extended when the filter overlaps an array border. The - default value is ``'constant'`` or, if the dimension - being convolved is cyclic (as ascertained by the - `iscyclic` method), ``'wrap'``. The valid values and - their behaviours are as follows: + mask.subspace[mask.indices(**kwargs)] = True - ============== ========================== =========================== - *mode* Description Behaviour - ============== ========================== =========================== - ``'reflect'`` The input is extended by ``(c b a | a b c | c b a)`` - reflecting about the edge + mask.long_name = "domain mask" - ``'constant'`` The input is extended by ``(k k k | a b c | k k k)`` - filling all values beyond - the edge with the same - constant value (``k``), - defined by the *cval* - parameter. + return mask - ``'nearest'`` The input is extended by ``(a a a | a b c | c c c)`` - replicating the last point + @_inplace_enabled(default=False) + @_manage_log_level_via_verbosity + def compute_vertical_coordinates( + self, default_to_zero=True, strict=True, inplace=False, verbose=None + ): + """Compute non-parametric vertical coordinates. - ``'mirror'`` The input is extended by ``(c b | a b c | b a)`` - reflecting about the - centre of the last point. + When vertical coordinates are a function of horizontal location as + well as parameters which depend on vertical location, they cannot + be stored in a vertical dimension coordinate construct. In such + cases a parametric vertical dimension coordinate construct is + stored and a coordinate reference construct contains the formula + for computing the required non-parametric vertical coordinates. - ``'wrap'`` The input is extended by ``(a b c | a b c | a b c)`` - wrapping around to the - opposite edge. - ============== ========================== =========================== + {{formula terms links}} - The position of the window relative to each value can - be changed by using the *origin* parameter. + For example, multi-dimensional non-parametric parametric ocean + altitude coordinates can be computed from one-dimensional + parametric ocean sigma coordinates. - cval: scalar, optional - Value to fill past the edges of the array if *mode* is - ``'constant'``. Ignored for other modes. 
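# Editor's sketch (not part of the patch): typical use of the
# domain_mask method defined above. Assumes cf-python's bundled
# example fields and the where(condition, x, y) semantics in which
# None leaves values unchanged and cf.masked masks them.
import cf

f = cf.example_field(0)                          # specific_humidity(lat, lon)
tropics = f.domain_mask(latitude=cf.wi(-30, 30))

# Keep the tropics, mask everything else
g = f.where(tropics, None, cf.masked)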
Defaults to - `None`, in which case the edges of the array will be - filled with missing data. The only other valid value - is ``0``. + Coordinate reference systems based on parametric vertical + coordinates are identified from the coordinate reference + constructs and, if possible, the corresponding non-parametric + vertical coordinates are computed and stored in a new auxiliary + coordinate construct. - *Parameter example:* - To extend the input by filling all values beyond - the edge with zero: ``cval=0`` + If there are no appropriate coordinate reference constructs then + the field construct is unchanged. - origin: `int`, optional - Controls the placement of the filter. Defaults to 0, - which is the centre of the window. If the window size, - defined by the *window_size* parameter, is even then - then a value of 0 defines the index defined by - ``window_size/2 -1``. + .. versionadded:: 3.8.0 - *Parameter example:* - For a window size of 5, if ``origin=0`` then the - window is centred on each point. If ``origin=-2`` - then the window is shifted to include the previous - four points. If ``origin=1`` then the window is - shifted to include the previous point and the and - the next three points. + .. seealso:: `CoordinateReference` - radius: optional - Specify the radius used for calculating the areas of - cells defined in spherical polar coordinates. The - radius is that which would be returned by this call of - the field construct's `~cf.Field.radius` method: - ``f.radius(radius)``. See the `cf.Field.radius` for - details. + :Parameters: - By default *radius* is ``'earth'`` which means that if - and only if the radius can not found from the datums - of any coordinate reference constucts, then the - default radius taken as 6371229 metres. + {{default_to_zero: `bool`, optional}} - great_circle: `bool`, optional - If True then allow, if required, the derivation of i) - area weights from polygon geometry cells by assuming - that each cell part is a spherical polygon composed of - great circle segments; and ii) and the derivation of - line-length weights from line geometry cells by - assuming that each line part is composed of great - circle segments. + strict: `bool` + If False then allow the computation to occur when - scale: number, optional - If set to a positive number then scale the weights so - that they are less than or equal to that number. By - default the weights are scaled to lie between 0 and 1 - (i.e. *scale* is 1). + * A domain ancillary construct has no standard name, but + the corresponding term has a standard name that is + prescribed - Ignored if the moving window method is not - weighted. The *scale* parameter can not be set for - moving integrals. + * When the computed standard name can not be found by + inference from the standard names of the domain + ancillary constructs, nor from the + ``computed_standard_name`` parameter of the relevant + coordinate reference construct. - *Parameter example:* - To scale all weights so that they lie between 0 and - 0.5: ``scale=0.5``. + By default an exception is raised in these cases. + + If a domain ancillary construct does have a standard name, + but one that is inconsistent with any prescribed standard + names, then an exception is raised regardless of the value + of *strict*. {{inplace: `bool`, optional}} + {{verbose: `int` or `str` or `None`, optional}} + :Returns: `Field` or `None` - The field construct of moving window values, or `None` - if the operation was in-place. 
+ The field construct with the new non-parametric vertical + coordinates, or `None` if the operation was in-place. - **Examples:** + **Examples** - >>> f = cf.example_field(0) + >>> f = cf.example_field(1) >>> print(f) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - >>> print(f.array) - [[0.007 0.034 0.003 0.014 0.018 0.037 0.024 0.029] - [0.023 0.036 0.045 0.062 0.046 0.073 0.006 0.066] - [0.11 0.131 0.124 0.146 0.087 0.103 0.057 0.011] - [0.029 0.059 0.039 0.07 0.058 0.072 0.009 0.017] - [0.006 0.036 0.019 0.035 0.018 0.037 0.034 0.013]] - >>> print(f.coordinate('X').bounds.array) - [[ 0. 45.] - [ 45. 90.] - [ 90. 135.] - [135. 180.] - [180. 225.] - [225. 270.] - [270. 315.] - [315. 360.]] - >>> f.iscyclic('X') - True - >>> f.iscyclic('Y') - False - - Create a weighted 3-point running mean for the cyclic 'X' - axis: - - >>> g = f.moving_window('mean', 3, axis='X', weights=True) - >>> print(g) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(5), longitude(8)) 1 - Cell methods : area: mean longitude(8): mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east + Field: air_temperature (ncvar%ta) + --------------------------------- + Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K + Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum + Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K + Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] + : grid_latitude(10) = [2.2, ..., -1.76] degrees + : grid_longitude(9) = [-4.7, ..., -1.18] degrees : time(1) = [2019-01-01 00:00:00] - >>> print(g.array) - [[0.02333 0.01467 0.017 0.01167 0.023 0.02633 0.03 0.02 ] - [0.04167 0.03467 0.04767 0.051 0.06033 0.04167 0.04833 0.03167] - [0.084 0.12167 0.13367 0.119 0.112 0.08233 0.057 0.05933] - [0.035 0.04233 0.056 0.05567 0.06667 0.04633 0.03267 0.01833] - [0.01833 0.02033 0.03 0.024 0.03 0.02967 0.028 0.01767]] - >>> print(g.coordinate('X').bounds.array) - [[-45. 90.] - [ 0. 135.] - [ 45. 180.] - [ 90. 225.] - [135. 270.] - [180. 315.] - [225. 360.] - [270. 
405.]] - - Create an unweighted 3-point running mean for the cyclic 'X' - axis: + Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N + : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E + : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] + Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 + Coord references: grid_mapping_name:rotated_latitude_longitude + : standard_name:atmosphere_hybrid_height_coordinate + Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m + : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] + : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m + >>> print(f.auxiliary_coordinate('altitude', default=None)) + None + >>> g = f.compute_vertical_coordinates() + >>> print(g.auxiliary_coordinates) + Constructs: + {'auxiliarycoordinate0': , + 'auxiliarycoordinate1': , + 'auxiliarycoordinate2': , + 'auxiliarycoordinate3': } + >>> g.auxiliary_coordinate('altitude').dump() + Auxiliary coordinate: altitude + long_name = 'Computed from parametric atmosphere_hybrid_height_coordinate + vertical coordinates' + standard_name = 'altitude' + units = 'm' + Data(1, 10, 9) = [[[10.0, ..., 5410.0]]] m + Bounds:units = 'm' + Bounds:Data(1, 10, 9, 2) = [[[[5.0, ..., 5415.0]]]] m - >>> g = f.moving_window('mean', 3, axis='X') + """ + f = _inplace_enabled_define_and_cleanup(self) - Create an weighted 4-point running integral for the non-cyclic - 'Y' axis: + for cr in f.coordinate_references(todict=True).values(): + # -------------------------------------------------------- + # Compute the non-parametric vertical coordinates, if + # possible. + # -------------------------------------------------------- + ( + standard_name, + computed_standard_name, + computed, + computed_axes, + k_axis, + ) = FormulaTerms.formula(f, cr, default_to_zero, strict) - >>> g = f.moving_window('integral', 4, axis='Y', weights=True) - >>> g.Units - - >>> print(g.array) - [[ -- -- -- -- -- -- -- --] - [ 8.37 11.73 10.05 13.14 8.88 11.64 4.59 4.02] - [ 8.34 11.79 10.53 13.77 8.88 11.64 4.89 3.54] - [ -- -- -- -- -- -- -- --] - [ -- -- -- -- -- -- -- --]] - >>> print(g.coordinate('Y').bounds.array) - [[-90. 30.] - [-90. 60.] - [-60. 90.] - [-30. 90.] - [ 30. 90.]] - >>> g = f.moving_window('integral', 4, axis='Y', weights=True, cval=0) - >>> print(g.array) - [[ 7.5 9.96 8.88 11.04 7.14 9.48 4.32 3.51] - [ 8.37 11.73 10.05 13.14 8.88 11.64 4.59 4.02] - [ 8.34 11.79 10.53 13.77 8.88 11.64 4.89 3.54] - [ 7.65 10.71 9.18 11.91 7.5 9.45 4.71 1.56] - [ 1.05 2.85 1.74 3.15 2.28 3.27 1.29 0.9 ]] + if computed is None: + # No non-parametric vertical coordinates were + # computed + continue - """ - method_values = ("mean", "sum", "integral") - if method not in method_values: - raise ValueError( - f"Non-valid 'method' parameter value: {method!r}. " - f"Expected one of {method_values!r}" + # -------------------------------------------------------- + # Convert the computed domain ancillary construct to an + # auxiliary coordinate construct, and insert it into the + # field construct. 
+ # -------------------------------------------------------- + c = f._AuxiliaryCoordinate(source=computed, copy=False) + c.clear_properties() + c.long_name = ( + "Computed from parametric {} " + "vertical coordinates".format(standard_name) ) + if computed_standard_name: + c.standard_name = computed_standard_name - if cval is not None and cval != 0: - raise ValueError("The cval parameter must be None or 0") + logger.detail( + "Non-parametric coordinates:\n{}".format( + c.dump(display=False, _level=1) + ) + ) # pragma: no cover - window_size = int(window_size) + key = f.set_construct(c, axes=computed_axes, copy=False) - # Construct new field - f = _inplace_enabled_define_and_cleanup(self) + # Reference the new coordinates from the coordinate + # reference construct + cr.set_coordinate(key) - # Find the axis for the moving window - axis = f.domain_axis(axis, key=True) - iaxis = self.get_data_axes().index(axis) + logger.debug( + "Non-parametric coordinates construct key: {!r}\n" + "Updated coordinate reference construct:\n{}".format( + key, cr.dump(display=False, _level=1) + ) + ) # pragma: no cover - if method == "sum" or weights is False: - weights = None + return f - if method == "integral": - measure = True - if weights is None: - raise ValueError( - "Must set weights parameter for 'integral' method" - ) + def match_by_construct(self, *identities, OR=False, **conditions): + """Whether or not there are particular metadata constructs. - if scale is not None: - raise ValueError( - "Can't set the 'scale' parameter for moving integrals" - ) - else: - if scale is None: - scale = 1.0 + .. note:: The API changed at version 3.1.0 - measure = False + .. versionadded:: 3.0.0 - if weights is not None: - if isinstance(weights, Data): - if weights.ndim > 1: - raise ValueError( - f"The input weights (shape {weights.shape}) do not " - f"match the selected axis (size {f.shape[iaxis]})" - ) + .. seealso:: `match`, `match_by_property`, `match_by_rank`, + `match_by_identity`, `match_by_ncvar`, + `match_by_units` - if weights.ndim == 1: - if weights.shape[0] != f.shape[iaxis]: - raise ValueError( - f"The input weights (size {weights.size}) do not " - f"match the selected axis (size {f.shape[iaxis]})" - ) + :Parameters: - # Get the data weights - w = f.weights( - weights, - axes=axis, - measure=measure, - scale=scale, - radius=radius, - great_circle=great_circle, - data=True, - ) + identities: optional + Identify the metadata constructs that have any of the + given identities or construct keys. - # Multiply the field by the (possibly adjusted) weights - if numpy_can_cast(w.dtype, f.dtype): - f *= w - else: - f = f * w + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); or a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. - # Create the window weights - window = numpy_full((window_size,), 1.0) - if weights is None and method == "mean": - # If there is no data weighting, make sure that the sum of - # the window weights is 1. - window /= window.size + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. 
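# Editor's sketch (not part of the patch): the new
# compute_vertical_coordinates method applied to the bundled example
# field that carries an atmosphere_hybrid_height_coordinate.
import cf

f = cf.example_field(1)
g = f.compute_vertical_coordinates()

# The computed altitudes arrive as a new auxiliary coordinate construct
altitude = g.auxiliary_coordinate('altitude')
print(altitude.shape, altitude.Units)            # (1, 10, 9) <Units: m>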
In the following example, the construct ``x`` has + six identities: - f.convolution_filter( - window, - axis=axis, - mode=mode, - cval=cval, - origin=origin, - update_bounds=True, - inplace=True, - ) + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] - if weights is not None and method == "mean": - # Divide the field by the running sum of the adjusted data - # weights - w.convolution_filter( - window=window, - axis=iaxis, - mode=mode, - cval=0, - origin=origin, - inplace=True, - ) - if numpy_can_cast(w.dtype, f.dtype): - f /= w - else: - f = f / w + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. - # Add a cell method - if f.domain_axis(axis).get_size() > 1 or method == "integral": - f._update_cell_methods( - method=method, domain_axes=f.domain_axes(axis, todict=True) - ) + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. - return f + If a cell method construct identity is given (such as + ``'method:mean'``) then it will only be compared with the + most recently applied cell method operation. - @_deprecated_kwarg_check("i") - @_inplace_enabled(default=False) - def convolution_filter( - self, - window=None, - axis=None, - mode=None, - cval=None, - origin=0, - update_bounds=True, - inplace=False, - weights=None, - i=False, - ): - """Convolve the field construct along the given axis with the - specified filter. + Alternatively, one or more cell method constucts may be + identified in a single string with a CF-netCDF cell + methods-like syntax for describing both the collapse + dimensions, the collapse method, and any cell method + construct qualifiers. If N cell methods are described in + this way then they will collectively identify the N most + recently applied cell method operations. For example, + ``'T: maximum within years T: mean over years'`` will be + compared with the most two most recently applied cell + method operations. - The magnitude of the integral of the filter (i.e. the sum of - the window weights defined by the *window* parameter) affects - the convolved values. For example, window weights of ``[0.2, - 0.2 0.2, 0.2, 0.2]`` will produce a non-weighted 5-point - running mean; and window weights of ``[1, 1, 1, 1, 1]`` will - produce a 5-point running sum. Note that the window weights - returned by functions of the `scipy.signal.windows` package do - not necessarily sum to 1 (see the examples for details). + *Parameter example:* + ``'measure:area'`` + + *Parameter example:* + ``'latitude'`` + + *Parameter example:* + ``'long_name=Longitude'`` + + *Parameter example:* + ``'domainancillary2', 'ncvar%areacello'`` + + conditions: optional + Identify the metadata constructs that have any of the + given identities or construct keys, and whose data satisfy + conditions. + + A construct identity or construct key (as defined by the + *identities* parameter) is given as a keyword name and a + condition on its data is given as the keyword value. - .. note:: The `moving_window` method can not, in general, be - emulated by the `convolution_filter` method, as the - latter i) can not change the window weights as the - filter passes through the axis; and ii) does not - update the cell method constructs. 
+ The condition is satisfied if any of its data values + equals the value provided. - .. seealso:: `collapse`, `derivative`, `moving_window`, - `cf.relative_vorticity` + *Parameter example:* + ``longitude=180.0`` - :Parameters: + *Parameter example:* + ``time=cf.dt('1959-12-16')`` - window: sequence of numbers - Specify the window weights to use for the filter. + *Parameter example:* + ``latitude=cf.ge(0)`` *Parameter example:* - An unweighted 5-point moving average can be computed - with ``window=[0.2, 0.2, 0.2, 0.2, 0.2]`` + ``latitude=cf.ge(0), air_pressure=500`` - Note that the `scipy.signal.windows` package has suite - of window functions for creating window weights for - filtering (see the examples for details). + *Parameter example:* + ``**{'latitude': cf.ge(0), 'long_name=soil_level': 4}`` - .. versionadded:: 3.3.0 (replaces the old weights - parameter) + OR: `bool`, optional + If True then return `True` if at least one metadata + construct matches at least one of the criteria given by + the *identities* or *conditions* arguments. By default + `True` is only returned if the field constructs matches + each of the given criteria. - axis: - Select the domain axis over which the filter is to be - applied, defined by that which would be selected by - passing the given axis description to a call of the field - construct's `domain_axis` method. For example, for a value - of ``'X'``, the domain axis construct returned by - ``f.domain_axis('X')`` is selected. + mode: deprecated at version 3.1.0 + Use the *OR* parameter instead. - mode: `str`, optional - The *mode* parameter determines how the input array is - extended when the filter overlaps an array border. The - default value is ``'constant'`` or, if the dimension being - convolved is cyclic (as ascertained by the `iscyclic` - method), ``'wrap'``. The valid values and their behaviours - are as follows: + constructs: deprecated at version 3.1.0 - ============== ========================== =========================== - *mode* Description Behaviour - ============== ========================== =========================== - ``'reflect'`` The input is extended by ``(c b a | a b c | c b a)`` - reflecting about the edge + :Returns: - ``'constant'`` The input is extended by ``(k k k | a b c | k k k)`` - filling all values beyond - the edge with the same - constant value (``k``), - defined by the *cval* - parameter. + `bool` + Whether or not the field construct contains the specified + metadata constructs. - ``'nearest'`` The input is extended by ``(a a a | a b c | d d d)`` - replicating the last point + **Examples:** - ``'mirror'`` The input is extended by ``(c b | a b c | b a)`` - reflecting about the - centre of the last point. + TODO - ``'wrap'`` The input is extended by ``(a b c | a b c | a b c)`` - wrapping around to the - opposite edge. - ============== ========================== =========================== + """ + if identities: + if identities[0] == "or": + _DEPRECATION_ERROR_ARG( + self, + "match_by_construct", + "or", + message="Use 'OR=True' instead.", + version="3.1.0", + ) # pragma: no cover - The position of the window relative to each value can be - changed by using the *origin* parameter. + if identities[0] == "and": + _DEPRECATION_ERROR_ARG( + self, + "match_by_construct", + "and", + message="Use 'OR=False' instead.", + version="3.1.0", + ) # pragma: no cover - cval: scalar, optional - Value to fill past the edges of the array if *mode* is - ``'constant'``. Ignored for other modes. 
Defaults to - `None`, in which case the edges of the array will be - filled with missing data. + if not identities and not conditions: + return True - *Parameter example:* - To extend the input by filling all values beyond the - edge with zero: ``cval=0`` + constructs = self.constructs - origin: `int`, optional - Controls the placement of the filter. Defaults to 0, which - is the centre of the window. If the window has an even - number of weights then then a value of 0 defines the index - defined by ``width/2 -1``. + if not constructs: + return False - *Parameter example:* - For a weighted moving average computed with a weights - window of ``[0.1, 0.15, 0.5, 0.15, 0.1]``, if - ``origin=0`` then the average is centred on each - point. If ``origin=-2`` then the average is shifted to - include the previous four points. If ``origin=1`` then - the average is shifted to include the previous point and - the and the next three points. + n = 0 - update_bounds: `bool`, optional - If False then the bounds of a dimension coordinate - construct that spans the convolved axis are not - altered. By default, the bounds of a dimension coordinate - construct that spans the convolved axis are updated to - reflect the width and origin of the window. + self_cell_methods = self.cell_methods() # TODO - {{inplace: `bool`, optional}} + for identity in identities: + cms = False + try: + cms = ": " in identity + except TypeError: + cms = False - {{i: deprecated at version 3.0.0}} + if cms: + cms = CellMethod.create(identity) + for cm in cms: + axes = [ + self.domain_axis(axis, key=True, default=axis) + for axis in cm.get_axes(()) + ] + if axes: + cm.set_axes(axes) - weights: deprecated at version 3.3.0 - Use the *window* parameter instead. + if not cms: + filtered = constructs(identity) + if filtered: + # Check for cell methods + if set(filtered.construct_types().values()) == { + "cell_method" + }: + key = tuple(self_cell_methods.ordered())[-1] + filtered = self_cell_methods(key)(identity) # TODO + if not filtered: + if not OR: + return False - :Returns: + n -= 1 - `Field` or `None` - The convolved field construct, or `None` if the operation - was in-place. + n += 1 + elif not OR: + return False + else: + cell_methods = tuple(self_cell_methods.ordered().values())[ + -len(cms) : + ] + for cm0, cm1 in zip(cms, cell_methods): + if cm0.has_axes() and set(cm0.get_axes()) != set( + cm1.get_axes(()) + ): + if not OR: + return False - **Examples:** + n -= 1 + break - >>> f = cf.example_field(2) - >>> print(f) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K - Cell methods : area: mean - Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> print(f.array[:, 0, 0]) - [210.7 305.3 249.4 288.9 231.1 200. 234.4 289.2 204.3 203.6 261.8 256.2 - 212.3 231.7 255.1 213.9 255.8 301.2 213.3 200.1 204.6 203.2 244.6 238.4 - 304.5 269.8 267.9 282.4 215. 
288.7 217.3 307.1 299.3 215.9 290.2 239.9] - >>> print(f.coordinate('T').bounds.dtarray[0]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-01-01 00:00:00)] - >>> print(f.coordinate('T').bounds.dtarray[2]) - [cftime.DatetimeGregorian(1960-02-01 00:00:00) - cftime.DatetimeGregorian(1960-03-01 00:00:00)] + if cm0.has_method() and ( + cm0.get_method() != cm1.get_method(None) + ): + if not OR: + return False - Create a 5-point (non-weighted) running mean: + n -= 1 + break - >>> g = f.convolution_filter([0.2, 0.2, 0.2, 0.2, 0.2], 'T') - >>> print(g) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K - Cell methods : area: mean - Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> print(g.array[:, 0, 0]) - [ -- -- 257.08 254.94 240.76 248.72 231.8 226.3 238.66 243.02 227.64 - 233.12 243.42 233.84 233.76 251.54 247.86 236.86 235.0 224.48 213.16 - 218.18 239.06 252.1 265.04 272.6 267.92 264.76 254.26 262.1 265.48 - 265.66 265.96 270.48 -- --] - >>> print(g.coordinate('T').bounds.dtarray[0]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-03-01 00:00:00)] - >>> print(g.coordinate('T').bounds.dtarray[2]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-05-01 00:00:00)] + ok = True + for key, value in cm0.qualifiers(): + if value != cm1.get_qualifier(key, None): + if not OR: + return False + + ok = False + break + + if not ok: + n -= 1 + break + + n += 1 + + if conditions: + for identity, value in conditions.items(): + if self.subspace("test", **{identity: value}): + n += 1 + elif not OR: + return False + + if OR: + return bool(n) - Create a 5-point running sum: + return True - >>> g = f.convolution_filter([1, 1, 1, 1, 1], 'T') - >>> print(g) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K - Cell methods : area: mean - Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> print(g.array[:, 0, 0]) - [ -- -- 1285.4 1274.7 1203.8 1243.6 1159.0 1131.5 1193.3 1215.1 - 1138.2 1165.6 1217.1 1169.2 1168.8 1257.7 1239.3 1184.3 1175.0 - 1122.4 1065.8 1090.9 1195.3 1260.5 1325.2 1363.0 1339.6 1323.8 - 1271.3 1310.5 1327.4 1328.3 1329.8 1352.4 -- --] - >>> print(g.coordinate('T').bounds.dtarray[0]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-03-01 00:00:00)] - >>> print(g.coordinate('T').bounds.dtarray[2]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-05-01 00:00:00)] + def match_by_rank(self, *ranks): + """Whether or not the number of domain axis constructs satisfies + conditions. - Calculate a convolution along the time axis with Gaussian window - weights, using the "nearest" mode at the border of the edges of - the time dimension (note that the window weights returned by - `scipy.signal.windows` functions do not necessarily sum to 1): + .. 
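# Editor's sketch (not part of the patch): match_by_construct with
# identities, data conditions and the OR keyword, as implemented
# above. Uses cf-python's bundled example field.
import cf

f = cf.example_field(0)

print(f.match_by_construct('latitude'))                  # True
print(f.match_by_construct(longitude=cf.gt(400.0)))      # False
print(f.match_by_construct('latitude',
                           longitude=cf.gt(400.0),
                           OR=True))                     # True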
versionadded:: 3.0.0 - >>> import scipy.signal.windows - >>> gaussian_window = scipy.signal.windows.gaussian(3, std=0.4) - >>> print(gaussian_window) - [0.04393693 1. 0.04393693] - >>> g = f.convolution_filter(gaussian_window, 'T', mode='nearest') - >>> print(g.array[:, 0, 0]) - [233.37145775 325.51538316 275.50732596 310.01169661 252.58076685 - 220.4526426 255.89394793 308.47513278 225.95212089 224.07900476 - 282.00220208 277.03050023 233.73682991 252.23612278 274.67829762 - 236.34737939 278.43191451 321.81081556 235.32558483 218.46124456 - 222.31976533 222.93647058 264.00254989 262.52577025 326.82874967 - 294.94950081 292.16197475 303.61714525 240.09238279 307.69393641 - 243.47762505 329.79781991 322.27901629 241.80082237 310.22645435 - 263.19096851] - >>> print(g.coordinate('T').bounds.dtarray[0]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-02-01 00:00:00)] - >>> print(g.coordinate('T').bounds.dtarray[1]) - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-03-01 00:00:00)] + .. seealso:: `match`, `match_by_property`, `match_by_identity`, + `match_by_ncvar`, `match_by_units`, + `match_by_construct` - """ - if weights is not None: - _DEPRECATION_ERROR_KWARGS( - self, - "convolution_filter", - {"weights": weights}, - message="Use keyword 'window' instead.", - version="3.3.0", - ) # pragma: no cover + :Parameters: - if isinstance(window, str): - _DEPRECATION_ERROR( - "A string-valued 'window' parameter has been deprecated " - "at version 3.0.0 and is no longer available. Provide a " - "sequence of numerical window weights instead. " - "scipy.signal.windows may be used to generate particular " - "window functions." - ) # pragma: no cover + ranks: optional + Define conditions on the number of domain axis constructs. - if isinstance(window[0], str): - _DEPRECATION_ERROR( - "A string-valued 'window' parameter element has been " - "deprecated at version 3.0.0 and is no longer available. " - "Provide a sequence of numerical window weights instead. " - "scipy.signal.windows may be used to generate particular " - "window functions." - ) # pragma: no cover + A condition is one of: - # Retrieve the axis - axis_key = self.domain_axis(axis, key=True) - iaxis = self.get_data_axes().index(axis_key) + * `int` + * a `Query` object - # Default mode to 'wrap' if the axis is cyclic - if mode is None: - if self.iscyclic(axis_key): - mode = "wrap" - else: - mode = "constant" + The condition is satisfied if the number of domain axis + constructs equals the condition value. 
- # Construct new field - f = _inplace_enabled_define_and_cleanup(self) + *Parameter example:* + To see if the field construct has 4 domain axis + constructs: ``4`` - f.data.convolution_filter( - window=window, - axis=iaxis, - mode=mode, - cval=cval, - origin=origin, - inplace=True, - ) + *Parameter example:* + To see if the field construct has at least 3 domain axis + constructs: ``cf.ge(3)`` - # Update the bounds of the convolution axis if necessary - if update_bounds: - coord = f.dimension_coordinate( - filter_by_axis=(axis_key,), default=None - ) - if coord is not None and coord.has_bounds(): - old_bounds = coord.bounds.array - length = old_bounds.shape[0] - new_bounds = numpy_empty((length, 2)) - len_weights = len(window) - lower_offset = len_weights // 2 + origin - upper_offset = len_weights - 1 - lower_offset - if mode == "wrap": - if coord.direction(): - new_bounds[:, 0] = coord.roll( - 0, upper_offset - ).bounds.array[:, 0] - new_bounds[:, 1] = ( - coord.roll(0, -lower_offset).bounds.array[:, 1] - + coord.period() - ) - else: - new_bounds[:, 0] = ( - coord.roll(0, upper_offset).bounds.array[:, 0] - + 2 * coord.period() - ) - new_bounds[:, 1] = ( - coord.roll(0, -lower_offset).bounds.array[:, 1] - + coord.period() - ) - else: - new_bounds[upper_offset:length, 0] = old_bounds[ - 0 : length - upper_offset, 0 - ] - new_bounds[0:upper_offset, 0] = old_bounds[0, 0] - new_bounds[0 : length - lower_offset, 1] = old_bounds[ - lower_offset:length, 1 - ] - new_bounds[length - lower_offset : length, 1] = old_bounds[ - length - 1, 1 - ] + :Returns: - coord.set_bounds( - self._Bounds(data=Data(new_bounds, units=coord.Units)) - ) + `bool` + Whether or not at least one of the conditions are met. - return f + **Examples:** - def convert(self, identity, full_domain=True, cellsize=False): - """Convert a metadata construct into a new field construct. + >>> f.match_by_rank(3, 4) - The new field construct has the properties and data of the - metadata construct, and domain axis constructs corresponding to - the data. By default it also contains other metadata constructs - (such as dimension coordinate and coordinate reference constructs) - that define its domain. + >>> f.match_by_rank(cf.wi(2, 4)) - The `cf.read` function allows a field construct to be derived - directly from a netCDF variable that corresponds to a metadata - construct. In this case, the new field construct will have a - domain limited to that which can be inferred from the - corresponding netCDF variable - typically only domain axis and - dimension coordinate constructs. This will usually result in a - different field construct to that created with the convert method. + >>> f.match_by_rank(1, cf.gt(3)) - .. versionadded:: 3.0.0 + """ + if not ranks: + return True - .. seealso:: `cf.read` + n_domain_axes = len(self.domain_axes(todict=True)) + for rank in ranks: + ok = rank == n_domain_axes + if ok: + return True - :Parameters: + return False - identity: - Select the metadata construct by one of: + @_inplace_enabled(default=False) + def moving_window( + self, + method, + window_size=None, + axis=None, + weights=None, + mode=None, + cval=None, + origin=0, + scale=None, + radius="earth", + great_circle=False, + inplace=False, + ): + """Perform moving window calculations along an axis. - * The identity or key of a construct. + Moving mean, sum, and integral calculations are possible. - A construct identity is specified by a string - (e.g. 
``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + By default moving means are unweighted, but weights based on + the axis cell sizes (or custom weights) may applied to the + calculation via the *weights* parameter. - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + By default moving integrals must be weighted. - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + When appropriate, a new cell method construct is created to + describe the calculation. - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + .. note:: The `moving_window` method can not, in general, be + emulated by the `convolution_filter` method, as the + latter i) can not change the window weights as the + filter passes through the axis; and ii) does not + update the cell method constructs. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + .. versionadded:: 3.3.0 - *Parameter example:* - ``identity='measure:area'`` + .. seealso:: `bin`, `collapse`, `convolution_filter`, `radius`, + `weights` - *Parameter example:* - ``identity='latitude'`` + :Parameters: - *Parameter example:* - ``identity='long_name=Longitude'`` + method: `str` + Define the moving window method. The method is given + by one of the following strings (see + https://ncas-cms.github.io/cf-python/analysis.html#collapse-methods + for precise definitions): - *Parameter example:* - ``identity='domainancillary2'`` + ================== ============================ ======== + *method* Description Weighted + ================== ============================ ======== + ``'sum'`` The sum of the values. Never - *Parameter example:* - ``identity='ncvar%areacello'`` + ``'mean'`` The weighted or unweighted May be + mean of the values. - full_domain: `bool`, optional - If False then do not create a domain, other than domain - axis constructs, for the new field construct. By default - as much of the domain as possible is copied to the new - field construct. + ``'integral'`` The integral of values. Always + ================== ============================ ======== - cellsize: `bool`, optional - If True then create a field construct from the selected - metadata construct's cell sizes. + * Methods that are "Never" weighted ignore the + *weights* parameter, even if it is set. - :Returns: + * Methods that "May be" weighted will only be weighted + if the *weights* parameter is set. - `Field` - The new field construct. + * Methods that are "Always" weighted require the + *weights* parameter to be set. - **Examples:** + window_size: `int` + Specify the size of the window used to calculate the + moving window. - TODO + *Parameter example:* + A 5-point moving window is set with + ``window_size=5``. 
- """ - key, construct = self.construct( - identity, item=True, default=(None, None) - ) - if key is None: - raise ValueError( - f"Can't find metadata construct with identity {identity!r}" - ) + axis: `str` or `int` + Select the domain axis over which the filter is to be + applied, defined by that which would be selected by + passing the given axis description to a call of the + field construct's `domain_axis` method. For example, + for a value of ``'X'``, the domain axis construct + returned by ``f.domain_axis('X')`` is selected. - f = super().convert(key, full_domain=full_domain) + weights: optional + Specify the weights for the moving window. The weights + are, those that would be returned by this call of the + field construct's `weights` method: + ``f.weights(weights, axes=axis, radius=radius, + great_circle=great_circle, data=True)``. See the + *axis*, *radius* and *great_circle* parameters and + `cf.Field.weights` for details. - if cellsize: - # Change the new field's data to cell sizes - try: - cs = construct.cellsize - except AttributeError as error: - raise ValueError(error) + .. note:: By default *weights* is `None`, resulting in + **unweighted calculations**. - f.set_data(cs.data, set_axes=False, copy=False) + .. note:: Setting *weights* to `True` is generally a + good way to ensure that the moving window + calculations are appropriately weighted + according to the field construct's + metadata. In this case, if it is not + possible to create weights for the selected + *axis* then an exception will be raised. - return f + *Parameter example:* + To specify weights on the cell sizes of the selected + axis: ``weights=True``. - @_inplace_enabled(default=False) - def cumsum( - self, axis, masked_as_zero=False, coordinate=None, inplace=False - ): - """Return the field cumulatively summed along the given axis. + mode: `str`, optional + The *mode* parameter determines how the input array is + extended when the filter overlaps an array border. The + default value is ``'constant'`` or, if the dimension + being convolved is cyclic (as ascertained by the + `iscyclic` method), ``'wrap'``. The valid values and + their behaviours are as follows: - The cell bounds of the axis are updated to describe the range - over which the sums apply, and a new "sum" cell method - construct is added to the resulting field construct. + ============== ========================== =========================== + *mode* Description Behaviour + ============== ========================== =========================== + ``'reflect'`` The input is extended by ``(c b a | a b c | c b a)`` + reflecting about the edge - .. versionadded:: 3.0.0 + ``'constant'`` The input is extended by ``(k k k | a b c | k k k)`` + filling all values beyond + the edge with the same + constant value (``k``), + defined by the *cval* + parameter. - .. seealso:: `collapse`, `convolution_filter`, `moving_window`, - `sum` + ``'nearest'`` The input is extended by ``(a a a | a b c | c c c)`` + replicating the last point - :Parameters: + ``'mirror'`` The input is extended by ``(c b | a b c | b a)`` + reflecting about the + centre of the last point. - axis: - Select the domain axis over which the cumulative sums are - to be calculated, defined by that which would be selected - by passing the given axis description to a call of the - field construct's `domain_axis` method. For example, for a - value of ``'X'``, the domain axis construct returned by - ``f.domain_axis('X')`` is selected. 
+ ``'wrap'`` The input is extended by ``(a b c | a b c | a b c)`` + wrapping around to the + opposite edge. + ============== ========================== =========================== - masked_as_zero: `bool`, optional - If True then set missing data values to zero before - calculating the cumulative sum. By default the output data - will be masked at the same locations as the original data. - .. note:: Sums produced entirely from masked elements will - always result in masked values in the output - data, regardless of the setting of - *masked_as_zero*. + The position of the window relative to each value can + be changed by using the *origin* parameter. - coordinate: `str`, optional - Set how the cell coordinate values for the summed axis are - defined, relative to the new cell bounds. By default they - are unchanged from the original field construct. The - *coordinate* parameter may be one of: + cval: scalar, optional + Value to fill past the edges of the array if *mode* is + ``'constant'``. Ignored for other modes. Defaults to + `None`, in which case the edges of the array will be + filled with missing data. The only other valid value + is ``0``. - =============== ========================================= - *coordinate* Description - =============== ========================================= - `None` This is the default. - Output coordinates are unchanged. - ``'mid_range'`` An output coordinate is the average of - its output coordinate bounds. - ``'minimum'`` An output coordinate is the minimum of - its output coordinate bounds. - ``'maximum'`` An output coordinate is the maximum of - its output coordinate bounds. - =============== ========================================= + *Parameter example:* + To extend the input by filling all values beyond + the edge with zero: ``cval=0`` - *Parameter Example:* - ``coordinate='maximum'`` - {{inplace: `bool`, optional}} + origin: `int`, optional + Controls the placement of the filter. Defaults to 0, + which is the centre of the window. If the window size, + defined by the *window_size* parameter, is even then + then a value of 0 defines the index defined by + ``window_size/2 -1``. - :Returns: - `Field` or `None` - The field construct with the cumulatively summed axis, or - `None` if the operation was in-place. + *Parameter example:* + For a window size of 5, if ``origin=0`` then the + window is centred on each point. If ``origin=-2`` + then the window is shifted to include the previous + four points. If ``origin=1`` then the window is + shifted to include the previous point and the and + the next three points. - **Examples:** - >>> f = cf.example_field(2) - >>> print(f) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K - Cell methods : area: mean - Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> print(f.dimension_coordinate('T').bounds[[0, -1]].datetime_array) - [[cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-01-01 00:00:00)] - [cftime.DatetimeGregorian(1962-11-01 00:00:00) - cftime.DatetimeGregorian(1962-12-01 00:00:00)]] - >>> print(f.array[:, 0, 0]) - [210.7 305.3 249.4 288.9 231.1 200. 
234.4 289.2 204.3 203.6 261.8 256.2 - 212.3 231.7 255.1 213.9 255.8 301.2 213.3 200.1 204.6 203.2 244.6 238.4 - 304.5 269.8 267.9 282.4 215. 288.7 217.3 307.1 299.3 215.9 290.2 239.9] - >>> g = f.cumsum('T') - >>> print(g) - Field: air_potential_temperature (ncvar%air_potential_temperature) - ------------------------------------------------------------------ - Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K - Cell methods : area: mean time(36): sum - Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] - : latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : air_pressure(1) = [850.0] hPa - >>> print(g.dimension_coordinate('T').bounds[[0, -1]].datetime_array) - [[cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1960-01-01 00:00:00)] - [cftime.DatetimeGregorian(1959-12-01 00:00:00) - cftime.DatetimeGregorian(1962-12-01 00:00:00)]] - >>> print(g.array[:, 0, 0]) - [ 210.7 516. 765.4 1054.3 1285.4 1485.4 1719.8 2009. 2213.3 2416.9 - 2678.7 2934.9 3147.2 3378.9 3634. 3847.9 4103.7 4404.9 4618.2 4818.3 - 5022.9 5226.1 5470.7 5709.1 6013.6 6283.4 6551.3 6833.7 7048.7 7337.4 - 7554.7 7861.8 8161.1 8377. 8667.2 8907.1] - >>> g = f.cumsum('latitude', masked_as_zero=True) - >>> g = f.cumsum('latitude', coordinate='mid_range') - >>> f.cumsum('latitude', inplace=True) + radius: optional + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: + ``f.radius(radius)``. See the `cf.Field.radius` for + details. - """ - # Retrieve the axis - axis_key = self.domain_axis(axis, key=True) - if axis_key is None: - raise ValueError("Invalid axis specifier: {!r}".format(axis)) + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constucts, then the + default radius taken as 6371229 metres. + + great_circle: `bool`, optional + If True then allow, if required, the derivation of i) + area weights from polygon geometry cells by assuming + that each cell part is a spherical polygon composed of + great circle segments; and ii) and the derivation of + line-length weights from line geometry cells by + assuming that each line part is composed of great + circle segments. - # Construct new field - f = _inplace_enabled_define_and_cleanup(self) + scale: number, optional + If set to a positive number then scale the weights so + that they are less than or equal to that number. By + default the weights are scaled to lie between 0 and 1 + (i.e. *scale* is 1). - # Get the axis index - axis_index = f.get_data_axes().index(axis_key) + Ignored if the moving window method is not + weighted. The *scale* parameter can not be set for + moving integrals. - f.data.cumsum(axis_index, masked_as_zero=masked_as_zero, inplace=True) + *Parameter example:* + To scale all weights so that they lie between 0 and + 0.5: ``scale=0.5``. 
- if self.domain_axis(axis_key).get_size() > 1: - # Update the bounds of the summed axis if necessary - coord = f.dimension_coordinate( - filter_by_axis=(axis_key,), default=None - ) - if coord is not None and coord.has_bounds(): - bounds = coord.get_bounds() - bounds[:, 0] = bounds[0, 0] + {{inplace: `bool`, optional}} - data = coord.get_data(None, _fill_value=False) + :Returns: - if coordinate is not None and data is not None: - if coordinate == "mid_range": - data[...] = ( - (bounds[:, 0] + bounds[:, 1]) * 0.5 - ).squeeze() - elif coordinate == "minimum": - data[...] = coord.lower_bounds - elif coordinate == "maximum": - data[...] = coord.upper_bounds - else: - raise ValueError( - "'coordinate' parameter must be one of " - "(None, 'mid_range', 'minimum', 'maximum'). " - f"Got {coordinate!r}" - ) + `Field` or `None` + The field construct of moving window values, or `None` + if the operation was in-place. - # Add a cell method - f._update_cell_methods( - method="sum", domain_axes=f.domain_axes(axis_key, todict=True) - ) + **Examples:** - return f + >>> f = cf.example_field(0) + >>> print(f) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> print(f.array) + [[0.007 0.034 0.003 0.014 0.018 0.037 0.024 0.029] + [0.023 0.036 0.045 0.062 0.046 0.073 0.006 0.066] + [0.11 0.131 0.124 0.146 0.087 0.103 0.057 0.011] + [0.029 0.059 0.039 0.07 0.058 0.072 0.009 0.017] + [0.006 0.036 0.019 0.035 0.018 0.037 0.034 0.013]] + >>> print(f.coordinate('X').bounds.array) + [[ 0. 45.] + [ 45. 90.] + [ 90. 135.] + [135. 180.] + [180. 225.] + [225. 270.] + [270. 315.] + [315. 360.]] + >>> f.iscyclic('X') + True + >>> f.iscyclic('Y') + False - @_inplace_enabled(default=False) - def flip(self, axes=None, inplace=False, i=False, **kwargs): - """Flip (reverse the direction of) axes of the field. + Create a weighted 3-point running mean for the cyclic 'X' + axis: - .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, - `squeeze`, `transpose`, `unsqueeze` + >>> g = f.moving_window('mean', 3, axis='X', weights=True) + >>> print(g) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(5), longitude(8)) 1 + Cell methods : area: mean longitude(8): mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> print(g.array) + [[0.02333 0.01467 0.017 0.01167 0.023 0.02633 0.03 0.02 ] + [0.04167 0.03467 0.04767 0.051 0.06033 0.04167 0.04833 0.03167] + [0.084 0.12167 0.13367 0.119 0.112 0.08233 0.057 0.05933] + [0.035 0.04233 0.056 0.05567 0.06667 0.04633 0.03267 0.01833] + [0.01833 0.02033 0.03 0.024 0.03 0.02967 0.028 0.01767]] + >>> print(g.coordinate('X').bounds.array) + [[-45. 90.] + [ 0. 135.] + [ 45. 180.] + [ 90. 225.] + [135. 270.] + [180. 315.] + [225. 360.] + [270. 405.]] - :Parameters: + Create an unweighted 3-point running mean for the cyclic 'X' + axis: - axes: (sequence of) `str` or `int`, optional - Select the domain axes to flip, defined by the domain axes - that would be selected by passing each given axis - description to a call of the field construct's - `domain_axis` method. 
For example, for a value of ``'X'``, - the domain axis construct returned by - ``f.domain_axis('X')`` is selected. + >>> g = f.moving_window('mean', 3, axis='X') - If no axes are provided then all axes are flipped. + Create an weighted 4-point running integral for the non-cyclic + 'Y' axis: - {{inplace: `bool`, optional}} + >>> g = f.moving_window('integral', 4, axis='Y', weights=True) + >>> g.Units + + >>> print(g.array) + [[ -- -- -- -- -- -- -- --] + [ 8.37 11.73 10.05 13.14 8.88 11.64 4.59 4.02] + [ 8.34 11.79 10.53 13.77 8.88 11.64 4.89 3.54] + [ -- -- -- -- -- -- -- --] + [ -- -- -- -- -- -- -- --]] + >>> print(g.coordinate('Y').bounds.array) + [[-90. 30.] + [-90. 60.] + [-60. 90.] + [-30. 90.] + [ 30. 90.]] + >>> g = f.moving_window('integral', 4, axis='Y', weights=True, cval=0) + >>> print(g.array) + [[ 7.5 9.96 8.88 11.04 7.14 9.48 4.32 3.51] + [ 8.37 11.73 10.05 13.14 8.88 11.64 4.59 4.02] + [ 8.34 11.79 10.53 13.77 8.88 11.64 4.89 3.54] + [ 7.65 10.71 9.18 11.91 7.5 9.45 4.71 1.56] + [ 1.05 2.85 1.74 3.15 2.28 3.27 1.29 0.9 ]] - {{i: deprecated at version 3.0.0}} + """ + method_values = ("mean", "sum", "integral") + if method not in method_values: + raise ValueError( + f"Non-valid 'method' parameter value: {method!r}. " + f"Expected one of {method_values!r}" + ) - kwargs: deprecated at version 3.0.0 + if cval is not None and cval != 0: + raise ValueError("The cval parameter must be None or 0") - :Returns: + window_size = int(window_size) - `Field` or `None` - The field construct with flipped axes, or `None` if the - operation was in-place. + # Construct new field + f = _inplace_enabled_define_and_cleanup(self) - **Examples:** + # Find the axis for the moving window + axis = f.domain_axis(axis, key=True) + iaxis = self.get_data_axes().index(axis) - >>> g = f.flip() - >>> g = f.flip('time') - >>> g = f.flip(1) - >>> g = f.flip(['time', 1, 'dim2']) - >>> f.flip(['dim2'], inplace=True) + if method == "sum" or weights is False: + weights = None - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS(self, "flip", kwargs) # pragma: no cover + if method == "integral": + measure = True + if weights is None: + raise ValueError( + "Must set weights parameter for 'integral' method" + ) - if axes is None and not kwargs: - # Flip all the axes - axes = set(self.get_data_axes(default=())) - iaxes = list(range(self.ndim)) + if scale is not None: + raise ValueError( + "Can't set the 'scale' parameter for moving integrals" + ) else: - if isinstance(axes, (str, int)): - axes = (axes,) + if scale is None: + scale = 1.0 - axes = set([self.domain_axis(axis, key=True) for axis in axes]) + measure = False - data_axes = self.get_data_axes(default=()) - iaxes = [ - data_axes.index(axis) - for axis in axes.intersection(self.get_data_axes()) - ] + if weights is not None: + if isinstance(weights, Data): + if weights.ndim > 1: + raise ValueError( + f"The input weights (shape {weights.shape}) do not " + f"match the selected axis (size {f.shape[iaxis]})" + ) - # Flip the requested axes in the field's data array - f = _inplace_enabled_define_and_cleanup(self) - super(Field, f).flip(iaxes, inplace=True) + if weights.ndim == 1: + if weights.shape[0] != f.shape[iaxis]: + raise ValueError( + f"The input weights (size {weights.size}) do not " + f"match the selected axis (size {f.shape[iaxis]})" + ) + + # Get the data weights + w = f.weights( + weights, + axes=axis, + measure=measure, + scale=scale, + radius=radius, + great_circle=great_circle, + data=True, + ) + + # Multiply the field by the (possibly adjusted) weights + if 
numpy_can_cast(w.dtype, f.dtype): + f *= w + else: + f = f * w + + # Create the window weights + window = numpy_full((window_size,), 1.0) + if weights is None and method == "mean": + # If there is no data weighting, make sure that the sum of + # the window weights is 1. + window /= window.size + + f.convolution_filter( + window, + axis=axis, + mode=mode, + cval=cval, + origin=origin, + update_bounds=True, + inplace=True, + ) - # Flip any constructs which span the flipped axes - for key, construct in f.constructs.filter_by_data(todict=True).items(): - construct_axes = f.get_data_axes(key) - construct_flip_axes = axes.intersection(construct_axes) - if construct_flip_axes: - iaxes = [ - construct_axes.index(axis) for axis in construct_flip_axes - ] - construct.flip(iaxes, inplace=True) + if weights is not None and method == "mean": + # Divide the field by the running sum of the adjusted data + # weights + w.convolution_filter( + window=window, + axis=iaxis, + mode=mode, + cval=0, + origin=origin, + inplace=True, + ) + if numpy_can_cast(w.dtype, f.dtype): + f /= w + else: + f = f / w + + # Add a cell method + if f.domain_axis(axis).get_size() > 1 or method == "integral": + f._update_cell_methods( + method=method, domain_axes=f.domain_axes(axis, todict=True) + ) return f @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) - def anchor( - self, axis, value, inplace=False, dry_run=False, i=False, **kwargs + def convolution_filter( + self, + window=None, + axis=None, + mode=None, + cval=None, + origin=0, + update_bounds=True, + inplace=False, + weights=None, + i=False, ): - """Roll a cyclic axis so that the given value lies in the first - coordinate cell. + """Convolve the field construct along the given axis with the + specified filter. - A unique axis is selected with the *axes* and *kwargs* parameters. + The magnitude of the integral of the filter (i.e. the sum of + the window weights defined by the *window* parameter) affects + the convolved values. For example, window weights of ``[0.2, + 0.2 0.2, 0.2, 0.2]`` will produce a non-weighted 5-point + running mean; and window weights of ``[1, 1, 1, 1, 1]`` will + produce a 5-point running sum. Note that the window weights + returned by functions of the `scipy.signal.windows` package do + not necessarily sum to 1 (see the examples for details). - .. versionadded:: 1.0 + .. note:: The `moving_window` method can not, in general, be + emulated by the `convolution_filter` method, as the + latter i) can not change the window weights as the + filter passes through the axis; and ii) does not + update the cell method constructs. - .. seealso:: `axis`, `cyclic`, `iscyclic`, `period`, `roll` + .. seealso:: `collapse`, `derivative`, `moving_window`, + `cf.relative_vorticity` :Parameters: - axis: - The cyclic axis to be rolled, defined by that which would - be selected by passing the given axis description to a - call of the field construct's `domain_axis` method. For - example, for a value of ``'X'``, the domain axis construct - returned by ``f.domain_axis('X')`` is selected. - - value: - Anchor the dimension coordinate values for the selected - cyclic axis to the *value*. May be any numeric scalar - object that can be converted to a `Data` object (which - includes `numpy` and `Data` objects). If *value* has units - then they must be compatible with those of the dimension - coordinates, otherwise it is assumed to have the same - units as the dimension coordinates. 
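# Illustrative sketch (not from the patch): for an unweighted running mean,
# the code above builds a window of ones divided by its size, so
# `moving_window` is expected to give the same data as `convolution_filter`
# with a normalised window (only the cell methods should differ).  Assumes
# cf.example_field(2) and the methods' default edge handling.
import cf

f = cf.example_field(2)
g = f.moving_window("mean", 3, axis="X")
h = f.convolution_filter([1 / 3.0, 1 / 3.0, 1 / 3.0], axis="X")
print(g.data.equals(h.data))  # expected True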
The coordinate values - are transformed so that *value* is "equal to or just - before" the new first coordinate value. More specifically: - - * Increasing dimension coordinates with positive period, - P, are transformed so that *value* lies in the - half-open range (L-P, F], where F and L are the - transformed first and last coordinate values, - respectively. - - .. - - * Decreasing dimension coordinates with positive period, - P, are transformed so that *value* lies in the - half-open range (L+P, F], where F and L are the - transformed first and last coordinate values, - respectively. - - *Parameter example:* - If the original dimension coordinates are ``0, 5, ..., - 355`` (evenly spaced) and the period is ``360`` then - ``value=0`` implies transformed coordinates of ``0, 5, - ..., 355``; ``value=-12`` implies transformed - coordinates of ``-10, -5, ..., 345``; ``value=380`` - implies transformed coordinates of ``380, 385, ..., - 715``. + window: sequence of numbers + Specify the window weights to use for the filter. *Parameter example:* - If the original dimension coordinates are ``355, 350, - ..., 0`` (evenly spaced) and the period is ``360`` then - ``value=355`` implies transformed coordinates of ``355, - 350, ..., 0``; ``value=0`` implies transformed - coordinates of ``0, -5, ..., -355``; ``value=392`` - implies transformed coordinates of ``390, 385, ..., - 30``. - - {{inplace: `bool`, optional}} - - dry_run: `bool`, optional - Return a dictionary of parameters which describe the - anchoring process. The field is not changed, even if *i* - is True. - - {{i: deprecated at version 3.0.0}} - - kwargs: deprecated at version 3.0.0 - - :Returns: - - `Field` - The rolled field. + An unweighted 5-point moving average can be computed + with ``window=[0.2, 0.2, 0.2, 0.2, 0.2]`` - **Examples:** + Note that the `scipy.signal.windows` package has suite + of window functions for creating window weights for + filtering (see the examples for details). - >>> f.iscyclic('X') - True - >>> f.dimension_coordinate('X').data - TODO - >>> print(f.dimension_coordinate('X').array) - [ 0 45 90 135 180 225 270 315] - >>> g = f.anchor('X', 230) - >>> print(g.dimension_coordinate('X').array) - [270 315 0 45 90 135 180 225] - >>> g = f.anchor('X', cf.Data(590, 'degreesE')) - >>> print(g.dimension_coordinate('X').array) - [630 675 360 405 450 495 540 585] - >>> g = f.anchor('X', cf.Data(-490, 'degreesE')) - >>> print(g.dimension_coordinate('X').array) - [-450 -405 -720 -675 -630 -585 -540 -495] + .. versionadded:: 3.3.0 (replaces the old weights + parameter) - >>> f.iscyclic('X') - True - >>> f.dimension_coordinate('X').data - - >>> f.anchor('X', 10000).dimension_coordinate('X').data - - >>> d = f.anchor('X', 10000, dry_run=True) - >>> d - {'axis': 'domainaxis2', - 'nperiod': , - 'roll': 28} - >>> (f.roll(d['axis'], d['roll']).dimension_coordinate( - ... d['axis']) + d['nperiod']).data - + axis: + Select the domain axis over which the filter is to be + applied, defined by that which would be selected by + passing the given axis description to a call of the field + construct's `domain_axis` method. For example, for a value + of ``'X'``, the domain axis construct returned by + ``f.domain_axis('X')`` is selected. - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "anchor", kwargs - ) # pragma: no cover + mode: `str`, optional + The *mode* parameter determines how the input array is + extended when the filter overlaps an array border. 
The + default value is ``'constant'`` or, if the dimension being + convolved is cyclic (as ascertained by the `iscyclic` + method), ``'wrap'``. The valid values and their behaviours + are as follows: - axis = self.domain_axis(axis, key=True) + ============== ========================== =========================== + *mode* Description Behaviour + ============== ========================== =========================== + ``'reflect'`` The input is extended by ``(c b a | a b c | c b a)`` + reflecting about the edge - if dry_run: - f = self - else: - f = _inplace_enabled_define_and_cleanup(self) + ``'constant'`` The input is extended by ``(k k k | a b c | k k k)`` + filling all values beyond + the edge with the same + constant value (``k``), + defined by the *cval* + parameter. - dim = f.dimension_coordinate(filter_by_axis=(axis,), default=None) - if dim is None: - raise ValueError( - "Can't shift non-cyclic " - f"{f.constructs.domain_axis_identity(axis)!r} axis" - ) + ``'nearest'`` The input is extended by ``(a a a | a b c | d d d)`` + replicating the last point - period = dim.period() - if period is None: - raise ValueError(f"Cyclic {dim.identity()!r} axis has no period") + ``'mirror'`` The input is extended by ``(c b | a b c | b a)`` + reflecting about the + centre of the last point. - value = Data.asdata(value) - if not value.Units: - value = value.override_units(dim.Units) - elif not value.Units.equivalent(dim.Units): - raise ValueError( - f"Anchor value has incompatible units: {value.Units!r}" - ) + ``'wrap'`` The input is extended by ``(a b c | a b c | a b c)`` + wrapping around to the + opposite edge. + ============== ========================== =========================== - axis_size = f.domain_axes(todict=True)[axis].get_size() - if axis_size <= 1: - # Don't need to roll a size one axis - if dry_run: - return {"axis": axis, "roll": 0, "nperiod": 0} - else: - if inplace: - f = None + The position of the window relative to each value can be + changed by using the *origin* parameter. - return f + cval: scalar, optional + Value to fill past the edges of the array if *mode* is + ``'constant'``. Ignored for other modes. Defaults to + `None`, in which case the edges of the array will be + filled with missing data. - c = dim.get_data(_fill_value=False) + *Parameter example:* + To extend the input by filling all values beyond the + edge with zero: ``cval=0`` - if dim.increasing: - # Adjust value so it's in the range [c[0], c[0]+period) - n = ((c[0] - value) / period).ceil() - value1 = value + n * period + origin: `int`, optional + Controls the placement of the filter. Defaults to 0, which + is the centre of the window. If the window has an even + number of weights then then a value of 0 defines the index + defined by ``width/2 -1``. - shift = axis_size - numpy_argmax((c - value1 >= 0).array) - if not dry_run: - f.roll(axis, shift, inplace=True) + *Parameter example:* + For a weighted moving average computed with a weights + window of ``[0.1, 0.15, 0.5, 0.15, 0.1]``, if + ``origin=0`` then the average is centred on each + point. If ``origin=-2`` then the average is shifted to + include the previous four points. If ``origin=1`` then + the average is shifted to include the previous point and + the and the next three points. - dim = f.dimension_coordinate(filter_by_axis=(axis,)) + update_bounds: `bool`, optional + If False then the bounds of a dimension coordinate + construct that spans the convolved axis are not + altered. 
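# Illustrative sketch (not from the patch): how 'mode' and 'cval' behave at
# the edges of a non-cyclic axis.  With mode='constant' and the default
# cval=None the edge values are masked; with cval=0 they are computed from
# zero padding.  Assumes cf.example_field(2), whose 'T' axis is not cyclic.
import cf

f = cf.example_field(2)
g1 = f.convolution_filter([0.2] * 5, "T", mode="constant")          # cval=None
g2 = f.convolution_filter([0.2] * 5, "T", mode="constant", cval=0)  # zero padding
print(g1.array[:2, 0, 0])  # expected: masked edge values
print(g2.array[:2, 0, 0])  # expected: values computed with zero padding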
By default, the bounds of a dimension coordinate + construct that spans the convolved axis are updated to + reflect the width and origin of the window. - n = ((value - dim.data[0]) / period).ceil() - else: - # Adjust value so it's in the range (c[0]-period, c[0]] - n = ((c[0] - value) / period).floor() - value1 = value + n * period + {{inplace: `bool`, optional}} - shift = axis_size - numpy_argmax((value1 - c >= 0).array) + {{i: deprecated at version 3.0.0}} - if not dry_run: - f.roll(axis, shift, inplace=True) + weights: deprecated at version 3.3.0 + Use the *window* parameter instead. - # TODO should this call be like the one above? - dim = f.dimension_coordinate(filter_by_axis=(axis,)) + :Returns: - n = ((value - dim.data[0]) / period).floor() + `Field` or `None` + The convolved field construct, or `None` if the operation + was in-place. - if dry_run: - return {"axis": axis, "roll": shift, "nperiod": n * period} + **Examples:** - if n: - np = n * period - dim += np - bounds = dim.get_bounds(None) - if bounds is not None: - bounds += np + >>> f = cf.example_field(2) + >>> print(f) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K + Cell methods : area: mean + Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> print(f.array[:, 0, 0]) + [210.7 305.3 249.4 288.9 231.1 200. 234.4 289.2 204.3 203.6 261.8 256.2 + 212.3 231.7 255.1 213.9 255.8 301.2 213.3 200.1 204.6 203.2 244.6 238.4 + 304.5 269.8 267.9 282.4 215. 288.7 217.3 307.1 299.3 215.9 290.2 239.9] + >>> print(f.coordinate('T').bounds.dtarray[0]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-01-01 00:00:00)] + >>> print(f.coordinate('T').bounds.dtarray[2]) + [cftime.DatetimeGregorian(1960-02-01 00:00:00) + cftime.DatetimeGregorian(1960-03-01 00:00:00)] - return f + Create a 5-point (non-weighted) running mean: - def argmax(self, axis=None): - """Return the indices of the maximum values along an axis. + >>> g = f.convolution_filter([0.2, 0.2, 0.2, 0.2, 0.2], 'T') + >>> print(g) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K + Cell methods : area: mean + Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> print(g.array[:, 0, 0]) + [ -- -- 257.08 254.94 240.76 248.72 231.8 226.3 238.66 243.02 227.64 + 233.12 243.42 233.84 233.76 251.54 247.86 236.86 235.0 224.48 213.16 + 218.18 239.06 252.1 265.04 272.6 267.92 264.76 254.26 262.1 265.48 + 265.66 265.96 270.48 -- --] + >>> print(g.coordinate('T').bounds.dtarray[0]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-03-01 00:00:00)] + >>> print(g.coordinate('T').bounds.dtarray[2]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-05-01 00:00:00)] - If no axis is specified then the returned index locates the - maximum of the whole data. + Create a 5-point running sum: - .. 
seealso:: `argmin`, `where` + >>> g = f.convolution_filter([1, 1, 1, 1, 1], 'T') + >>> print(g) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K + Cell methods : area: mean + Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> print(g.array[:, 0, 0]) + [ -- -- 1285.4 1274.7 1203.8 1243.6 1159.0 1131.5 1193.3 1215.1 + 1138.2 1165.6 1217.1 1169.2 1168.8 1257.7 1239.3 1184.3 1175.0 + 1122.4 1065.8 1090.9 1195.3 1260.5 1325.2 1363.0 1339.6 1323.8 + 1271.3 1310.5 1327.4 1328.3 1329.8 1352.4 -- --] + >>> print(g.coordinate('T').bounds.dtarray[0]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-03-01 00:00:00)] + >>> print(g.coordinate('T').bounds.dtarray[2]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-05-01 00:00:00)] - :Parameters: + Calculate a convolution along the time axis with Gaussian window + weights, using the "nearest" mode at the border of the edges of + the time dimension (note that the window weights returned by + `scipy.signal.windows` functions do not necessarily sum to 1): - :Returns: + >>> import scipy.signal.windows + >>> gaussian_window = scipy.signal.windows.gaussian(3, std=0.4) + >>> print(gaussian_window) + [0.04393693 1. 0.04393693] + >>> g = f.convolution_filter(gaussian_window, 'T', mode='nearest') + >>> print(g.array[:, 0, 0]) + [233.37145775 325.51538316 275.50732596 310.01169661 252.58076685 + 220.4526426 255.89394793 308.47513278 225.95212089 224.07900476 + 282.00220208 277.03050023 233.73682991 252.23612278 274.67829762 + 236.34737939 278.43191451 321.81081556 235.32558483 218.46124456 + 222.31976533 222.93647058 264.00254989 262.52577025 326.82874967 + 294.94950081 292.16197475 303.61714525 240.09238279 307.69393641 + 243.47762505 329.79781991 322.27901629 241.80082237 310.22645435 + 263.19096851] + >>> print(g.coordinate('T').bounds.dtarray[0]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-02-01 00:00:00)] + >>> print(g.coordinate('T').bounds.dtarray[1]) + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-03-01 00:00:00)] - **Examples:** + """ + if weights is not None: + _DEPRECATION_ERROR_KWARGS( + self, + "convolution_filter", + {"weights": weights}, + message="Use keyword 'window' instead.", + version="3.3.0", + ) # pragma: no cover - >>> g = f.argmax('T') + if isinstance(window, str): + _DEPRECATION_ERROR( + "A string-valued 'window' parameter has been deprecated " + "at version 3.0.0 and is no longer available. Provide a " + "sequence of numerical window weights instead. " + "scipy.signal.windows may be used to generate particular " + "window functions." + ) # pragma: no cover - """ - print("This method is not ready for use.") - return + if isinstance(window[0], str): + _DEPRECATION_ERROR( + "A string-valued 'window' parameter element has been " + "deprecated at version 3.0.0 and is no longer available. " + "Provide a sequence of numerical window weights instead. " + "scipy.signal.windows may be used to generate particular " + "window functions." 
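# Illustrative sketch (not from the patch): the Gaussian weights used in the
# example above do not sum to 1, so that result is not a weighted mean.
# Normalising the window first is one way to recover a weighted running mean.
import scipy.signal.windows
import cf

f = cf.example_field(2)
w = scipy.signal.windows.gaussian(3, std=0.4)
g = f.convolution_filter(w / w.sum(), "T", mode="nearest")  # weights now sum to 1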
+ ) # pragma: no cover - # Keep these commented lines for using with the future dask version - # - # standard_name = None - # - # if axis is not None: - # axis_key = self.domain_axis( - # axis, key=True, default=ValueError("TODO") - # ) - # axis = self.get_data_axes.index(axis_key) - # standard_name = self.domain_axis_identity( - # axis_key, strict=True, default=None - # ) - # - # indices = self.data.argmax(axis, unravel=True) - # - # if axis is None: - # return self[indices] - # - # # What if axis_key does not span array? - # out = self.subspace(**{axis_key: [0]}) - # out.squeeze(axis_key, inplace=True) - # - # for i in indices.ndindex(): - # out.data[i] = org.data[indices[i].datum()] - # - # for key, c in tuple( - # out.constructs.filter_by_type( - # "dimension_coordinate", - # "auxiliary_coordinate", - # "cell_measure", - # "domain_ancillary", - # "field_ancillary", - # ) - # .filter_by_axis("and", axis_key) - # .items() - # ): - # - # out.del_construct(key) - # - # if c.construct_type == ( - # "cell_measure", - # "domain_ancillary", - # "field_ancillary", - # ): - # continue - # - # aux = self._AuxiliaryCoordinate() - # aux.set_properties(c.properties()) - # - # c_data = c.get_data(None) - # if c_data is not None: - # data = Data.empty(indices.shape, dtype=c.dtype) - # for x in indices.ndindex(): - # data[x] = c_data[indices[x]] - # - # aux.set_data(data, copy=False) - # - # c_bounds_data = c.get_bounds_data(None) - # if c_bounds_data is not None: - # bounds = Data.empty( - # indices.shape + (c_bounds_data.shape[-1],), - # dtype=c_bounds_data.dtype, - # ) - # for x in indices.ndindex(): - # bounds[x] = c_bounds_data[indices[x]] - # - # aux.set_bounds( - # self._Bounds(data=bounds, copy=False), copy=False - # ) - # - # out.set_construct(aux, axes=out.get_data_axes(), copy=False) - # - # if standard_name: - # cm = CellMethod() - # cm.create(standard_name + ": maximum") - # - # return out + # Retrieve the axis + axis_key = self.domain_axis(axis, key=True) + iaxis = self.get_data_axes().index(axis_key) - @_manage_log_level_via_verbosity - def autocyclic(self, key=None, coord=None, verbose=None): - """Set dimensions to be cyclic. + # Default mode to 'wrap' if the axis is cyclic + if mode is None: + if self.iscyclic(axis_key): + mode = "wrap" + else: + mode = "constant" - A dimension is set to be cyclic if it has a unique longitude (or - grid longitude) dimension coordinate construct with bounds and the - first and last bounds values differ by 360 degrees (or an - equivalent amount in other units). + # Construct new field + f = _inplace_enabled_define_and_cleanup(self) - .. versionadded:: 1.0 + f.data.convolution_filter( + window=window, + axis=iaxis, + mode=mode, + cval=cval, + origin=origin, + inplace=True, + ) - .. 
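# Illustrative sketch (not from the patch) of the default-mode logic above:
# a cyclic axis defaults to mode='wrap' (no masked edges), a non-cyclic one
# to mode='constant' (masked edges, since cval defaults to None).  Assumes
# cf.example_field(2) with a cyclic longitude ('X') axis.
import cf

f = cf.example_field(2)
gx = f.convolution_filter([0.2] * 5, "X")  # cyclic: behaves like mode='wrap'
gt = f.convolution_filter([0.2] * 5, "T")  # non-cyclic: mode='constant'
print(gx.array[0, 0, :2])  # expected: no missing values
print(gt.array[:2, 0, 0])  # expected: masked edge values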
seealso:: `cyclic`, `iscyclic`, `period` + # Update the bounds of the convolution axis if necessary + if update_bounds: + coord = f.dimension_coordinate( + filter_by_axis=(axis_key,), default=None + ) + if coord is not None and coord.has_bounds(): + old_bounds = coord.bounds.array + length = old_bounds.shape[0] + new_bounds = numpy_empty((length, 2)) + len_weights = len(window) + lower_offset = len_weights // 2 + origin + upper_offset = len_weights - 1 - lower_offset + if mode == "wrap": + if coord.direction(): + new_bounds[:, 0] = coord.roll( + 0, upper_offset + ).bounds.array[:, 0] + new_bounds[:, 1] = ( + coord.roll(0, -lower_offset).bounds.array[:, 1] + + coord.period() + ) + else: + new_bounds[:, 0] = ( + coord.roll(0, upper_offset).bounds.array[:, 0] + + 2 * coord.period() + ) + new_bounds[:, 1] = ( + coord.roll(0, -lower_offset).bounds.array[:, 1] + + coord.period() + ) + else: + new_bounds[upper_offset:length, 0] = old_bounds[ + 0 : length - upper_offset, 0 + ] + new_bounds[0:upper_offset, 0] = old_bounds[0, 0] + new_bounds[0 : length - lower_offset, 1] = old_bounds[ + lower_offset:length, 1 + ] + new_bounds[length - lower_offset : length, 1] = old_bounds[ + length - 1, 1 + ] - :Parameters: + coord.set_bounds( + self._Bounds(data=Data(new_bounds, units=coord.Units)) + ) - {{verbose: `int` or `str` or `None`, optional}} + return f - :Returns: + def convert(self, identity, full_domain=True, cellsize=False): + """Convert a metadata construct into a new field construct. - `bool` + The new field construct has the properties and data of the + metadata construct, and domain axis constructs corresponding to + the data. By default it also contains other metadata constructs + (such as dimension coordinate and coordinate reference constructs) + that define its domain. - """ - if coord is None: - key, coord = self.dimension_coordinate( - "X", item=True, default=(None, None) - ) - if coord is None: - return False - elif not coord.X: - return False + The `cf.read` function allows a field construct to be derived + directly from a netCDF variable that corresponds to a metadata + construct. In this case, the new field construct will have a + domain limited to that which can be inferred from the + corresponding netCDF variable - typically only domain axis and + dimension coordinate constructs. This will usually result in a + different field construct to that created with the convert method. - bounds = coord.get_bounds(None) - if bounds is None: - self.cyclic(key, iscyclic=False) - return False + .. versionadded:: 3.0.0 - data = bounds.get_data(None, _fill_value=False) - if data is None: - self.cyclic(key, iscyclic=False) - return False + .. seealso:: `cf.read` - units = bounds.Units - if units.islongitude: - period = Data(360.0, units="degrees_east") - elif units == _units_degrees: - period = Data(360.0, units="degrees") - else: - self.cyclic(key, iscyclic=False) - return False + :Parameters: - period.Units = data.Units + identity: + Select the metadata construct by one of: - if abs(data.last_element() - data.first_element()) != period.array: - self.cyclic(key, iscyclic=False) - return False + * The identity or key of a construct. - self.cyclic(key, iscyclic=True, period=period) + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); or a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. 
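# Illustrative arithmetic check (not from the patch) of the bounds-update
# offsets used in `convolution_filter` above: for a 5-point window with
# origin=0, each output cell spans from the lower bound two cells back to
# the upper bound two cells ahead.
len_weights, origin = 5, 0
lower_offset = len_weights // 2 + origin        # 2
upper_offset = len_weights - 1 - lower_offset   # 2
print(lower_offset, upper_offset)               # 2 2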
- return True + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: - @_deprecated_kwarg_check("i") - def squeeze(self, axes=None, inplace=False, i=False, **kwargs): - """Remove size 1 axes from the data. + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] - By default all size 1 axes are removed, but particular size 1 axes - may be selected for removal. + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. - Squeezed domain axis constructs are not removed from the metadata - constructs, nor from the domain of the field construct. + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. - .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, `flip`, - `remove_axes`, `transpose`, `unsqueeze` + *Parameter example:* + ``identity='measure:area'`` - :Parameters: + *Parameter example:* + ``identity='latitude'`` - axes: (sequence of) `str` or `int`, optional - Select the domain axes to squeeze, defined by the domain - axes that would be selected by passing each given axis - description to a call of the field construct's - `domain_axis` method. For example, for a value of ``'X'``, - the domain axis construct returned by - ``f.domain_axis('X')`` is selected. + *Parameter example:* + ``identity='long_name=Longitude'`` - If no axes are provided then all size 1 axes are squeezed. + *Parameter example:* + ``identity='domainancillary2'`` - {{inplace: `bool`, optional}} + *Parameter example:* + ``identity='ncvar%areacello'`` - {{i: deprecated at version 3.0.0}} + full_domain: `bool`, optional + If False then do not create a domain, other than domain + axis constructs, for the new field construct. By default + as much of the domain as possible is copied to the new + field construct. - kwargs: deprecated at version 3.0.0 + cellsize: `bool`, optional + If True then create a field construct from the selected + metadata construct's cell sizes. :Returns: - `Field` or `None` - The field construct with squeezed data, or `None` if the - operation was in-place. + `Field` + The new field construct. 
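# Illustrative usage sketch (not from the patch) for `convert`, whose
# Examples section below is still marked TODO.  Assumes cf.example_field(1),
# which has a 'measure:area' cell measure and a 'grid_latitude' coordinate.
import cf

f = cf.example_field(1)
g = f.convert("measure:area")                      # field from the cell measure
h = f.convert("grid_latitude", full_domain=False)  # keep only the domain axes
print(g)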
- **Examples:** + **Examples:** - >>> g = f.squeeze() - >>> g = f.squeeze('time') - >>> g = f.squeeze(1) - >>> g = f.squeeze(['time', 1, 'dim2']) - >>> f.squeeze(['dim2'], inplace=True) + TODO """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "squeeze", kwargs - ) # pragma: no cover - - data_axes = self.get_data_axes() + key, construct = self.construct( + identity, item=True, default=(None, None) + ) + if key is None: + raise ValueError( + f"Can't find metadata construct with identity {identity!r}" + ) - if axes is None: - domain_axes = self.domain_axes(todict=True) - axes = [ - axis - for axis in data_axes - if domain_axes[axis].get_size(None) == 1 - ] - else: - if isinstance(axes, (str, int)): - axes = (axes,) + f = super().convert(key, full_domain=full_domain) - axes = [self.domain_axis(x, key=True) for x in axes] - axes = set(axes).intersection(data_axes) + if cellsize: + # Change the new field's data to cell sizes + try: + cs = construct.cellsize + except AttributeError as error: + raise ValueError(error) - iaxes = [data_axes.index(axis) for axis in axes] + f.set_data(cs.data, set_axes=False, copy=False) - # Squeeze the field's data array - return super().squeeze(iaxes, inplace=inplace) + return f @_inplace_enabled(default=False) - def swapaxes(self, axis0, axis1, inplace=False, i=False): - """Interchange two axes of the data. + def cumsum( + self, axis, masked_as_zero=False, coordinate=None, inplace=False + ): + """Return the field cumulatively summed along the given axis. - .. seealso:: `flatten`, `flip`, `insert_dimension`, `squeeze`, - `transpose` + The cell bounds of the axis are updated to describe the range + over which the sums apply, and a new "sum" cell method + construct is added to the resulting field construct. + + .. versionadded:: 3.0.0 + + .. seealso:: `collapse`, `convolution_filter`, `moving_window`, + `sum` :Parameters: - axis0, axis1: TODO - Select the axes to swap. Each axis is identified by its - original integer position. + axis: + Select the domain axis over which the cumulative sums are + to be calculated, defined by that which would be selected + by passing the given axis description to a call of the + field construct's `domain_axis` method. For example, for a + value of ``'X'``, the domain axis construct returned by + ``f.domain_axis('X')`` is selected. + + masked_as_zero: `bool`, optional + If True then set missing data values to zero before + calculating the cumulative sum. By default the output data + will be masked at the same locations as the original data. + .. note:: Sums produced entirely from masked elements will + always result in masked values in the output + data, regardless of the setting of + *masked_as_zero*. + + coordinate: `str`, optional + Set how the cell coordinate values for the summed axis are + defined, relative to the new cell bounds. By default they + are unchanged from the original field construct. The + *coordinate* parameter may be one of: + + =============== ========================================= + *coordinate* Description + =============== ========================================= + `None` This is the default. + Output coordinates are unchanged. + ``'mid_range'`` An output coordinate is the average of + its output coordinate bounds. + ``'minimum'`` An output coordinate is the minimum of + its output coordinate bounds. + ``'maximum'`` An output coordinate is the maximum of + its output coordinate bounds. 
+ =============== ========================================= + *Parameter Example:* + ``coordinate='maximum'`` {{inplace: `bool`, optional}} :Returns: - `Field` or `None` - The field construct with data with swapped axis - positions. If the operation was in-place then `None` is - returned. + The field construct with the cumulatively summed axis, or + `None` if the operation was in-place. **Examples:** - - >>> f.shape - (1, 2, 3) - >>> f.swapaxes(1, 0).shape - (2, 1, 3) - >>> f.swapaxes(0, -1).shape - (3, 2, 1) - >>> f.swapaxes(1, 1).shape - (1, 2, 3) - >>> f.swapaxes(-1, -1).shape - (1, 2, 3) + >>> f = cf.example_field(2) + >>> print(f) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K + Cell methods : area: mean + Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> print(f.dimension_coordinate('T').bounds[[0, -1]].datetime_array) + [[cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-01-01 00:00:00)] + [cftime.DatetimeGregorian(1962-11-01 00:00:00) + cftime.DatetimeGregorian(1962-12-01 00:00:00)]] + >>> print(f.array[:, 0, 0]) + [210.7 305.3 249.4 288.9 231.1 200. 234.4 289.2 204.3 203.6 261.8 256.2 + 212.3 231.7 255.1 213.9 255.8 301.2 213.3 200.1 204.6 203.2 244.6 238.4 + 304.5 269.8 267.9 282.4 215. 288.7 217.3 307.1 299.3 215.9 290.2 239.9] + >>> g = f.cumsum('T') + >>> print(g) + Field: air_potential_temperature (ncvar%air_potential_temperature) + ------------------------------------------------------------------ + Data : air_potential_temperature(time(36), latitude(5), longitude(8)) K + Cell methods : area: mean time(36): sum + Dimension coords: time(36) = [1959-12-16 12:00:00, ..., 1962-11-16 00:00:00] + : latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : air_pressure(1) = [850.0] hPa + >>> print(g.dimension_coordinate('T').bounds[[0, -1]].datetime_array) + [[cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1960-01-01 00:00:00)] + [cftime.DatetimeGregorian(1959-12-01 00:00:00) + cftime.DatetimeGregorian(1962-12-01 00:00:00)]] + >>> print(g.array[:, 0, 0]) + [ 210.7 516. 765.4 1054.3 1285.4 1485.4 1719.8 2009. 2213.3 2416.9 + 2678.7 2934.9 3147.2 3378.9 3634. 3847.9 4103.7 4404.9 4618.2 4818.3 + 5022.9 5226.1 5470.7 5709.1 6013.6 6283.4 6551.3 6833.7 7048.7 7337.4 + 7554.7 7861.8 8161.1 8377. 
8667.2 8907.1] + >>> g = f.cumsum('latitude', masked_as_zero=True) + >>> g = f.cumsum('latitude', coordinate='mid_range') + >>> f.cumsum('latitude', inplace=True) """ - data_axes = self.get_data_axes(default=None) + # Retrieve the axis + axis_key = self.domain_axis(axis, key=True) + if axis_key is None: + raise ValueError("Invalid axis specifier: {!r}".format(axis)) - da_key0 = self.domain_axis(axis0, key=True) - da_key1 = self.domain_axis(axis1, key=True) + # Construct new field + f = _inplace_enabled_define_and_cleanup(self) - if da_key0 not in data_axes: - raise ValueError( - "Can't swapaxes {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axis0 - ) - ) + # Get the axis index + axis_index = f.get_data_axes().index(axis_key) - if da_key1 not in data_axes: - raise ValueError( - "Can't swapaxes {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axis1 - ) + f.data.cumsum(axis_index, masked_as_zero=masked_as_zero, inplace=True) + + if self.domain_axis(axis_key).get_size() > 1: + # Update the bounds of the summed axis if necessary + coord = f.dimension_coordinate( + filter_by_axis=(axis_key,), default=None ) + if coord is not None and coord.has_bounds(): + bounds = coord.get_bounds() + bounds[:, 0] = bounds[0, 0] - axis0 = data_axes.index(da_key0) - axis1 = data_axes.index(da_key1) + data = coord.get_data(None, _fill_value=False) - f = _inplace_enabled_define_and_cleanup(self) - super(Field, f).swapaxes(axis0, axis1, inplace=True) + if coordinate is not None and data is not None: + if coordinate == "mid_range": + data[...] = ( + (bounds[:, 0] + bounds[:, 1]) * 0.5 + ).squeeze() + elif coordinate == "minimum": + data[...] = coord.lower_bounds + elif coordinate == "maximum": + data[...] = coord.upper_bounds + else: + raise ValueError( + "'coordinate' parameter must be one of " + "(None, 'mid_range', 'minimum', 'maximum'). " + f"Got {coordinate!r}" + ) - if data_axes is not None: - data_axes = list(data_axes) - data_axes[axis1], data_axes[axis0] = ( - data_axes[axis0], - data_axes[axis1], + # Add a cell method + f._update_cell_methods( + method="sum", domain_axes=f.domain_axes(axis_key, todict=True) ) - f.set_data_axes(data_axes) return f - @_deprecated_kwarg_check("i") - def transpose( - self, - axes=None, - constructs=False, - inplace=False, - items=True, - i=False, - **kwargs, - ): - """Permute the axes of the data array. - - By default the order of the axes is reversed, but any ordering may - be specified by selecting the axes of the output in the required - order. - - By default metadata constructs are not transposed, but they may be - if the *constructs* parameter is set. + @_inplace_enabled(default=False) + def flip(self, axes=None, inplace=False, i=False, **kwargs): + """Flip (reverse the direction of) axes of the field. - .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, `flip`, - `squeeze`, `unsqueeze` + .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, + `squeeze`, `transpose`, `unsqueeze` :Parameters: axes: (sequence of) `str` or `int`, optional - Select the domain axis order, defined by the domain axes + Select the domain axes to flip, defined by the domain axes that would be selected by passing each given axis description to a call of the field construct's `domain_axis` method. For example, for a value of ``'X'``, the domain axis construct returned by ``f.domain_axis('X')`` is selected. - Each dimension of the field construct's data must be - provided, or if no axes are specified then the axis order - is reversed. 
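# Illustrative sketch (not from the patch): with coordinate='maximum' each
# output coordinate of the summed axis is expected to equal the upper bound
# of its widened cell, per the cumsum code above.  Assumes cf.example_field(2).
import cf

f = cf.example_field(2)
g = f.cumsum("T", coordinate="maximum")
t = g.dimension_coordinate("T")
print((t.array == t.bounds.array[:, 1]).all())  # expected True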
- - constructs: `bool` - If True then metadata constructs are also transposed so - that their axes are in the same relative order as in the - transposed data array of the field. By default metadata - constructs are not altered. + If no axes are provided then all axes are flipped. {{inplace: `bool`, optional}} - items: deprecated at version 3.0.0 - Use the *constructs* parameter instead. - {{i: deprecated at version 3.0.0}} kwargs: deprecated at version 3.0.0 @@ -15568,673 +14967,1279 @@ def transpose( :Returns: `Field` or `None` - The field construct with transposed data, or `None` if the + The field construct with flipped axes, or `None` if the operation was in-place. **Examples:** - >>> f.ndim - 3 - >>> g = f.transpose() - >>> g = f.transpose(['time', 1, 'dim2']) - >>> f.transpose(['time', -2, 'dim2'], inplace=True) + >>> g = f.flip() + >>> g = f.flip('time') + >>> g = f.flip(1) + >>> g = f.flip(['time', 1, 'dim2']) + >>> f.flip(['dim2'], inplace=True) """ - if not items: - _DEPRECATION_ERROR_KWARGS( - self, - "transpose", - {"items": items}, - "Use keyword 'constructs' instead.", - ) # pragma: no cover - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "transpose", kwargs - ) # pragma: no cover + _DEPRECATION_ERROR_KWARGS(self, "flip", kwargs) # pragma: no cover - if axes is None: - iaxes = list(range(self.ndim - 1, -1, -1)) + if axes is None and not kwargs: + # Flip all the axes + axes = set(self.get_data_axes(default=())) + iaxes = list(range(self.ndim)) else: - data_axes = self.get_data_axes(default=()) if isinstance(axes, (str, int)): axes = (axes,) - axes2 = [self.domain_axis(x, key=True) for x in axes] - if sorted(axes2) != sorted(data_axes): - raise ValueError( - "Can't transpose {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axes - ) - ) - - iaxes = [data_axes.index(axis) for axis in axes2] - - # Transpose the field's data array - return super().transpose(iaxes, constructs=constructs, inplace=inplace) - # @_inplace_enabled(default=False) - # def uncompress(self, inplace=False): - # '''Uncompress the construct. - # - # Compression saves space by identifying and removing unwanted - # missing data. Such compression techniques store the data more - # efficiently and result in no precision loss. - # - # Whether or not the construct is compressed does not alter its - # functionality nor external appearance. - # - # The following type of compression are available: - # - # * Ragged arrays for discrete sampling geometries (DSG). Three - # different types of ragged array representation are - # supported. - # - # .. - # - # * Compression by gathering. - # - # .. versionadded:: 3.0.6 - # - # .. seealso:: `cf.write`, `compress`, `flatten`, `varray` - # - # :Parameters: - # - # {{inplace: `bool`, optional}} - # - # :Returns: - # - # `Field` or `None` - # The uncompressed field construct, or `None` if the - # operation was in-place. 
- # - # **Examples:** - # - # TODO - # - # ''' - # f = _inplace_enabled_define_and_cleanup(self) - # super(Field, f).uncompress(inplace=True) - # - # for c in f.constructs.filter_by_data().values(): - # c.uncompress(inplace=True) - # - # return f + axes = set([self.domain_axis(axis, key=True) for axis in axes]) + + data_axes = self.get_data_axes(default=()) + iaxes = [ + data_axes.index(axis) + for axis in axes.intersection(self.get_data_axes()) + ] + + # Flip the requested axes in the field's data array + f = _inplace_enabled_define_and_cleanup(self) + super(Field, f).flip(iaxes, inplace=True) + + # Flip any constructs which span the flipped axes + for key, construct in f.constructs.filter_by_data(todict=True).items(): + construct_axes = f.get_data_axes(key) + construct_flip_axes = axes.intersection(construct_axes) + if construct_flip_axes: + iaxes = [ + construct_axes.index(axis) for axis in construct_flip_axes + ] + construct.flip(iaxes, inplace=True) + + return f @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) - def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): - """Insert size 1 axes into the data array. + def anchor( + self, axis, value, inplace=False, dry_run=False, i=False, **kwargs + ): + """Roll a cyclic axis so that the given value lies in the first + coordinate cell. - All size 1 domain axes which are not spanned by the field - construct's data are inserted. + A unique axis is selected with the *axes* and *kwargs* parameters. - The axes are inserted into the slowest varying data array positions. + .. versionadded:: 1.0 - .. seealso:: `flatten`, `flip`, `insert_dimension`, `squeeze`, - `transpose` + .. seealso:: `axis`, `cyclic`, `iscyclic`, `period`, `roll` :Parameters: + axis: + The cyclic axis to be rolled, defined by that which would + be selected by passing the given axis description to a + call of the field construct's `domain_axis` method. For + example, for a value of ``'X'``, the domain axis construct + returned by ``f.domain_axis('X')`` is selected. + + value: + Anchor the dimension coordinate values for the selected + cyclic axis to the *value*. May be any numeric scalar + object that can be converted to a `Data` object (which + includes `numpy` and `Data` objects). If *value* has units + then they must be compatible with those of the dimension + coordinates, otherwise it is assumed to have the same + units as the dimension coordinates. The coordinate values + are transformed so that *value* is "equal to or just + before" the new first coordinate value. More specifically: + + * Increasing dimension coordinates with positive period, + P, are transformed so that *value* lies in the + half-open range (L-P, F], where F and L are the + transformed first and last coordinate values, + respectively. + + .. + + * Decreasing dimension coordinates with positive period, + P, are transformed so that *value* lies in the + half-open range (L+P, F], where F and L are the + transformed first and last coordinate values, + respectively. + + *Parameter example:* + If the original dimension coordinates are ``0, 5, ..., + 355`` (evenly spaced) and the period is ``360`` then + ``value=0`` implies transformed coordinates of ``0, 5, + ..., 355``; ``value=-12`` implies transformed + coordinates of ``-10, -5, ..., 345``; ``value=380`` + implies transformed coordinates of ``380, 385, ..., + 715``. 
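# Illustrative sketch (not from the patch): because `flip` also flips the
# constructs spanning the flipped axes (the loop above), the 'Y' dimension
# coordinate comes back in reversed order.  Assumes cf.example_field(2).
import cf

f = cf.example_field(2)
g = f.flip("Y")
print(f.dimension_coordinate("Y").array)  # [-75. ... 75.]
print(g.dimension_coordinate("Y").array)  # expected: the same values reversed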
+ + *Parameter example:* + If the original dimension coordinates are ``355, 350, + ..., 0`` (evenly spaced) and the period is ``360`` then + ``value=355`` implies transformed coordinates of ``355, + 350, ..., 0``; ``value=0`` implies transformed + coordinates of ``0, -5, ..., -355``; ``value=392`` + implies transformed coordinates of ``390, 385, ..., + 30``. + {{inplace: `bool`, optional}} - {{i: deprecated at version 3.0.0}} + dry_run: `bool`, optional + Return a dictionary of parameters which describe the + anchoring process. The field is not changed, even if *i* + is True. - axes: deprecated at version 3.0.0 + {{i: deprecated at version 3.0.0}} kwargs: deprecated at version 3.0.0 :Returns: - `Field` or `None` - The field construct with size-1 axes inserted in its data, - or `None` if the operation was in-place. + `Field` + The rolled field. **Examples:** - >>> g = f.unsqueeze() - >>> f.unsqueeze(['dim2'], inplace=True) + >>> f.iscyclic('X') + True + >>> f.dimension_coordinate('X').data + TODO + >>> print(f.dimension_coordinate('X').array) + [ 0 45 90 135 180 225 270 315] + >>> g = f.anchor('X', 230) + >>> print(g.dimension_coordinate('X').array) + [270 315 0 45 90 135 180 225] + >>> g = f.anchor('X', cf.Data(590, 'degreesE')) + >>> print(g.dimension_coordinate('X').array) + [630 675 360 405 450 495 540 585] + >>> g = f.anchor('X', cf.Data(-490, 'degreesE')) + >>> print(g.dimension_coordinate('X').array) + [-450 -405 -720 -675 -630 -585 -540 -495] + + >>> f.iscyclic('X') + True + >>> f.dimension_coordinate('X').data + + >>> f.anchor('X', 10000).dimension_coordinate('X').data + + >>> d = f.anchor('X', 10000, dry_run=True) + >>> d + {'axis': 'domainaxis2', + 'nperiod': , + 'roll': 28} + >>> (f.roll(d['axis'], d['roll']).dimension_coordinate( + ... d['axis']) + d['nperiod']).data + """ if kwargs: _DEPRECATION_ERROR_KWARGS( - self, "unsqueeze", kwargs - ) # pragma: no cover - - if axes is not None: - _DEPRECATION_ERROR_KWARGS( - self, - "unsqueeze", - {"axes": axes}, - "All size one domain axes missing from the data are " - "inserted. Use method 'insert_dimension' to insert an " - "individual size one domain axis.", + self, "anchor", kwargs ) # pragma: no cover - f = _inplace_enabled_define_and_cleanup(self) - - size_1_axes = self.domain_axes(filter_by_size=(1,), todict=True) - for axis in set(size_1_axes).difference(self.get_data_axes()): - f.insert_dimension(axis, position=0, inplace=True) - - return f - - def auxiliary_coordinate( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Return an auxiliary coordinate construct, or its key. - - .. versionadded:: 3.0.0 + axis = self.domain_axis(axis, key=True) - .. seealso:: `construct`, `auxiliary_coordinates`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + if dry_run: + f = self + else: + f = _inplace_enabled_define_and_cleanup(self) - :Parameters: + dim = f.dimension_coordinate(filter_by_axis=(axis,), default=None) + if dim is None: + raise ValueError( + "Can't shift non-cyclic " + f"{f.constructs.domain_axis_identity(axis)!r} axis" + ) - identity: optional - Select the auxiliary coordinate construct by one of: + period = dim.period() + if period is None: + raise ValueError(f"Cyclic {dim.identity()!r} axis has no period") - * `None`. This is the default, which selects the - auxiliary coordinate construct when there is only one - of them. 
+ value = Data.asdata(value) + if not value.Units: + value = value.override_units(dim.Units) + elif not value.Units.equivalent(dim.Units): + raise ValueError( + f"Anchor value has incompatible units: {value.Units!r}" + ) - * The identity or key of an auxiliary coordinate - construct. + axis_size = f.domain_axes(todict=True)[axis].get_size() + if axis_size <= 1: + # Don't need to roll a size one axis + if dry_run: + return {"axis": axis, "roll": 0, "nperiod": 0} + else: + if inplace: + f = None - * The identity or key of a domain axis construct that is - spanned by a unique 1-d auxiliary coordinate - construct's data. + return f - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - auxiliary coordinate construct's data. + c = dim.get_data(_fill_value=False) - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. + if dim.increasing: + # Adjust value so it's in the range [c[0], c[0]+period) + n = ((c[0] - value) / period).ceil() + value1 = value + n * period - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + shift = axis_size - numpy_argmax((c - value1 >= 0).array) + if not dry_run: + f.roll(axis, shift, inplace=True) - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + dim = f.dimension_coordinate(filter_by_axis=(axis,)) - A construct key may optionally have the ``'key%'`` - prefix. For example ``'auxiliarycoordinate2'`` and - ``'key%auxiliarycoordinate2'`` are both acceptable keys. + n = ((value - dim.data[0]) / period).ceil() + else: + # Adjust value so it's in the range (c[0]-period, c[0]] + n = ((c[0] - value) / period).floor() + value1 = value + n * period - A position of a domain axis construct in the field - construct's data is specified by an integer index. + shift = axis_size - numpy_argmax((value1 - c >= 0).array) - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + if not dry_run: + f.roll(axis, shift, inplace=True) - *Parameter example:* - ``identity='Y'`` + # TODO should this call be like the one above? + dim = f.dimension_coordinate(filter_by_axis=(axis,)) - *Parameter example:* - ``identity='latitude'`` + n = ((value - dim.data[0]) / period).floor() - *Parameter example:* - ``identity='long_name=Latitude'`` + if dry_run: + return {"axis": axis, "roll": shift, "nperiod": n * period} - *Parameter example:* - ``identity='auxiliarycoordinate1'`` + if n: + np = n * period + dim += np + bounds = dim.get_bounds(None) + if bounds is not None: + bounds += np - *Parameter example:* - ``identity='domainaxis2'`` + return f - *Parameter example:* - ``identity='ncdim%y'`` + def argmax(self, axis=None): + """Return the indices of the maximum values along an axis. - *Parameter example:* - ``identity=0`` + If no axis is specified then the returned index locates the + maximum of the whole data. - key: `bool`, optional - If True then return the selected construct key. 
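# Illustrative sketch (not from the patch) of the documented anchoring
# behaviour: the anchor value ends up "equal to or just before" the new
# first coordinate value.  Assumes cf.example_field(2) and that its cyclic
# 'X' axis has a period set, which `anchor` requires.
import cf

f = cf.example_field(2)
print(f.dimension_coordinate("X").array)
# [ 22.5  67.5 112.5 157.5 202.5 247.5 292.5 337.5]
g = f.anchor("X", 200)
print(g.dimension_coordinate("X").array)
# expected to start at 202.5, the first coordinate value not less than 200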
By - default the construct itself is returned. + .. seealso:: `argmin`, `where` - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + :Parameters: :Returns: - `AuxiliaryCoordinate` or `str` - The selected auxiliary coordinate construct, or its key. - **Examples:** - TODO + >>> g = f.argmax('T') """ - c = self._select_construct( - ("auxiliary_coordinate",), - "auxiliary_coordinate", - identity, - key=key, - item=item, - default=None, - **filter_kwargs, - ) - if c is not None: - return c + print("This method is not ready for use.") + return - if identity: - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("auxiliary_coordinate",), - "auxiliary_coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) + # Keep these commented lines for using with the future dask version + # + # standard_name = None + # + # if axis is not None: + # axis_key = self.domain_axis( + # axis, key=True, default=ValueError("TODO") + # ) + # axis = self.get_data_axes.index(axis_key) + # standard_name = self.domain_axis_identity( + # axis_key, strict=True, default=None + # ) + # + # indices = self.data.argmax(axis, unravel=True) + # + # if axis is None: + # return self[indices] + # + # # What if axis_key does not span array? + # out = self.subspace(**{axis_key: [0]}) + # out.squeeze(axis_key, inplace=True) + # + # for i in indices.ndindex(): + # out.data[i] = org.data[indices[i].datum()] + # + # for key, c in tuple( + # out.constructs.filter_by_type( + # "dimension_coordinate", + # "auxiliary_coordinate", + # "cell_measure", + # "domain_ancillary", + # "field_ancillary", + # ) + # .filter_by_axis("and", axis_key) + # .items() + # ): + # + # out.del_construct(key) + # + # if c.construct_type == ( + # "cell_measure", + # "domain_ancillary", + # "field_ancillary", + # ): + # continue + # + # aux = self._AuxiliaryCoordinate() + # aux.set_properties(c.properties()) + # + # c_data = c.get_data(None) + # if c_data is not None: + # data = Data.empty(indices.shape, dtype=c.dtype) + # for x in indices.ndindex(): + # data[x] = c_data[indices[x]] + # + # aux.set_data(data, copy=False) + # + # c_bounds_data = c.get_bounds_data(None) + # if c_bounds_data is not None: + # bounds = Data.empty( + # indices.shape + (c_bounds_data.shape[-1],), + # dtype=c_bounds_data.dtype, + # ) + # for x in indices.ndindex(): + # bounds[x] = c_bounds_data[indices[x]] + # + # aux.set_bounds( + # self._Bounds(data=bounds, copy=False), copy=False + # ) + # + # out.set_construct(aux, axes=out.get_data_axes(), copy=False) + # + # if standard_name: + # cm = CellMethod() + # cm.create(standard_name + ": maximum") + # + # return out - if default is None: - return default +# @_manage_log_level_via_verbosity +# def autocyclic(self, key=None, coord=None, verbose=None): +# """Set dimensions to be cyclic. +# +# A dimension is set to be cyclic if it has a unique longitude (or +# grid longitude) dimension coordinate construct with bounds and the +# first and last bounds values differ by 360 degrees (or an +# equivalent amount in other units). +# +# .. versionadded:: 1.0 +# +# .. 
seealso:: `cyclic`, `iscyclic`, `period` +# +# :Parameters: +# +# {{verbose: `int` or `str` or `None`, optional}} +# +# :Returns: +# +# `bool` +# +# """ +# if coord is None: +# key, coord = self.dimension_coordinate( +# "X", item=True, default=(None, None) +# ) +# if coord is None: +# return False +# elif not coord.X: +# return False +# +# bounds = coord.get_bounds(None) +# if bounds is None: +# self.cyclic(key, iscyclic=False) +# return False +# +# data = bounds.get_data(None, _fill_value=False) +# if data is None: +# self.cyclic(key, iscyclic=False) +# return False +# +# units = bounds.Units +# if units.islongitude: +# period = Data(360.0, units="degrees_east") +# elif units == _units_degrees: +# period = Data(360.0, units="degrees") +# else: +# self.cyclic(key, iscyclic=False) +# return False +# +# period.Units = data.Units +# +# if abs(data.last_element() - data.first_element()) != period.array: +# self.cyclic(key, iscyclic=False) +# return False +# +# self.cyclic(key, iscyclic=True, period=period) +# +# return True - return self._default( - default, - f"{self.__class__.__name__}.auxiliary_coordinate() can only " - "return a unique construct", - ) + @_deprecated_kwarg_check("i") + def squeeze(self, axes=None, inplace=False, i=False, **kwargs): + """Remove size 1 axes from the data. - def construct( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Select a metadata construct by its identity. + By default all size 1 axes are removed, but particular size 1 axes + may be selected for removal. - .. seealso:: `del_construct`, `get_construct`, `has_construct`, - `set_construct` + Squeezed domain axis constructs are not removed from the metadata + constructs, nor from the domain of the field construct. + + .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, `flip`, + `remove_axes`, `transpose`, `unsqueeze` :Parameters: - identity: optional - Select the construct. Must be + axes: (sequence of) `str` or `int`, optional + Select the domain axes to squeeze, defined by the domain + axes that would be selected by passing each given axis + description to a call of the field construct's + `domain_axis` method. For example, for a value of ``'X'``, + the domain axis construct returned by + ``f.domain_axis('X')`` is selected. - * The identity or key of a metadata construct. + If no axes are provided then all size 1 axes are squeezed. - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. + {{inplace: `bool`, optional}} - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + {{i: deprecated at version 3.0.0}} - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + kwargs: deprecated at version 3.0.0 - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. 
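# Illustrative sketch (not from the patch) of the cyclicity test in the
# commented-out `autocyclic` code above: an axis is treated as cyclic when
# its first and last longitude bounds differ by one period.  Assumes
# cf.example_field(2) and that its longitude coordinate has bounds.
import cf

f = cf.example_field(2)
b = f.dimension_coordinate("X").bounds.data
print(abs(b.last_element() - b.first_element()))  # expected 360.0
print(f.iscyclic("X"))                            # expected True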
+ :Returns: - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + `Field` or `None` + The field construct with squeezed data, or `None` if the + operation was in-place. - *Parameter example:* - ``identity='T' + **Examples:** - *Parameter example:* - ``identity='measure:area'`` + >>> g = f.squeeze() + >>> g = f.squeeze('time') + >>> g = f.squeeze(1) + >>> g = f.squeeze(['time', 1, 'dim2']) + >>> f.squeeze(['dim2'], inplace=True) - *Parameter example:* - ``identity='cell_area'`` + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "squeeze", kwargs + ) # pragma: no cover - *Parameter example:* - ``identity='long_name=Cell Area'`` + data_axes = self.get_data_axes() - *Parameter example:* - ``identity='cellmeasure1'`` + if axes is None: + domain_axes = self.domain_axes(todict=True) + axes = [ + axis + for axis in data_axes + if domain_axes[axis].get_size(None) == 1 + ] + else: + if isinstance(axes, (str, int)): + axes = (axes,) - default: optional - Return the value of the *default* parameter if a construct - can not be found. + axes = [self.domain_axis(x, key=True) for x in axes] + axes = set(axes).intersection(data_axes) + + iaxes = [data_axes.index(axis) for axis in axes] - {{default Exception}} + # Squeeze the field's data array + return super().squeeze(iaxes, inplace=inplace) - If the *default* is `None`, or if *item* is True and - *default* is a 2-tuple of `Ǹone`s, then TODO + @_inplace_enabled(default=False) + def swapaxes(self, axis0, axis1, inplace=False, i=False): + """Interchange two axes of the data. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + .. seealso:: `flatten`, `flip`, `insert_dimension`, `squeeze`, + `transpose` + + :Parameters: + + axis0, axis1: TODO + Select the axes to swap. Each axis is identified by its + original integer position. - item: TODO + {{inplace: `bool`, optional}} :Returns: - The selected coordinate construct, or its key. + `Field` or `None` + The field construct with data with swapped axis + positions. If the operation was in-place then `None` is + returned. 
**Examples:** - >>> f = cf.example_field(1) - >>> print(f) - Field: air_temperature (ncvar%ta) - --------------------------------- - Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K - Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum - Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K - Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] - : grid_latitude(10) = [2.2, ..., -1.76] degrees - : grid_longitude(9) = [-4.7, ..., -1.18] degrees - : time(1) = [2019-01-01 00:00:00] - Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N - : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E - : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] - Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 - Coord references: grid_mapping_name:rotated_latitude_longitude - : standard_name:atmosphere_hybrid_height_coordinate - Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m - : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] - : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m - - >>> f.construct('long_name=Grid latitude name') - - >>> f.construct('ncvar%a') - - >>> f.construct('measure:area') - - >>> f.construct('domainaxis0') - - >>> f.construct('height') - Traceback (most recent call last): - ... - ValueError: Can't return zero constructs - >>> f.construct('height', default=False) - False - >>> f.construct('height', default=TypeError("No height coordinates")) - Traceback (most recent call last): - ... - TypeError: No height coordinates + >>> f.shape + (1, 2, 3) + >>> f.swapaxes(1, 0).shape + (2, 1, 3) + >>> f.swapaxes(0, -1).shape + (3, 2, 1) + >>> f.swapaxes(1, 1).shape + (1, 2, 3) + >>> f.swapaxes(-1, -1).shape + (1, 2, 3) """ - return self._select_construct( - (), - "construct", - identity, - key=key, - item=item, - default=default, - **filter_kwargs, - ) + data_axes = self.get_data_axes(default=None) - def domain_ancillary( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Return a domain ancillary construct, or its key. + da_key0 = self.domain_axis(axis0, key=True) + da_key1 = self.domain_axis(axis1, key=True) - .. versionadded:: 3.0.0 + if da_key0 not in data_axes: + raise ValueError( + "Can't swapaxes {}: Bad axis specification: {!r}".format( + self.__class__.__name__, axis0 + ) + ) - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillaries`, - `domain_axis`, `field_ancillary` + if da_key1 not in data_axes: + raise ValueError( + "Can't swapaxes {}: Bad axis specification: {!r}".format( + self.__class__.__name__, axis1 + ) + ) - :Parameters: + axis0 = data_axes.index(da_key0) + axis1 = data_axes.index(da_key1) - identity: optional - Select the domain ancillary construct by one of: + f = _inplace_enabled_define_and_cleanup(self) + super(Field, f).swapaxes(axis0, axis1, inplace=True) - * `None`. This is the default, which selects the domain - ancillary construct when there is only one of them. 
+ if data_axes is not None: + data_axes = list(data_axes) + data_axes[axis1], data_axes[axis0] = ( + data_axes[axis0], + data_axes[axis1], + ) + f.set_data_axes(data_axes) - * The identity or key of a domain ancillary construct. + return f - * The identity or key of a domain axis construct that is - spanned by a unique 1-d domain ancillary construct's data. + @_deprecated_kwarg_check("i") + def transpose( + self, + axes=None, + constructs=False, + inplace=False, + items=True, + i=False, + **kwargs, + ): + """Permute the axes of the data array. - * The position, in the field construct's data, of a domain - axis construct that is spanned by a unique 1-d domain - ancillary construct's data. + By default the order of the axes is reversed, but any ordering may + be specified by selecting the axes of the output in the required + order. - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. + By default metadata constructs are not transposed, but they may be + if the *constructs* parameter is set. - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + .. seealso:: `domain_axis`, `flatten`, `insert_dimension`, `flip`, + `squeeze`, `unsqueeze` - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] + :Parameters: - A construct key may optionally have the ``'key%'`` - prefix. For example ``'domainancillary2'`` and - ``'key%domainancillary2'`` are both acceptable keys. + axes: (sequence of) `str` or `int`, optional + Select the domain axis order, defined by the domain axes + that would be selected by passing each given axis + description to a call of the field construct's + `domain_axis` method. For example, for a value of ``'X'``, + the domain axis construct returned by + ``f.domain_axis('X')`` is selected. - A position of a domain axis construct in the field - construct's data is specified by an integer index. + Each dimension of the field construct's data must be + provided, or if no axes are specified then the axis order + is reversed. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + constructs: `bool` + If True then metadata constructs are also transposed so + that their axes are in the same relative order as in the + transposed data array of the field. By default metadata + constructs are not altered. - *Parameter example:* - ``identity='Y'`` + {{inplace: `bool`, optional}} - *Parameter example:* - ``identity='latitude'`` + items: deprecated at version 3.0.0 + Use the *constructs* parameter instead. - *Parameter example:* - ``identity='long_name=Latitude'`` + {{i: deprecated at version 3.0.0}} - *Parameter example:* - ``identity='domainancillary1'`` + kwargs: deprecated at version 3.0.0 - *Parameter example:* - ``identity='ncdim%y'`` + :Returns: - *Parameter example:* - ``identity='domainaxis2'`` + `Field` or `None` + The field construct with transposed data, or `None` if the + operation was in-place. 
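As a rough sketch of the `transpose` behaviour documented above (not a definitive recipe): when an order is given every data axis must be named, and `constructs=True` also reorders the metadata constructs. The axis names and shapes assume `cf.example_field(1)` as dumped later in this hunk.

import cf

f = cf.example_field(1)
print(f.shape)  # (1, 10, 9)

# Reverse the axis order (the default when no axes are given)
print(f.transpose().shape)  # (9, 10, 1)

# Give an explicit order; every data axis must be named
g = f.transpose(
    ["grid_longitude", "atmosphere_hybrid_height_coordinate", "grid_latitude"],
    constructs=True,
)
print(g.shape)  # (9, 1, 10)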
- *Parameter example:* - ``identity=0`` + **Examples:** - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + >>> f.ndim + 3 + >>> g = f.transpose() + >>> g = f.transpose(['time', 1, 'dim2']) + >>> f.transpose(['time', -2, 'dim2'], inplace=True) - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + """ + if not items: + _DEPRECATION_ERROR_KWARGS( + self, + "transpose", + {"items": items}, + "Use keyword 'constructs' instead.", + ) # pragma: no cover - :Returns: + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "transpose", kwargs + ) # pragma: no cover - `DomainAncillary` or `str` - The selected domain ancillary coordinate construct, or its - key. + if axes is None: + iaxes = list(range(self.ndim - 1, -1, -1)) + else: + data_axes = self.get_data_axes(default=()) + if isinstance(axes, (str, int)): + axes = (axes,) + axes2 = [self.domain_axis(x, key=True) for x in axes] + if sorted(axes2) != sorted(data_axes): + raise ValueError( + "Can't transpose {}: Bad axis specification: {!r}".format( + self.__class__.__name__, axes + ) + ) - **Examples:** + iaxes = [data_axes.index(axis) for axis in axes2] - TODO + # Transpose the field's data array + return super().transpose(iaxes, constructs=constructs, inplace=inplace) + + # @_inplace_enabled(default=False) + # def uncompress(self, inplace=False): + # '''Uncompress the construct. + # + # Compression saves space by identifying and removing unwanted + # missing data. Such compression techniques store the data more + # efficiently and result in no precision loss. + # + # Whether or not the construct is compressed does not alter its + # functionality nor external appearance. + # + # The following type of compression are available: + # + # * Ragged arrays for discrete sampling geometries (DSG). Three + # different types of ragged array representation are + # supported. + # + # .. + # + # * Compression by gathering. + # + # .. versionadded:: 3.0.6 + # + # .. seealso:: `cf.write`, `compress`, `flatten`, `varray` + # + # :Parameters: + # + # {{inplace: `bool`, optional}} + # + # :Returns: + # + # `Field` or `None` + # The uncompressed field construct, or `None` if the + # operation was in-place. + # + # **Examples:** + # + # TODO + # + # ''' + # f = _inplace_enabled_define_and_cleanup(self) + # super(Field, f).uncompress(inplace=True) + # + # for c in f.constructs.filter_by_data().values(): + # c.uncompress(inplace=True) + # + # return f - """ - return self._select_construct( - ("domain_ancillary",), - "domain_ancillary", - identity, - key=key, - default=default, - item=item, - **filter_kwargs, - ) + @_deprecated_kwarg_check("i") + @_inplace_enabled(default=False) + def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): + """Insert size 1 axes into the data array. - def cell_measure( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Select a cell measure construct by its identity. + All size 1 domain axes which are not spanned by the field + construct's data are inserted. - .. versionadded:: 3.0.0 + The axes are inserted into the slowest varying data array positions. - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measures`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + .. 
seealso:: `flatten`, `flip`, `insert_dimension`, `squeeze`, + `transpose` :Parameters: - identity: optional - Select the cell measure construct by: - - * `None`. This is the default, which selects the cell - measure construct when there is only one of them. - - * The identity or key of a cell measure construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d cell measure construct's data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - cell measure construct's data. - - A construct identity is specified by a string - (e.g. ``'long_name=Cell Area', ``'ncvar%areacello'``, - etc.); a `Query` object (e.g. ``cf.eq('measure:area')``); - or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'cellmeasure2'`` and - ``'key%cellmeasure2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + {{inplace: `bool`, optional}} - *Parameter example:* - ``identity='measure:area'`` + {{i: deprecated at version 3.0.0}} - *Parameter example:* - ``identity='cell_area'`` + axes: deprecated at version 3.0.0 - *Parameter example:* - ``identity='long_name=Cell Area'`` + kwargs: deprecated at version 3.0.0 - *Parameter example:* - ``identity='cellmeasure1'`` + :Returns: - *Parameter example:* - ``identity='domainaxis2'`` + `Field` or `None` + The field construct with size-1 axes inserted in its data, + or `None` if the operation was in-place. - *Parameter example:* - ``identity=0`` + **Examples:** - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + >>> g = f.unsqueeze() + >>> f.unsqueeze(['dim2'], inplace=True) - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "unsqueeze", kwargs + ) # pragma: no cover - :Returns: + if axes is not None: + _DEPRECATION_ERROR_KWARGS( + self, + "unsqueeze", + {"axes": axes}, + "All size one domain axes missing from the data are " + "inserted. Use method 'insert_dimension' to insert an " + "individual size one domain axis.", + ) # pragma: no cover - `CellMeasure`or `str` - The selected cell measure construct, or its key. 
+ f = _inplace_enabled_define_and_cleanup(self) - **Examples:** + size_1_axes = self.domain_axes(filter_by_size=(1,), todict=True) + for axis in set(size_1_axes).difference(self.get_data_axes()): + f.insert_dimension(axis, position=0, inplace=True) - TODO + return f - """ - return self._select_construct( - ("cell_measure",), - "cell_meausure", - identity, - key=key, - default=default, - item=item, - **filter_kwargs, - ) +# def auxiliary_coordinate( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Return an auxiliary coordinate construct, or its key. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinates`, `cell_measure`, +# `cell_method`, `coordinate`, `coordinate_reference`, +# `dimension_coordinate`, `domain_ancillary`, +# `domain_axis`, `field_ancillary` +# +# :Parameters: +# +# identity: optional +# Select the auxiliary coordinate construct by one of: +# +# * `None`. This is the default, which selects the +# auxiliary coordinate construct when there is only one +# of them. +# +# * The identity or key of an auxiliary coordinate +# construct. +# +# * The identity or key of a domain axis construct that is +# spanned by a unique 1-d auxiliary coordinate +# construct's data. +# +# * The position, in the field construct's data, of a +# domain axis construct that is spanned by a unique 1-d +# auxiliary coordinate construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); a `Query` object +# (e.g. ``cf.eq('longitude')``); or a compiled regular +# expression (e.g. ``re.compile('^atmosphere')``) that +# selects the relevant constructs whose identities match via +# `re.search`. +# +# A construct has a number of identities, and is selected if +# any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'auxiliarycoordinate2'`` and +# ``'key%auxiliarycoordinate2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='Y'`` +# +# *Parameter example:* +# ``identity='latitude'`` +# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='auxiliarycoordinate1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# *Parameter example:* +# ``identity=0`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `AuxiliaryCoordinate` or `str` +# The selected auxiliary coordinate construct, or its key. 
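A small sketch of the `unsqueeze` implementation completed above, together with the single-axis `insert_dimension` call it is built on. It assumes that `cf.example_field(0)` has a size 1 time domain axis that is not spanned by its data; the printed shapes are therefore indicative only.

import cf

f = cf.example_field(0)
print(f.shape)  # e.g. (5, 8): the size 1 time axis is not a data axis

# Insert every size 1 domain axis missing from the data, at position 0
g = f.unsqueeze()
print(g.shape)  # e.g. (1, 5, 8)

# The single-axis equivalent used inside unsqueeze()
h = f.insert_dimension("time", position=0)
print(h.shape)  # e.g. (1, 5, 8)

# squeeze() reverses the insertion
print(g.squeeze().shape)  # e.g. (5, 8)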
+# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("auxiliary_coordinate",), +# "auxiliary_coordinate", +# identity, +# construct=True, +# key=key, +# item=item, +# default=default, +# **filter_kwargs, +# ) +# if c is not None: +# return c +# +# if identity: +# da_key = self.domain_axis(*identity, key=True, default=None) +# if da_key is not None: +# return self._select_construct( +# ("auxiliary_coordinate",), +# "auxiliary_coordinate", +# (), +# construct=True, +# key=key, +# item=item, +# default=default, +# filter_by_axis=(da_key,), +# axis_mode="exact", +# ) +# +# if default is None: +# return default +# +# return self._default( +# default, +# f"{self.__class__.__name__}.auxiliary_coordinate() can only " +# "return a unique construct", +# ) +# +# def construct( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Select a metadata construct by its identity. +# +# .. seealso:: `del_construct`, `get_construct`, `has_construct`, +# `set_construct` +# +# :Parameters: +# +# identity: optional +# Select the construct. Must be +# +# * The identity or key of a metadata construct. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); a `Query` object +# (e.g. ``cf.eq('longitude')``); or a compiled regular +# expression (e.g. ``re.compile('^atmosphere')``) that +# selects the relevant constructs whose identities match via +# `re.search`. +# +# A construct has a number of identities, and is selected if +# any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'dimensioncoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='T' +# +# *Parameter example:* +# ``identity='measure:area'`` +# +# *Parameter example:* +# ``identity='cell_area'`` +# +# *Parameter example:* +# ``identity='long_name=Cell Area'`` +# +# *Parameter example:* +# ``identity='cellmeasure1'`` +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. +# +# {{default Exception}} +# +# If the *default* is `None`, or if *item* is True and +# *default* is a 2-tuple of `Ǹone`s, then TODO +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# item: TODO +# +# :Returns: +# +# The selected coordinate construct, or its key. 
+# +# **Examples:** +# +# >>> f = cf.example_field(1) +# >>> print(f) +# Field: air_temperature (ncvar%ta) +# --------------------------------- +# Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K +# Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum +# Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K +# Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] +# : grid_latitude(10) = [2.2, ..., -1.76] degrees +# : grid_longitude(9) = [-4.7, ..., -1.18] degrees +# : time(1) = [2019-01-01 00:00:00] +# Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N +# : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E +# : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] +# Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 +# Coord references: grid_mapping_name:rotated_latitude_longitude +# : standard_name:atmosphere_hybrid_height_coordinate +# Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m +# : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] +# : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m +# +# >>> f.construct('long_name=Grid latitude name') +# +# >>> f.construct('ncvar%a') +# +# >>> f.construct('measure:area') +# +# >>> f.construct('domainaxis0') +# +# >>> f.construct('height') +# Traceback (most recent call last): +# ... +# ValueError: Can't return zero constructs +# >>> f.construct('height', default=False) +# False +# >>> f.construct('height', default=TypeError("No height coordinates")) +# Traceback (most recent call last): +# ... +# TypeError: No height coordinates +# +# """ +# return self._filter_interface( +# (), +# "construct", +# identity, +# construct=True, +# key=key, +# item=item, +# default=default, +# **filter_kwargs, +# ) +# +# def domain_ancillary( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Return a domain ancillary construct, or its key. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, +# `cell_method`, `coordinate`, `coordinate_reference`, +# `dimension_coordinate`, `domain_ancillaries`, +# `domain_axis`, `field_ancillary` +# +# :Parameters: +# +# identity: optional +# Select the domain ancillary construct by one of: +# +# * `None`. This is the default, which selects the domain +# ancillary construct when there is only one of them. +# +# * The identity or key of a domain ancillary construct. +# +# * The identity or key of a domain axis construct that is +# spanned by a unique 1-d domain ancillary construct's data. +# +# * The position, in the field construct's data, of a domain +# axis construct that is spanned by a unique 1-d domain +# ancillary construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); a `Query` object +# (e.g. ``cf.eq('longitude')``); or a compiled regular +# expression (e.g. ``re.compile('^atmosphere')``) that +# selects the relevant constructs whose identities match via +# `re.search`. +# +# A construct has a number of identities, and is selected if +# any of them match any of those provided. 
A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'domainancillary2'`` and +# ``'key%domainancillary2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='Y'`` +# +# *Parameter example:* +# ``identity='latitude'`` +# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='domainancillary1'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity=0`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `DomainAncillary` or `str` +# The selected domain ancillary coordinate construct, or its +# key. +# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("domain_ancillary",), +# "domain_ancillary", +# identity, +# construct=True, +# key=key, +# default=default, +# item=item, +# **filter_kwargs, +# ) +# +# def cell_measure( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Select a cell measure construct by its identity. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measures`, +# `cell_method`, `coordinate`, `coordinate_reference`, +# `dimension_coordinate`, `domain_ancillary`, +# `domain_axis`, `field_ancillary` +# +# :Parameters: +# +# identity: optional +# Select the cell measure construct by: +# +# * `None`. This is the default, which selects the cell +# measure construct when there is only one of them. +# +# * The identity or key of a cell measure construct. +# +# * The identity or key of a domain axis construct that is +# spanned by a unique 1-d cell measure construct's data. +# +# * The position, in the field construct's data, of a +# domain axis construct that is spanned by a unique 1-d +# cell measure construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'long_name=Cell Area', ``'ncvar%areacello'``, +# etc.); a `Query` object (e.g. ``cf.eq('measure:area')``); +# or a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. 
For example ``'cellmeasure2'`` and +# ``'key%cellmeasure2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='measure:area'`` +# +# *Parameter example:* +# ``identity='cell_area'`` +# +# *Parameter example:* +# ``identity='long_name=Cell Area'`` +# +# *Parameter example:* +# ``identity='cellmeasure1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity=0`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `CellMeasure`or `str` +# The selected cell measure construct, or its key. +# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("cell_measure",), +# "cell_meausure", +# identity, +# construct=True, +# key=key, +# default=default, +# item=item, +# **filter_kwargs, +# ) def cell_method( self, @@ -16327,10 +16332,11 @@ def cell_method( TODO """ - c = self._select_construct( + c = self._filter_interface( ("cell_method",), "cell_method", identity, + construct=True, key=key, item=item, default=None, @@ -16367,255 +16373,258 @@ def cell_method( "return a unique construct", ) - def coordinate( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Return a dimension or auxiliary coordinate construct, or its - key. - - .. versionadded:: 3.0.0 - - .. seealso:: `construct`, `auxiliary_coordinate`, `coordinates`, - `dimension_coordinate` - - :Parameters: - - identity: optional - Select the dimension coordinate construct by one of: - - * `None`. This is the default, which selects the - coordinate construct when there is only one of them. - - * The identity or key of a dimension coordinate - construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d coordinate construct's data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - coordinate construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'auxiliarycoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. 
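The live change above routes `cell_method` selection through `_filter_interface` with `construct=True`; the public behaviour is intended to be unchanged. A hedged usage sketch follows, assuming the two cell methods of `cf.example_field(1)` shown in the dump in this hunk and that `'method:mean'` is among the first cell method's identities.

import cf

f = cf.example_field(1)

# All cell method constructs, keyed by construct identifier
print(f.cell_methods())

# Select one cell method by identity, or return its key instead
cm = f.cell_method("method:mean")
key = f.cell_method("method:mean", key=True)
print(key, cm)

# A non-matching identity falls back to the *default* value
print(f.cell_method("method:variance", default=None))  # None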
- - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='Y'`` - - *Parameter example:* - ``identity='latitude'`` - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - :Returns: - - `DimensionCoordinate` or `AuxiliaryCoordinate` or `str` - The selected dimension or auxiliary coordinate construct, - or its key. - - **Examples:** - - TODO - - """ - c = self._select_construct( - ("dimension_coordinate", "auxiliary_coordinate"), - "coordinate", - identity, - key=key, - item=item, - default=None, - **filter_kwargs, - ) - if c is not None: - return c - - if identity: - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("dimension_coordinate", "auxiliary_coordinate"), - "coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) - - if default is None: - return default - - return self._default( - default, - f"{self.__class__.__name__}.coordinate() can only " - "return a unique construct", - ) - - def coordinate_reference( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Return a coordinate reference construct, or its key. - - .. versionadded:: 3.0.0 - - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_references`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` - - :Parameters: - - identity: optional - Select the coordinate reference construct by one of: - - * `None`. This is the default, which selects the - coordinate reference construct when there is only one - of them. - - * The identity or key of a coordinate reference - construct. - - A construct identity is specified by a string - (e.g. ``'grid_mapping_name:latitude_longitude'``, - ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a - `Query` object (e.g. ``cf.eq('latitude_longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - two identities: - - >>> x.identities() - ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] - - A identity's prefix of ``'grid_mapping_name:'`` or - ``'standard_name:'`` may be omitted - (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` - and ``'atmosphere_hybrid_height_coordinate'`` are both - acceptable identities). - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'coordinatereference2'`` and - ``'key%coordinatereference2'`` are both acceptable keys. 
- - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` - - *Parameter example:* - ``identity='grid_mapping_name:rotated_latitude_longitude'`` - - *Parameter example:* - ``identity='transverse_mercator'`` - - *Parameter example:* - ``identity='coordinatereference1'`` - - *Parameter example:* - ``identity='key%coordinatereference1'`` - - *Parameter example:* - ``identity='ncvar%lat_lon'`` - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - :Returns: - - `CoordinateReference` or `str` - The selected coordinate reference construct, or its key. - - **Examples:** - - TODO - - """ - return self._select_construct( - ("coordinate_reference",), - "coordinate_reference", - identity, - key=key, - default=default, - item=item, - **filter_kwargs, - ) +# def coordinate( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Return a dimension or auxiliary coordinate construct, or its +# key. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinate`, `coordinates`, +# `dimension_coordinate` +# +# :Parameters: +# +# identity: optional +# Select the dimension coordinate construct by one of: +# +# * `None`. This is the default, which selects the +# coordinate construct when there is only one of them. +# +# * The identity or key of a dimension coordinate +# construct. +# +# * The identity or key of a domain axis construct that is +# spanned by a unique 1-d coordinate construct's data. +# +# * The position, in the field construct's data, of a +# domain axis construct that is spanned by a unique 1-d +# coordinate construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); a `Query` object +# (e.g. ``cf.eq('longitude')``); or a compiled regular +# expression (e.g. ``re.compile('^atmosphere')``) that +# selects the relevant constructs whose identities match via +# `re.search`. +# +# A construct has a number of identities, and is selected if +# any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'auxiliarycoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. 
+# +# *Parameter example:* +# ``identity='Y'`` +# +# *Parameter example:* +# ``identity='latitude'`` +# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='dimensioncoordinate1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `DimensionCoordinate` or `AuxiliaryCoordinate` or `str` +# The selected dimension or auxiliary coordinate construct, +# or its key. +# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("dimension_coordinate", "auxiliary_coordinate"), +# "coordinate", +# identity, +# construct=True, +# key=key, +# item=item, +# default=default, +# **filter_kwargs, +# ) +# if c is not None: +# return c +# +# if identity: +# da_key = self.domain_axis(*identity, key=True, default=None) +# if da_key is not None: +# return self._filter_interface( +# ("dimension_coordinate", "auxiliary_coordinate"), +# "coordinate", +# (), +# construct=True, +# key=key, +# item=item, +# default=default, +# filter_by_axis=(da_key,), +# axis_mode="exact", +# ) +# +# if default is None: +# return default +# +# return self._default( +# default, +# f"{self.__class__.__name__}.coordinate() can only " +# "return a unique construct", +# ) +# +# def coordinate_reference( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Return a coordinate reference construct, or its key. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, +# `cell_method`, `coordinate`, `coordinate_references`, +# `dimension_coordinate`, `domain_ancillary`, +# `domain_axis`, `field_ancillary` +# +# :Parameters: +# +# identity: optional +# Select the coordinate reference construct by one of: +# +# * `None`. This is the default, which selects the +# coordinate reference construct when there is only one +# of them. +# +# * The identity or key of a coordinate reference +# construct. +# +# A construct identity is specified by a string +# (e.g. ``'grid_mapping_name:latitude_longitude'``, +# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a +# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or +# a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# two identities: +# +# >>> x.identities() +# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] +# +# A identity's prefix of ``'grid_mapping_name:'`` or +# ``'standard_name:'`` may be omitted +# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` +# and ``'atmosphere_hybrid_height_coordinate'`` are both +# acceptable identities). +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'coordinatereference2'`` and +# ``'key%coordinatereference2'`` are both acceptable keys. 
+# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` +# +# *Parameter example:* +# ``identity='grid_mapping_name:rotated_latitude_longitude'`` +# +# *Parameter example:* +# ``identity='transverse_mercator'`` +# +# *Parameter example:* +# ``identity='coordinatereference1'`` +# +# *Parameter example:* +# ``identity='key%coordinatereference1'`` +# +# *Parameter example:* +# ``identity='ncvar%lat_lon'`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `CoordinateReference` or `str` +# The selected coordinate reference construct, or its key. +# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("coordinate_reference",), +# "coordinate_reference", +# identity, +# construct=True, +# key=key, +# default=default, +# item=item, +# **filter_kwargs, +# ) def field_ancillary( self, @@ -16727,402 +16736,329 @@ def field_ancillary( TODO """ - return self._select_construct( + return self._filter_interface( ("field_ancillary",), "field_ancillary", identity, + construct=True, key=key, default=default, item=item, **filter_kwargs, ) - def dimension_coordinate( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Select a dimension coordinate construct. - - .. versionadded:: 3.0.0 - - .. seealso:: `construct`, `dimension_coordinates` - - :Parameters: - - identity: optional - Select dimension coordinate constructs that have an - identity, defined by their `!identities` methods, that - matches any of the given values. In addition to - construct identities, the values are matched against: - - Additionally, the values are matched against construct - identifiers, with or without the ``'key%'`` prefix. - - Additionly, TODOx the values are matched against the identity or - construct identifier, with or without the ``'key%'`` - prefix, of a domain axis construct that is spanned by - a dimension coordinate construct's data. - - *Parameter example:* - ``'domainaxis2'`` - - *Parameter example:* - ``'ncdim%latitude'`` - - * The integer position, in the field construct's data, - of the domain axis construct that is spanned by a - dimension coordinate construct's data. - - *Parameter example:* - ``0'`` - - *Parameter example:* - ``cf.gt(2)`` - - If no values are provided then all constructs are - selected. - - {{value match}} - - {{displayed identity}} - - *Parameter example:* - ``'Y'`` - - *Parameter example:* - ``latitude'`` - - *Parameter example:* - ``re.compile('^lat')`` - - *Parameter example:* - ``'long_name=Latitude'`` - - *Parameter example:* - ``'Z', 'altutude'`` - - key: `bool`, optional - If True then return the selected construct - identifier. By default the construct itself is - returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - item: `bool`, optional - If True then return the selected construct and its - construct identifier in a 2-tuple. By default the only - construct is returned. - - .. 
versionadded:: 3.9.0 - - :Returns: - - `DimensionCoordinate` or `str` or `tuple` - The selected dimension coordinate construct, or its - construct identifier, or both. - - **Examples:** - - TODO - - """ - ctypes = [i for i in "XTYZ" if i in identity] - if ctypes: - identity = [i for i in identity if i not in ctypes] - filter_kwargs["filter_by_coordinate_type"] = ctypes - last_filter = ("filter_by_coordinate_type",) - else: - last_filter = None - - - c = self._filter_interface( - ("dimension_coordinate",), - "dimension_coordinate", - identity, - construct=True, - key=key, - item=item, - default=None, - _last_filter=last_filter, - _identity_config={"identities_kwargs": {"ctype": False}}, - **filter_kwargs, - ) - if c is not None: - return c +# def dimension_coordinate( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Select a dimension coordinate construct. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `dimension_coordinates` +# +# :Parameters: +# +# identity: optional +# Select dimension coordinate constructs that have an +# identity, defined by their `!identities` methods, that +# matches any of the given values. In addition to +# construct identities, the values are matched against: +# +# Additionally, the values are matched against construct +# identifiers, with or without the ``'key%'`` prefix. +# +# Additionly, TODOx the values are matched against the identity or +# construct identifier, with or without the ``'key%'`` +# prefix, of a domain axis construct that is spanned by +# a dimension coordinate construct's data. +# +# *Parameter example:* +# ``'domainaxis2'`` +# +# *Parameter example:* +# ``'ncdim%latitude'`` +# +# * The integer position, in the field construct's data, +# of the domain axis construct that is spanned by a +# dimension coordinate construct's data. +# +# *Parameter example:* +# ``0'`` +# +# *Parameter example:* +# ``cf.gt(2)`` +# +# If no values are provided then all constructs are +# selected. +# +# {{value match}} +# +# {{displayed identity}} +# +# *Parameter example:* +# ``'Y'`` +# +# *Parameter example:* +# ``latitude'`` +# +# *Parameter example:* +# ``re.compile('^lat')`` +# +# *Parameter example:* +# ``'long_name=Latitude'`` +# +# *Parameter example:* +# ``'Z', 'altutude'`` +# +# key: `bool`, optional +# If True then return the selected construct +# identifier. By default the construct itself is +# returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# item: `bool`, optional +# If True then return the selected construct and its +# construct identifier in a 2-tuple. By default the only +# construct is returned. +# +# .. versionadded:: 3.9.0 +# +# :Returns: +# +# `DimensionCoordinate` or `str` or `tuple` +# The selected dimension coordinate construct, or its +# construct identifier, or both. 
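The (now commented-out) implementation above special-cases the one-character 'X', 'Y', 'Z' and 'T' identities so that they select by coordinate type rather than by name. A usage sketch of the selection behaviour, assuming the `cf.example_field(1)` coordinates dumped earlier in this hunk; the construct keys shown are indicative.

import cf

f = cf.example_field(1)

# Select a dimension coordinate by coordinate type ...
t = f.dimension_coordinate("T")
print(t.identity())  # 'time'

# ... or by name, and optionally return the construct key instead
y = f.dimension_coordinate("grid_latitude")
y_key = f.dimension_coordinate("Y", key=True)
print(y_key, y.shape)  # e.g. 'dimensioncoordinate1' (10,)

# item=True returns the key and the construct as a 2-tuple
key, construct = f.dimension_coordinate("T", item=True)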
+# +# **Examples:** +# +# TODO +# +# """ +# return self._filter_interface( +# ("dimension_coordinate",), +# "dimension_coordinate", +# identity, +# construct=True, +# key=key, +# item=item, +# default=default, +## _identity_config={"identities_kwargs": {"ctype": False}}, +# **filter_kwargs, +# ) +# if c is not None: +# return c # # c = self._select_construct( -# ("dimension_coordinate",), -# "dimension_coordinate", -# identity, -# key=key, -# item=item, -# default=None, -# _last_filter=last_filter, -# _identity_config={"identities_kwargs": {"ctype": False}}, -# **filter_kwargs, -# ) -# if c is not None: -# return c - - if not filter_kwargs and len(identity) == 1 and identity in self.domain_axes(todict=True): - raise DeprecationError() - da_key = self.domain_axis(*identity, key=True, default=None) - if da_key is not None: - return self._select_construct( - ("dimension_coordinate",), - "dimension_coordinate", - (), - key=key, - item=item, - default=default, - filter_by_axis=(da_key,), - axis_mode="exact", - ) - - if default is None: - return None - - return self._default( - default, - f"{self.__class__.__name__}.dimension_coordinate() can only " - "return a unique construct", - ) - - def dimension_coordinates(self, *identities, **filter_kwargs): - """Return dimension coordinate constructs. - - .. versionadded:: 3.0.0 - - .. seealso:: `constructs` - - :Parameters: - - identities: optional - Select dimension coordinate constructs that have an - identity, defined by their `!identities` methods, that - matches any of the given values. - - If no identities are provided then all dimension - coordinate constructs are selected. - - {{value match}} - - {{displayed identity}} - - {{filter_kwargs: optional}} - - :Returns: - - `Constructs` - The selected constructs, unless modified by any - *filter_kwargs* parameters. - - **Examples:** - - >>> f.dimension_coordinates() - Constructs: - {} - - >>> f.dimension_coordinates() - Constructs: - {'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, - 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, - 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, - 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} - - """ - filter_by_identity = filter_kwargs.pop("filter_by_identity", None) - if identities: - if filter_by_identity is not None: - raise TypeError( - f"Can't set {self.__class__.__name__}." - "dimension_coordinates() " - "keyword argument 'filter_by_identity' when " - "positional *identities arguments are also set" - ) - elif filter_by_identity is not None: - identities = filter_by_identity - - ctypes = [i for i in "XTYZ" if i in identities] - if len(ctypes) == len(identities): - filter_kwargs["filter_by_coordinate_type"] = ctypes - return super().dimension_coordinates( - _last_filter=("filter_by_coordinate_type",), - **filter_kwargs - ) - - return super().dimension_coordinates( *identities, - **filter_kwargs) - - def domain_axis( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Return a domain axis construct, or its key. - - .. versionadded:: 3.0.0 - - .. 
seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axes`, `field_ancillary` - - :Parameters: - - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='key%domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - *Parameter example:* - ``identity=2`` - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - :Returns: - - `DomainAxis` or `str` - The selected domain axis construct, or its key. 
- - **Examples:** - - TODO - - """ - # Try for integer index - if identity: - identity2 = [] - - data_axes = self.get_data_axes(default=None) - for i in identity: - try: - identity2.append(data_axes[i]) - except TypeError: - identity2.append(i) - except IndexError: - pass - - if not identity2: - if default is None: - return default - - return self._default( - default, - "Indices do not exist for field construct data dimenions", - ) - - identity = identity2 - -# c = self._select_construct( +# ("dimension_coordinate",), +# "dimension_coordinate", +# identity, +# key=key, +# item=item, +# default=None, +# _last_filter=last_filter, +# _identity_config={"identities_kwargs": {"ctype": False}}, +# **filter_kwargs, +# ) +# if c is not None: +# return c +# +# if not filter_kwargs and len(identity) == 1 and identity in self.domain_axes(todict=True): +# raise DeprecationError() +# da_key = self.domain_axis(*identity, key=True, default=None) +# if da_key is not None: +# return self._filter_interface( +# ("dimension_coordinate",), +# "dimension_coordinate", +# (), +# construct=True, +# key=key, +# item=item, +# default=default, +# filter_by_axis=(da_key,), +# axis_mode="exact", +# ) +# +# if default is None: +# return None +# +# return self._default( +# default, +# f"{self.__class__.__name__}.dimension_coordinate() can only " +# "return a unique construct", +# ) +# +# +# def domain_axis( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Return a domain axis construct, or its key. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, +# `cell_method`, `coordinate`, `coordinate_reference`, +# `dimension_coordinate`, `domain_ancillary`, +# `domain_axes`, `field_ancillary` +# +# :Parameters: +# +# identity: +# Select the domain axis construct by one of: +# +# * An identity or key of a 1-d coordinate construct that +# whose data spans the domain axis construct. +# +# * A domain axis construct identity or key. +# +# * The position of the domain axis construct in the field +# construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, +# ``'ncvar%lat'``, etc.); or a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'dimensioncoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# A position of a domain axis construct in the field +# construct's data is specified by an integer index. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. 
+# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='dimensioncoordinate1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity='key%domainaxis2'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# *Parameter example:* +# ``identity=2`` +# +# key: `bool`, optional +# If True then return the selected construct key. By +# default the construct itself is returned. +# +# default: optional +# Return the value of the *default* parameter if a construct +# can not be found. If set to an `Exception` instance then +# it will be raised instead. +# +# :Returns: +# +# `DomainAxis` or `str` +# The selected domain axis construct, or its key. +# +# **Examples:** +# +# TODO +# +# """ +# # Try for integer index +# if identity: +# identity2 = [] +# +# data_axes = self.get_data_axes(default=None) +# for i in identity: +# try: +# identity2.append(data_axes[i]) +# except TypeError: +# identity2.append(i) +# except IndexError: +# pass +# +# if not identity2: +# if default is None: +# return default +# +# return self._default( +# default, +# "Indices do not exist for field construct data dimenions", +# ) +# +# identity = identity2 +# +## c = self._select_construct( +## ("domain_axis",), +## "domain_axis", +## identity, +## key=key, +## default=None, +## item=item, +## **filter_kwargs, +## ) +# c = self._filter_interface( # ("domain_axis",), # "domain_axis", # identity, +# construct=True, # key=key, -# default=None, # item=item, +# default=None, # **filter_kwargs, # ) - c = self._filter_interface( - ("domain_axis",), - "domain_axis", - identity, - construct=True, - key=key, - item=item, - default=None, - **filter_kwargs, - ) - if c is not None: - return c - - da_key = self.domain_axis_key(*identity, default=None) - - if da_key is not None: - if key: - return da_key - - construct = self.constructs[da_key] - - if item: - return da_key, construct - - return construct - - if default is None: - return default - - return self._default( - default, - f"{self.__class__.__name__}.domain_axis() can't return zero " - "constructs", - ) +# if c is not None: +# return c +# +# da_key = self.domain_axis_key(*identity, default=None) +# +# if da_key is not None: +# if key: +# return da_key +# +# construct = self.constructs[da_key] +# +# if item: +# return da_key, construct +# +# return construct +# +# if default is None: +# return default +# +# return self._default( +# default, +# f"{self.__class__.__name__}.domain_axis() can't return zero " +# "constructs", +# ) def domain_axis_position(self, *identity): """Return the position in the data of a domain axis construct. @@ -17217,70 +17153,70 @@ def domain_axis_position(self, *identity): key = self.domain_axis(*identity, key=True) return self.get_data_axes().index(key) - def auxiliary_coordinates(self, *identities, **filter_kwargs): - """Return auxiliary coordinate constructs. - - .. versionadded:: 3.0.0 - - .. seealso:: `constructs` - - :Parameters: - - identities: optional - Select auxiliary coordinate constructs that have an - identity, defined by their `!identities` methods, that - matches any of the given values. - - If no identities are provided then all auxiliary - coordinate constructs are selected. - - {{value match}} - - {{displayed identity}} - - {{filter_kwargs: optional}} - - :Returns: - - `Constructs` - The selected constructs, unless modified by any - *filter_kwargs* parameters. 
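`domain_axis_position`, shown unchanged above, simply resolves an axis description to a domain axis key and reports that key's position in the field's data. A short sketch, again assuming the `cf.example_field(1)` axis order (Z, Y, X).

import cf

f = cf.example_field(1)  # data axes: Z(1), grid_latitude(10), grid_longitude(9)

# Position of an axis in the field's data, by coordinate identity ...
print(f.domain_axis_position("grid_latitude"))  # 1

# ... which is equivalent to resolving the domain axis key by hand
key = f.domain_axis("grid_latitude", key=True)
print(f.get_data_axes().index(key))  # 1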
- - **Examples:** - - >>> f.auxiliary_coordinates() - Constructs: - {} - - >>> f.auxiliary_coordinates() - Constructs: - {'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, - 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, - 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name:Grid latitude name(10) >} - - """ - filter_by_identity = filter_kwargs.pop("filter_by_identity", None) - if identities: - if filter_by_identity is not None: - raise TypeError( - f"Can't set {self.__class__.__name__}." - "auxiliary_coordinates() " - "keyword argument 'filter_by_identity' when " - "positional *identities arguments are also set" - ) - elif filter_by_identity is not None: - identities = filter_by_identity - - ctypes = [i for i in "XTYZ" if i in identities] - if len(ctypes) == len(identities): - filter_kwargs["filter_by_coordinate_type"] = ctypes - return super().auxiliary_coordinates( - _last_filter=("filter_by_coordinate_type",), - **filter_kwargs - ) - - return super().auxiliary_coordinates( *identities, - **filter_kwargs) +# def auxiliary_coordinates(self, *identities, **filter_kwargs): +# """Return auxiliary coordinate constructs. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `constructs` +# +# :Parameters: +# +# identities: optional +# Select auxiliary coordinate constructs that have an +# identity, defined by their `!identities` methods, that +# matches any of the given values. +# +# If no identities are provided then all auxiliary +# coordinate constructs are selected. +# +# {{value match}} +# +# {{displayed identity}} +# +# {{filter_kwargs: optional}} +# +# :Returns: +# +# `Constructs` +# The selected constructs, unless modified by any +# *filter_kwargs* parameters. +# +# **Examples:** +# +# >>> f.auxiliary_coordinates() +# Constructs: +# {} +# +# >>> f.auxiliary_coordinates() +# Constructs: +# {'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, +# 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, +# 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name:Grid latitude name(10) >} +# +# """ +# filter_by_identity = filter_kwargs.pop("filter_by_identity", None) +# if identities: +# if filter_by_identity is not None: +# raise TypeError( +# f"Can't set {self.__class__.__name__}." +# "auxiliary_coordinates() " +# "keyword argument 'filter_by_identity' when " +# "positional *identities arguments are also set" +# ) +# elif filter_by_identity is not None: +# identities = filter_by_identity +# +# ctypes = [i for i in "XTYZ" if i in identities] +# if len(ctypes) == len(identities): +# filter_kwargs["filter_by_coordinate_type"] = ctypes +# return super().auxiliary_coordinates( +# _last_filter=("filter_by_coordinate_type",), +# **filter_kwargs +# ) +# +# return super().auxiliary_coordinates( *identities, +# **filter_kwargs) def axes_names(self, *identities, **kwargs): """Return canonical identities for each domain axis construct. @@ -18434,111 +18370,112 @@ def percentile( # # return super().period(*value) - def replace_construct(self, identity, construct, copy=True): - """Replace a metadata construct. - - Replacement assigns the same construct key and, if applicable, the - domain axes of the original construct to the new, replacing - construct. - - .. versionadded:: 3.0.0 - - .. seealso:: `set_construct` - - :Parameters: - - identity: - Select the metadata construct to be replaced by one of: - - * The identity or key of a metadata construct. 
- - * The identity or key of a domain axis construct that is - spanned by a metadata construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, - etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='Y'`` - - *Parameter example:* - ``identity='latitude'`` - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - construct: - The new construct to replace that selected by the - *identity* parameter. - - copy: `bool`, optional - If True then set a copy of the new construct. By default - the construct is copied. - - :Returns: - - The construct that was replaced. - - **Examples:** - - >>> f.replace_construct('X', new_X_construct) - - """ - key, c = self.construct(identity, item=True) - - if not isinstance(construct, c.__class__): - raise ValueError( - f"Can't replace a {c.__class__.__name__} construct " - f"with a {construct.__class__.__name__} construct" - ) - - axes = self.get_data_axes(key, None) - if axes is not None: - shape0 = getattr(c, "shape", None) - shape1 = getattr(construct, "shape", None) - if shape0 != shape1: - raise ValueError("TODO bb") - - self.set_construct(construct, key=key, axes=axes, copy=copy) - - return c +# def replace_construct(self, *identity, construct=None, copy=True, +# **filter_kwargs): +# """Replace a metadata construct. +# +# Replacement assigns the same construct key and, if applicable, the +# domain axes of the original construct to the new, replacing +# construct. +# +# .. versionadded:: 3.0.0 +# +# .. seealso:: `set_construct` +# +# :Parameters: +# +# identity: +# Select the metadata construct to be replaced by one of: +# +# * The identity or key of a metadata construct. +# +# * The identity or key of a domain axis construct that is +# spanned by a metadata construct's data. +# +# A construct identity is specified by a string +# (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, +# etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or +# a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# A construct has a number of identities, and is selected if +# any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. 
In the following example, the construct ``x`` has +# six identities: +# +# >>> x.identities() +# ['time', +# 'long_name=Time', +# 'foo=bar', +# 'standard_name=time', +# 'ncvar%t', +# 'T'] +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'dimensioncoordinate2'`` and +# ``'key%dimensioncoordinate2'`` are both acceptable keys. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. +# +# *Parameter example:* +# ``identity='Y'`` +# +# *Parameter example:* +# ``identity='latitude'`` +# +# *Parameter example:* +# ``identity='long_name=Latitude'`` +# +# *Parameter example:* +# ``identity='dimensioncoordinate1'`` +# +# *Parameter example:* +# ``identity='domainaxis2'`` +# +# *Parameter example:* +# ``identity='ncdim%y'`` +# +# construct: +# The new construct to replace that selected by the +# *identity* parameter. +# +# copy: `bool`, optional +# If True then set a copy of the new construct. By default +# the construct is copied. +# +# :Returns: +# +# The construct that was replaced. +# +# **Examples:** +# +# >>> f.replace_construct('X', new_X_construct) +# +# """ +# key, c = self.construct(*identity, item=True, **filter_kwargs) +# +# if not isinstance(construct, c.__class__): +# raise ValueError( +# f"Can't replace a {c.__class__.__name__} construct " +# f"with a {construct.__class__.__name__} construct" +# ) +# +# axes = self.get_data_axes(key, None) +# if axes is not None: +# shape0 = getattr(c, "shape", None) +# shape1 = getattr(construct, "shape", None) +# if shape0 != shape1: +# raise ValueError("TODO bb") +# +# self.set_construct(construct, key=key, axes=axes, copy=copy) +# +# return c @_inplace_enabled(default=False) def flatten(self, axes=None, return_axis=False, inplace=False): @@ -18551,38 +18488,38 @@ def flatten(self, axes=None, return_axis=False, inplace=False): Metadata constructs whose data spans the flattened axes will either themselves be flattened, or else removed. - Cell method constructs that apply to the flattened axes will be - removed or, if possible, have their axis specifications changed to - standard names. + Cell method constructs that apply to the flattened axes will + be removed or, if possible, have their axis specifications + changed to standard names. The flattening is executed in row-major (C-style) order. For - example, the array ``[[1, 2], [3, 4]]`` would be flattened across - both dimensions to ``[1 2 3 4]``. + example, the array ``[[1, 2], [3, 4]]`` would be flattened + across both dimensions to ``[1 2 3 4]``. .. versionadded:: 3.0.2 .. seealso:: `compress`, `insert_dimension`, `flip`, `swapaxes`, `transpose` - :Parameters: - - axes: (sequence of) `str` or `int`, optional - Select the domain axes to be flattened, defined by the - domain axes that would be selected by passing each given - axis description to a call of the field construct's - `domain_axis` method. For example, for a value of ``'X'``, - the domain axis construct returned by - ``f.domain_axis('X')`` is selected. + :Parameters: + + axes: (sequence of) `str` or `int`, optional + Select the domain axes to be flattened, defined by the + domain axes that would be selected by passing each + given axis description to a call of the field + construct's `domain_axis` method. For example, for a + value of ``'X'``, the domain axis construct returned + by ``f.domain_axis('X')`` is selected. 
- If no axes are provided then all axes spanned by the field - construct's data are flattened. + If no axes are provided then all axes spanned by the + field construct's data are flattened. No axes are flattened if *axes* is an empty sequence. return_axis: `bool`, optional - If True then also return either the key of the flattened - domain axis construct; or `None` if the axes to be - flattened do not span the data. + If True then also return either the key of the + flattened domain axis construct; or `None` if the axes + to be flattened do not span the data. {{inplace: `bool`, optional}} @@ -18592,9 +18529,9 @@ def flatten(self, axes=None, return_axis=False, inplace=False): The new, flattened field construct, or `None` if the operation was in-place. - If *return_axis* is True then also return either the key - of the flattened domain axis construct; or `None` if the - axes to be flattened do not span the data. + If *return_axis* is True then also return either the + key of the flattened domain axis construct; or `None` + if the axes to be flattened do not span the data. **Examples** @@ -19393,165 +19330,165 @@ def subspace(self): """ return SubspaceField(self) - def coordinates(self, *identities, **filter_kwargs): - """Return dimension and auxiliary coordinate constructs. - - . versionadded:: 3.0.0 - - . seealso:: `auxiliary_coordinates`, `constructs`, - `dimension_coordinates` - - :Parameters: - - identities: optional - Select coordinate constructs that have an identity, - defined by their `!identities` methods, that matches - any of the given values. - - If no identities are provided then all coordinate - constructs are selected. - - {{value match}} - - {{displayed identity}} - - {{filter_kwargs: optional}} - - :Returns: - - `Constructs` - The selected constructs, unless modified by any - *filter_kwargs* parameters. - - *Examples:** - - >> f.coordinates() - onstructs: - } - - >> f.coordinates() - onstructs: - 'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, - 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, - 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name=Grid latitude name(10) >, - 'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, - 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, - 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, - 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} - - """ - filter_by_identity = filter_kwargs.pop("filter_by_identity", None) - if identities: - if filter_by_identity is not None: - raise TypeError( - f"Can't set {self.__class__.__name__}.coordinates() " - "keyword argument 'filter_by_identity' when " - "positional *identities arguments are also set" - ) - elif filter_by_identity is not None: - identities = filter_by_identity - - ctypes = [i for i in "XTYZ" if i in identities] - if ctypes and len(ctypes) == len(identities): - filter_kwargs["filter_by_coordinate_type"] = ctypes - return super().coordinates( - _last_filter=("filter_by_coordinate_type",), - **filter_kwargs - ) - - return super().coordinates(*identities, **filter_kwargs) - - def coordinate_reference_domain_axes(self, identity): - """Return the domain axes that apply to a coordinate reference - construct. - - :Parameters: - - identity: - Select the coordinate reference construct by one of: - - * The identity or key of a coordinate reference construct. 
- - A construct identity is specified by a string - (e.g. ``'grid_mapping_name:latitude_longitude'``, - ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a - `Query` object (e.g. ``cf.eq('latitude_longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - two identities: - - >>> x.identities() - ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] - - A identity's prefix of ``'grid_mapping_name:'`` or - ``'standard_name:'`` may be omitted - (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` - and ``'atmosphere_hybrid_height_coordinate'`` are both - acceptable identities). - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'coordinatereference2'`` and - ``'key%coordinatereference2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` - - *Parameter example:* - ``identity='grid_mapping_name:rotated_latitude_longitude'`` - - *Parameter example:* - ``identity='transverse_mercator'`` - - *Parameter example:* - ``identity='coordinatereference1'`` - - *Parameter example:* - ``identity='key%coordinatereference1'`` - - *Parameter example:* - ``identity='ncvar%lat_lon'`` - - :Returns: - - `set` - The identifiers of the domain axis constructs that span - the data of all coordinate and domain ancillary constructs - used by the selected coordinate reference construct. - - **Examples:** - - >>> f.coordinate_reference_domain_axes('coordinatereference0') - {'domainaxis0', 'domainaxis1', 'domainaxis2'} - - >>> f.coordinate_reference_domain_axes( - ... 'atmosphere_hybrid_height_coordinate') - {'domainaxis0', 'domainaxis1', 'domainaxis2'} - - """ - cr = self.coordinate_reference(identity) - - data_axes = self.constructs.data_axes() - - axes = [] - for i in cr.coordinates() | set( - cr.coordinate_conversion.domain_ancillaries().values() - ): - i = self.construct_key(i, None) - axes.extend(data_axes.get(i, ())) - - return set(axes) +# def coordinates(self, *identities, **filter_kwargs): +# """Return dimension and auxiliary coordinate constructs. +# +# . versionadded:: 3.0.0 +# +# . seealso:: `auxiliary_coordinates`, `constructs`, +# `dimension_coordinates` +# +# :Parameters: +# +# identities: optional +# Select coordinate constructs that have an identity, +# defined by their `!identities` methods, that matches +# any of the given values. +# +# If no identities are provided then all coordinate +# constructs are selected. +# +# {{value match}} +# +# {{displayed identity}} +# +# {{filter_kwargs: optional}} +# +# :Returns: +# +# `Constructs` +# The selected constructs, unless modified by any +# *filter_kwargs* parameters. 
+# +# *Examples:** +# +# >> f.coordinates() +# onstructs: +# } +# +# >> f.coordinates() +# onstructs: +# 'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, +# 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, +# 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name=Grid latitude name(10) >, +# 'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, +# 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, +# 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, +# 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} +# +# """ +# filter_by_identity = filter_kwargs.pop("filter_by_identity", None) +# if identities: +# if filter_by_identity is not None: +# raise TypeError( +# f"Can't set {self.__class__.__name__}.coordinates() " +# "keyword argument 'filter_by_identity' when " +# "positional *identities arguments are also set" +# ) +# elif filter_by_identity is not None: +# identities = filter_by_identity +# +# ctypes = [i for i in "XTYZ" if i in identities] +# if ctypes and len(ctypes) == len(identities): +# filter_kwargs["filter_by_coordinate_type"] = ctypes +# return super().coordinates( +# _last_filter=("filter_by_coordinate_type",), +# **filter_kwargs +# ) +# +# return super().coordinates(*identities, **filter_kwargs) +# +# def coordinate_reference_domain_axes(self, identity): +# """Return the domain axes that apply to a coordinate reference +# construct. +# +# :Parameters: +# +# identity: +# Select the coordinate reference construct by one of: +# +# * The identity or key of a coordinate reference construct. +# +# A construct identity is specified by a string +# (e.g. ``'grid_mapping_name:latitude_longitude'``, +# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a +# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or +# a compiled regular expression +# (e.g. ``re.compile('^atmosphere')``) that selects the +# relevant constructs whose identities match via +# `re.search`. +# +# Each construct has a number of identities, and is selected +# if any of them match any of those provided. A construct's +# identities are those returned by its `!identities` +# method. In the following example, the construct ``x`` has +# two identities: +# +# >>> x.identities() +# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] +# +# A identity's prefix of ``'grid_mapping_name:'`` or +# ``'standard_name:'`` may be omitted +# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` +# and ``'atmosphere_hybrid_height_coordinate'`` are both +# acceptable identities). +# +# A construct key may optionally have the ``'key%'`` +# prefix. For example ``'coordinatereference2'`` and +# ``'key%coordinatereference2'`` are both acceptable keys. +# +# Note that in the output of a `print` call or `!dump` +# method, a construct is always described by one of its +# identities, and so this description may always be used as +# an *identity* argument. 
+# +# *Parameter example:* +# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` +# +# *Parameter example:* +# ``identity='grid_mapping_name:rotated_latitude_longitude'`` +# +# *Parameter example:* +# ``identity='transverse_mercator'`` +# +# *Parameter example:* +# ``identity='coordinatereference1'`` +# +# *Parameter example:* +# ``identity='key%coordinatereference1'`` +# +# *Parameter example:* +# ``identity='ncvar%lat_lon'`` +# +# :Returns: +# +# `set` +# The identifiers of the domain axis constructs that span +# the data of all coordinate and domain ancillary constructs +# used by the selected coordinate reference construct. +# +# **Examples:** +# +# >>> f.coordinate_reference_domain_axes('coordinatereference0') +# {'domainaxis0', 'domainaxis1', 'domainaxis2'} +# +# >>> f.coordinate_reference_domain_axes( +# ... 'atmosphere_hybrid_height_coordinate') +# {'domainaxis0', 'domainaxis1', 'domainaxis2'} +# +# """ +# cr = self.coordinate_reference(identity) +# +# data_axes = self.constructs.data_axes() +# +# axes = [] +# for i in cr.coordinates() | set( +# cr.coordinate_conversion.domain_ancillaries().values() +# ): +# i = self.construct_key(i, None) +# axes.extend(data_axes.get(i, ())) +# +# return set(axes) def section(self, axes=None, stop=None, **kwargs): """Return a FieldList of m dimensional sections of a Field of n @@ -19623,69 +19560,71 @@ def regrids( i=False, _compute_field_mass=None, ): - """Return the field regridded onto a new latitude-longitude - grid. - - Regridding, also called remapping or interpolation, is the process - of changing the grid underneath field data values while preserving - the qualities of the original data. - - The regridding method must be specified. First-order conservative - interpolation conserves the global area integral of the field, but - may not give approximations to the values as good as linear - interpolation. Second-order conservative interpolation also takes - into account the gradient across the source cells, so in general - gives a smoother, more accurate representation of the source field - especially when going from a coarser to a finer grid. Linear - interpolation is available. The latter method is particular useful - for cases when the latitude and longitude coordinate cell - boundaries are not known nor inferable. Higher order patch - recovery is available as an alternative to linear - interpolation. This typically results in better approximations to - values and derivatives compared to the latter, but the weight - matrix can be larger than the linear matrix, which can be an - issue when regridding close to the memory limit on a - machine. Nearest neighbour interpolation is also - available. Nearest source to destination is particularly useful - for regridding integer fields such as land use. + """Return the field regridded onto a new latitude-longitude grid. + + Regridding, also called remapping or interpolation, is the + process of changing the grid underneath field data values + while preserving the qualities of the original data. + + The regridding method must be specified. First-order + conservative interpolation conserves the global area integral + of the field, but may not give approximations to the values as + good as linear interpolation. Second-order conservative + interpolation also takes into account the gradient across the + source cells, so in general gives a smoother, more accurate + representation of the source field especially when going from + a coarser to a finer grid. Linear interpolation is + available. 
The latter method is particular useful for cases + when the latitude and longitude coordinate cell boundaries are + not known nor inferable. Higher order patch recovery is + available as an alternative to linear interpolation. This + typically results in better approximations to values and + derivatives compared to the latter, but the weight matrix can + be larger than the linear matrix, which can be an issue when + regridding close to the memory limit on a machine. Nearest + neighbour interpolation is also available. Nearest source to + destination is particularly useful for regridding integer + fields such as land use. **Metadata** - The field construct's domain must have well defined X and Y axes - with latitude and longitude coordinate values, which may be stored - as dimension coordinate objects or two dimensional auxiliary - coordinate objects. If the latitude and longitude coordinates are - two dimensional then the X and Y axes must be defined by dimension - coordinates if present or by the netCDF dimensions. In the latter - case the X and Y axes must be specified using the *src_axes* or - *dst_axes* keyword. The same is true for the destination grid, if - it provided as part of another field. - - The cyclicity of the X axes of the source field and destination - grid is taken into account. If an X axis is in fact cyclic but is - not registered as such by its parent field (see - `cf.Field.iscyclic`), then the cyclicity may be set with the - *src_cyclic* or *dst_cyclic* parameters. In the case of two - dimensional latitude and longitude dimension coordinates without - bounds it will be necessary to specify *src_cyclic* or + The field construct's domain must have well defined X and Y + axes with latitude and longitude coordinate values, which may + be stored as dimension coordinate objects or two dimensional + auxiliary coordinate objects. If the latitude and longitude + coordinates are two dimensional then the X and Y axes must be + defined by dimension coordinates if present or by the netCDF + dimensions. In the latter case the X and Y axes must be + specified using the *src_axes* or *dst_axes* keyword. The same + is true for the destination grid, if it provided as part of + another field. + + The cyclicity of the X axes of the source field and + destination grid is taken into account. If an X axis is in + fact cyclic but is not registered as such by its parent field + (see `cf.Field.iscyclic`), then the cyclicity may be set with + the *src_cyclic* or *dst_cyclic* parameters. In the case of + two dimensional latitude and longitude dimension coordinates + without bounds it will be necessary to specify *src_cyclic* or *dst_cyclic* manually if the field is global. - The output field construct's coordinate objects which span the X - and/or Y axes are replaced with those from the destination - grid. Any fields contained in coordinate reference objects will - also be regridded, if possible. + The output field construct's coordinate objects which span the + X and/or Y axes are replaced with those from the destination + grid. Any fields contained in coordinate reference objects + will also be regridded, if possible. **Mask** The data array mask of the field is automatically taken into account, such that the regridded data array will be masked in - regions where the input data array is masked. By default the mask - of the destination grid is not taken into account. 
If the - destination field data has more than two dimensions then the mask, - if used, is taken from the two dimensional section of the data - where the indices of all axes other than X and Y are zero. + regions where the input data array is masked. By default the + mask of the destination grid is not taken into account. If the + destination field data has more than two dimensions then the + mask, if used, is taken from the two dimensional section of + the data where the indices of all axes other than X and Y are + zero. **Implementation** @@ -19711,8 +19650,9 @@ def regrids( **Curvilinear Grids** - Grids in projection coordinate systems can be regridded as long as - two dimensional latitude and longitude coordinates are present. + Grids in projection coordinate systems can be regridded as + long as two dimensional latitude and longitude coordinates are + present. **Rotated Pole Grids** @@ -19720,20 +19660,21 @@ def regrids( Rotated pole grids can be regridded as long as two dimensional latitude and longitude coordinates are present. It may be necessary to explicitly identify the grid latitude and grid - longitude coordinates as being the X and Y axes and specify the - *src_cyclic* or *dst_cyclic* keywords. + longitude coordinates as being the X and Y axes and specify + the *src_cyclic* or *dst_cyclic* keywords. **Tripolar Grids** - Tripolar grids are logically rectangular and so may be able to be - regridded. If no dimension coordinates are present it will be - necessary to specify which netCDF dimensions are the X and Y axes - using the *src_axes* or *dst_axes* keywords. Connections across - the bipole fold are not currently supported, but are not be - necessary in some cases, for example if the points on either side - are together without a gap. It will also be necessary to specify - *src_cyclic* or *dst_cyclic* if the grid is global. + Tripolar grids are logically rectangular and so may be able to + be regridded. If no dimension coordinates are present it will + be necessary to specify which netCDF dimensions are the X and + Y axes using the *src_axes* or *dst_axes* + keywords. Connections across the bipole fold are not currently + supported, but are not be necessary in some cases, for example + if the points on either side are together without a gap. It + will also be necessary to specify *src_cyclic* or *dst_cyclic* + if the grid is global. .. versionadded:: 1.0.4 @@ -19742,14 +19683,15 @@ def regrids( :Parameters: dst: `Field` or `dict` - The field containing the new grid. If dst is a field list - the first field in the list is used. Alternatively a - dictionary can be passed containing the keywords - 'longitude' and 'latitude' with either two 1D dimension - coordinates or two 2D auxiliary coordinates. In the 2D - case both coordinates must have their axes in the same - order and this must be specified by the keyword 'axes' as - either of the tuples ``('X', 'Y')`` or ``('Y', 'X')``. + The field containing the new grid. If dst is a field + list the first field in the list is + used. Alternatively a dictionary can be passed + containing the keywords 'longitude' and 'latitude' + with either two 1D dimension coordinates or two 2D + auxiliary coordinates. In the 2D case both coordinates + must have their axes in the same order and this must + be specified by the keyword 'axes' as either of the + tuples ``('X', 'Y')`` or ``('Y', 'X')``. method: `str` Specify the regridding method. 
The *method* parameter must @@ -19765,44 +19707,47 @@ def regrids( ``'conservative_1st'`` First order conservative interpolation. - Preserve the area integral of the - data across the interpolation from - source to destination. It uses the + Preserve the area integral of + the data across the + interpolation from source to + destination. It uses the proportion of the area of the - overlapping source and destination - cells to determine appropriate - weights. + overlapping source and + destination cells to determine + appropriate weights. In particular, the weight of a - source cell is the ratio of the - area of intersection of the source - and destination cells to the area - of the whole destination cell. - - It does not account for the field - gradient across the source cell, - unlike the second-order - conservative method (see below). + source cell is the ratio of + the area of intersection of + the source and destination + cells to the area of the whole + destination cell. + + It does not account for the + field gradient across the + source cell, unlike the + second-order conservative + method (see below). ``'conservative_2nd'`` Second-order conservative interpolation. - As with first order (see above), - preserves the area integral of the - field between source and - destination using a weighted sum, - with weights based on the - proportionate area of - intersection. + As with first order (see + above), preserves the area + integral of the field between + source and destination using a + weighted sum, with weights + based on the proportionate + area of intersection. Unlike first-order, the - second-order method incorporates - further terms to take into - consideration the gradient of the - field across the source cell, - thereby typically producing a - smoother result of higher - accuracy. + second-order method + incorporates further terms to + take into consideration the + gradient of the field across + the source cell, thereby + typically producing a smoother + result of higher accuracy. ``'conservative'`` Alias for ``'conservative_1st'`` @@ -19810,14 +19755,14 @@ def regrids( interpolation. A second degree polynomial - regridding method, which uses a - least squares algorithm to + regridding method, which uses + a least squares algorithm to calculate the polynomial. This method gives better derivatives in the resulting - destination data than the linear - method. + destination data than the + linear method. ``'nearest_stod'`` Nearest neighbour interpolation for which each destination point @@ -19835,50 +19780,54 @@ def regrids( categorical data. A given destination point may - receive input from multiple source - points, but no source point will - map to more than one destination - point. + receive input from multiple + source points, but no source + point will map to more than + one destination point. ====================== ================================== src_cyclic: `bool`, optional Specifies whether the longitude for the source grid is periodic or not. If `None` then, if possible, this is - determined automatically otherwise it defaults to False. + determined automatically otherwise it defaults to + False. dst_cyclic: `bool`, optional - Specifies whether the longitude for the destination grid - is periodic of not. If `None` then, if possible, this is - determined automatically otherwise it defaults to False. + Specifies whether the longitude for the destination + grid is periodic of not. 
If `None` then, if possible, + this is determined automatically otherwise it defaults + to False. use_src_mask: `bool`, optional - For all methods other than 'nearest_stod', this must be - True as it does not make sense to set it to False. For the - 'nearest_stod' method if it is True then points in the - result that are nearest to a masked source point are - masked. Otherwise, if it is False, then these points are - interpolated to the nearest unmasked source points. + For all methods other than 'nearest_stod', this must + be True as it does not make sense to set it to + False. For the 'nearest_stod' method if it is True + then points in the result that are nearest to a masked + source point are masked. Otherwise, if it is False, + then these points are interpolated to the nearest + unmasked source points. use_dst_mask: `bool`, optional - By default the mask of the data on the destination grid is - not taken into account when performing regridding. If this - option is set to true then it is. If the destination field - has more than two dimensions then the first 2D slice in - index space is used for the mask e.g. for an field varying - with (X, Y, Z, T) the mask is taken from the slice (X, Y, - 0, 0). + By default the mask of the data on the destination + grid is not taken into account when performing + regridding. If this option is set to true then it + is. If the destination field has more than two + dimensions then the first 2D slice in index space is + used for the mask e.g. for an field varying with (X, + Y, Z, T) the mask is taken from the slice (X, Y, 0, + 0). fracfield: `bool`, optional - If the method of regridding is conservative the fraction - of each destination grid cell involved in the regridding - is returned instead of the regridded data if this is - True. Otherwise this is ignored. + If the method of regridding is conservative the + fraction of each destination grid cell involved in the + regridding is returned instead of the regridded data + if this is True. Otherwise this is ignored. src_axes: `dict`, optional - A dictionary specifying the axes of the 2D latitude and - longitude coordinates of the source field when no 1D - dimension coordinates are present. It must have keys - ``'X'`` and ``'Y'``. TODO + A dictionary specifying the axes of the 2D latitude + and longitude coordinates of the source field when no + 1D dimension coordinates are present. It must have + keys ``'X'`` and ``'Y'``. TODO *Parameter example:* ``src_axes={'X': 'ncdim%x', 'Y': 'ncdim%y'}`` @@ -19887,34 +19836,35 @@ def regrids( ``src_axes={'X': 1, 'Y': 0}`` dst_axes: `dict`, optional - A dictionary specifying the axes of the 2D latitude and - longitude coordinates of the destination field when no - dimension coordinates are present. It must have keys - ``'X'`` and ``'Y'``. + A dictionary specifying the axes of the 2D latitude + and longitude coordinates of the destination field + when no dimension coordinates are present. It must + have keys ``'X'`` and ``'Y'``. *Parameter example:* ``dst_axes={'X': 'ncdim%x', 'Y': 'ncdim%y'}`` axis_order: sequence, optional - A sequence of items specifying dimension coordinates as - retrieved by the `dim` method. These determine the order - in which to iterate over the other axes of the field when - regridding X-Y slices. The slowest moving axis will be the - first one specified. 
Currently the regridding weights are - recalculated every time the mask of an X-Y slice changes - with respect to the previous one, so this option allows - the user to minimise how frequently the mask changes. + A sequence of items specifying dimension coordinates + as retrieved by the `dim` method. These determine the + order in which to iterate over the other axes of the + field when regridding X-Y slices. The slowest moving + axis will be the first one specified. Currently the + regridding weights are recalculated every time the + mask of an X-Y slice changes with respect to the + previous one, so this option allows the user to + minimise how frequently the mask changes. ignore_degenerate: `bool`, optional True by default. Instructs ESMPy to ignore degenerate - cells when checking the grids for errors. Regridding will - proceed and degenerate cells will be skipped, not - producing a result, when set to True. Otherwise an error - will be produced if degenerate cells are found. This will - be present in the ESMPy log files if `cf.regrid_logging` - is set to True. As of ESMF 7.0.0 this only applies to - conservative regridding. Other methods always skip - degenerate cells. + cells when checking the grids for errors. Regridding + will proceed and degenerate cells will be skipped, not + producing a result, when set to True. Otherwise an + error will be produced if degenerate cells are + found. This will be present in the ESMPy log files if + `cf.regrid_logging` is set to True. As of ESMF 7.0.0 + this only applies to conservative regridding. Other + methods always skip degenerate cells. {{inplace: `bool`, optional}} @@ -19923,14 +19873,14 @@ def regrids( _compute_field_mass: `dict`, optional If this is a dictionary then the field masses of the - source and destination fields are computed and returned - within the dictionary. The keys of the dictionary - indicates the lat-long slice of the field and the - corresponding value is a tuple containing the source field - construct's mass and the destination field construct's - mass. The calculation is only done if conservative - regridding is being performed. This is for debugging - purposes. + source and destination fields are computed and + returned within the dictionary. The keys of the + dictionary indicates the lat-long slice of the field + and the corresponding value is a tuple containing the + source field construct's mass and the destination + field construct's mass. The calculation is only done + if conservative regridding is being performed. This is + for debugging purposes. :Returns: @@ -19939,8 +19889,8 @@ def regrids( **Examples:** - Regrid field construct ``f`` conservatively onto a grid contained - in field construct ``g``: + Regrid field construct ``f`` conservatively onto a grid + contained in field construct ``g``: >>> h = f.regrids(g, 'conservative') @@ -19953,8 +19903,8 @@ def regrids( >>> h = f.regrids(g, 'conservative_1st', use_dst_mask=True) - Regrid f to 2D auxiliary coordinates lat and lon, which have their - dimensions ordered "Y" first then "X". + Regrid f to 2D auxiliary coordinates lat and lon, which have + their dimensions ordered "Y" first then "X". >>> lat @@ -19965,15 +19915,15 @@ def regrids( ... 'conservative' ... ) - Regrid field, f, on tripolar grid to latitude-longitude grid of - field, g. + Regrid field, f, on tripolar grid to latitude-longitude grid + of field, g. >>> h = f.regrids(g, 'linear', src_axes={'X': 'ncdim%x', 'Y': 'ncdim%y'}, ... 
src_cyclic=True) - Regrid f to the grid of g iterating over the 'Z' axis last and the - 'T' axis next to last to minimise the number of times the mask is - changed. + Regrid f to the grid of g iterating over the 'Z' axis last and + the 'T' axis next to last to minimise the number of times the + mask is changed. >>> h = f.regrids(g, 'nearest_dtos', axis_order='ZT') @@ -20313,7 +20263,7 @@ def regrids( x = f.dimension_coordinate("X", default=None) if x is not None and x.Units.equivalent(Units("degrees")): f.cyclic("X", iscyclic=dst_cyclic, period=Data(360, "degrees")) - + # Release old memory from ESMF (this ought to happen garbage # collection, but it doesn't seem to work there!) regridSrc2Dst.destroy() @@ -20327,7 +20277,7 @@ def regrids( # if f.data.fits_in_one_chunk_in_memory(f.data.dtype.itemsize): # f.varray - f.autocyclic() +# f.autocyclic() return f @@ -20347,57 +20297,58 @@ def regridc( i=False, _compute_field_mass=None, ): - """Return the field with the specified Cartesian axes regridded - onto a new grid. + """Return the field with the specified Cartesian axes regridded onto a + new grid. Between 1 and 3 dimensions may be regridded. - Regridding, also called remapping or interpolation, is the process - of changing the grid underneath field data values while preserving - the qualities of the original data. - - The regridding method must be specified. First-order conservative - interpolation conserves the global spatial integral of the field, - but may not give approximations to the values as good as - (multi)linear interpolation. Second-order conservative - interpolation also takes into account the gradient across the - source cells, so in general gives a smoother, more accurate - representation of the source field especially when going from a - coarser to a finer grid. (Multi)linear interpolation is - available. The latter method is particular useful for cases when - the latitude and longitude coordinate cell boundaries are not - known nor inferable. Higher order patch recovery is available as - an alternative to (multi)linear interpolation. This typically + Regridding, also called remapping or interpolation, is the + process of changing the grid underneath field data values + while preserving the qualities of the original data. + + The regridding method must be specified. First-order + conservative interpolation conserves the global spatial + integral of the field, but may not give approximations to the + values as good as (multi)linear interpolation. Second-order + conservative interpolation also takes into account the + gradient across the source cells, so in general gives a + smoother, more accurate representation of the source field + especially when going from a coarser to a finer + grid. (Multi)linear interpolation is available. The latter + method is particular useful for cases when the latitude and + longitude coordinate cell boundaries are not known nor + inferable. Higher order patch recovery is available as an + alternative to (multi)linear interpolation. This typically results in better approximations to values and derivatives - compared to the latter, but the weight matrix can be larger than - the linear matrix, which can be an issue when regridding close - to the memory limit on a machine. It is only available in - 2D. Nearest neighbour interpolation is also available. Nearest - source to destination is particularly useful for regridding - integer fields such as land use. 
+ compared to the latter, but the weight matrix can be larger + than the linear matrix, which can be an issue when regridding + close to the memory limit on a machine. It is only available + in 2D. Nearest neighbour interpolation is also + available. Nearest source to destination is particularly + useful for regridding integer fields such as land use. **Metadata** The field construct's domain must have axes matching those specified in *src_axes*. The same is true for the destination - grid, if it provided as part of another field. Optionally the axes - to use from the destination grid may be specified separately in - *dst_axes*. + grid, if it provided as part of another field. Optionally the + axes to use from the destination grid may be specified + separately in *dst_axes*. The output field construct's coordinate objects which span the specified axes are replaced with those from the destination - grid. Any fields contained in coordinate reference objects will - also be regridded, if possible. + grid. Any fields contained in coordinate reference objects + will also be regridded, if possible. **Mask** The data array mask of the field is automatically taken into account, such that the regridded data array will be masked in - regions where the input data array is masked. By default the mask - of the destination grid is not taken into account. If the - destination field data has more dimensions than the number of axes - specified then, if used, its mask is taken from the 1-3 + regions where the input data array is masked. By default the + mask of the destination grid is not taken into account. If the + destination field data has more dimensions than the number of + axes specified then, if used, its mask is taken from the 1-3 dimensional section of the data where the indices of all axes other than X and Y are zero. @@ -20421,21 +20372,22 @@ def regridc( :Parameters: dst: `Field` or `dict` - The field containing the new grid or a dictionary with the - axes specifiers as keys referencing dimension coordinates. - If dst is a field list the first field in the list is - used. + The field containing the new grid or a dictionary with + the axes specifiers as keys referencing dimension + coordinates. If dst is a field list the first field + in the list is used. axes: Select dimension coordinates from the source and - destination fields for regridding. See `cf.Field.axes` TODO for - options for selecting specific axes. However, the number - of axes returned by `cf.Field.axes` TODO must be the same as - the number of specifiers passed in. + destination fields for regridding. See `cf.Field.axes` + TODO for options for selecting specific axes. However, + the number of axes returned by `cf.Field.axes` TODO + must be the same as the number of specifiers passed + in. method: `str` - Specify the regridding method. The *method* parameter must - be one of the following: + Specify the regridding method. The *method* parameter + must be one of the following: ====================== ================================== Method Description @@ -20444,9 +20396,10 @@ def regridc( of dimensions being regridded. For two dimensional regridding - this is bilinear interpolation, - and for three dimensional - regridding this is trilinear + this is bilinear + interpolation, and for three + dimensional regridding this is + trilinear interpolation.Bilinear interpolation. @@ -20455,44 +20408,47 @@ def regridc( ``'conservative_1st'`` First order conservative interpolation. 
- Preserve the area integral of the - data across the interpolation from - source to destination. It uses the + Preserve the area integral of + the data across the + interpolation from source to + destination. It uses the proportion of the area of the - overlapping source and destination - cells to determine appropriate - weights. + overlapping source and + destination cells to determine + appropriate weights. In particular, the weight of a - source cell is the ratio of the - area of intersection of the source - and destination cells to the area - of the whole destination cell. - - It does not account for the field - gradient across the source cell, - unlike the second-order - conservative method (see below). + source cell is the ratio of + the area of intersection of + the source and destination + cells to the area of the whole + destination cell. + + It does not account for the + field gradient across the + source cell, unlike the + second-order conservative + method (see below). ``'conservative_2nd'`` Second-order conservative interpolation. - As with first order (see above), - preserves the area integral of the - field between source and - destination using a weighted sum, - with weights based on the - proportionate area of - intersection. + As with first order (see + above), preserves the area + integral of the field between + source and destination using a + weighted sum, with weights + based on the proportionate + area of intersection. Unlike first-order, the - second-order method incorporates - further terms to take into - consideration the gradient of the - field across the source cell, - thereby typically producing a - smoother result of higher - accuracy. + second-order method + incorporates further terms to + take into consideration the + gradient of the field across + the source cell, thereby + typically producing a smoother + result of higher accuracy. ``'conservative'`` Alias for ``'conservative_1st'`` @@ -20500,14 +20456,14 @@ def regridc( interpolation. A second degree polynomial - regridding method, which uses a - least squares algorithm to + regridding method, which uses + a least squares algorithm to calculate the polynomial. This method gives better derivatives in the resulting - destination data than the linear - method. + destination data than the + linear method. ``'nearest_stod'`` Nearest neighbour interpolation for which each destination point @@ -20525,52 +20481,55 @@ def regridc( categorical data. A given destination point may - receive input from multiple source - points, but no source point will - map to more than one destination - point. + receive input from multiple + source points, but no source + point will map to more than + one destination point. ====================== ================================== use_src_mask: `bool`, optional - For all methods other than 'nearest_stod', this must be - True as it does not make sense to set it to False. For the + For all methods other than 'nearest_stod', this must + be True as it does not make sense to set it to + False. For the 'nearest_stod' method if it is True then points in the result that are nearest to a masked source point are - masked. Otherwise, if it is False, then these points are - interpolated to the nearest unmasked source points. + masked. Otherwise, if it is False, then these points + are interpolated to the nearest unmasked source + points. use_dst_mask: `bool`, optional - By default the mask of the data on the destination grid is - not taken into account when performing regridding. 
If this - option is set to True then it is. + By default the mask of the data on the destination + grid is not taken into account when performing + regridding. If this option is set to True then it is. fracfield: `bool`, optional - If the method of regridding is conservative the fraction - of each destination grid cell involved in the regridding - is returned instead of the regridded data if this is - True. Otherwise this is ignored. + If the method of regridding is conservative the + fraction of each destination grid cell involved in the + regridding is returned instead of the regridded data + if this is True. Otherwise this is ignored. axis_order: sequence, optional - A sequence of items specifying dimension coordinates as - retrieved by the `dim` method. These determine the order - in which to iterate over the other axes of the field when - regridding slices. The slowest moving axis will be the - first one specified. Currently the regridding weights are - recalculated every time the mask of a slice changes with - respect to the previous one, so this option allows the - user to minimise how frequently the mask changes. + A sequence of items specifying dimension coordinates + as retrieved by the `dim` method. These determine the + order in which to iterate over the other axes of the + field when regridding slices. The slowest moving axis + will be the first one specified. Currently the + regridding weights are recalculated every time the + mask of a slice changes with respect to the previous + one, so this option allows the user to minimise how + frequently the mask changes. ignore_degenerate: `bool`, optional True by default. Instructs ESMPy to ignore degenerate - cells when checking the grids for errors. Regridding will - proceed and degenerate cells will be skipped, not - producing a result, when set to True. Otherwise an error - will be produced if degenerate cells are found. This will - be present in the ESMPy log files if cf.regrid_logging is - set to True. As of ESMF 7.0.0 this only applies to - conservative regridding. Other methods always skip - degenerate cells. + cells when checking the grids for errors. Regridding + will proceed and degenerate cells will be skipped, not + producing a result, when set to True. Otherwise an + error will be produced if degenerate cells are + found. This will be present in the ESMPy log files if + cf.regrid_logging is set to True. As of ESMF 7.0.0 + this only applies to conservative regridding. Other + methods always skip degenerate cells. {{inplace: `bool`, optional}} @@ -20579,14 +20538,14 @@ def regridc( _compute_field_mass: `dict`, optional If this is a dictionary then the field masses of the - source and destination fields are computed and returned - within the dictionary. The keys of the dictionary - indicates the lat/long slice of the field and the - corresponding value is a tuple containing the source field - construct's mass and the destination field construct's - mass. The calculation is only done if conservative - regridding is being performed. This is for debugging - purposes. + source and destination fields are computed and + returned within the dictionary. The keys of the + dictionary indicates the lat/long slice of the field + and the corresponding value is a tuple containing the + source field construct's mass and the destination + field construct's mass. The calculation is only done + if conservative regridding is being performed. This is + for debugging purposes. 
:Returns: @@ -21090,90 +21049,90 @@ def derivative( # ---------------------------------------------------------------- # Aliases # ---------------------------------------------------------------- - def aux( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `auxiliary_coordinate`.""" - return self.auxiliary_coordinate( - *identity, key=key, default=default, item=item, **filter_kwargs - ) - - def auxs(self, *identities, **filter_kwargs): - """Alias for `coordinates`.""" - return self.auxiliary_coordinates(*identities, **filter_kwargs) - - def axes(self, *identities, **filter_kwargs): - """Alias for `domain_axes`.""" - return self.domain_axes(*identities, **filter_kwargs) - - def axis( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `domain_axis`.""" - return self.domain_axis( - *identity, key=key, default=default, item=item, **filter_kwargs - ) - - def coord( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `coordinate`.""" - return self.coordinate( - *identity, key=key, default=default, item=item, **filter_kwargs - ) - - def coords(self, *identities, **filter_kwargs): - """Alias for `coordinates`.""" - return self.coordinates(*identities, **filter_kwargs) - - def dim( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `dimension_coordinate`.""" - return self.dimension_coordinate( - *identity, key=key, default=default, item=item, **filter_kwargs - ) - - def dims(self, *identities, **filter_kwargs): - """Alias for `dimension_coordinates`.""" - return self.dimension_coordinates(*identities, **filter_kwargs) - - def domain_anc( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `domain_ancillary`.""" - return self.domain_ancillary( - *identity, key=key, default=default, item=item, **filter_kwargs - ) - - def domain_ancs(self, *identities, **filter_kwargs): - """Alias for `domain_ancillaries`.""" - return self.domain_ancillaries(*identities, **filter_kwargs) +# def aux( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for `auxiliary_coordinate`.""" +# return self.auxiliary_coordinate( +# *identity, key=key, default=default, item=item, **filter_kwargs +# ) +# +# def auxs(self, *identities, **filter_kwargs): +# """Alias for `coordinates`.""" +# return self.auxiliary_coordinates(*identities, **filter_kwargs) +# +# def axes(self, *identities, **filter_kwargs): +# """Alias for `domain_axes`.""" +# return self.domain_axes(*identities, **filter_kwargs) +# +# def axis( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for `domain_axis`.""" +# return self.domain_axis( +# *identity, key=key, default=default, item=item, **filter_kwargs +# ) +# +# def coord( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for `coordinate`.""" +# return self.coordinate( +# *identity, key=key, default=default, item=item, **filter_kwargs +# ) +# +# def coords(self, *identities, **filter_kwargs): +# """Alias for `coordinates`.""" +# return self.coordinates(*identities, **filter_kwargs) +# +# def dim( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for 
`dimension_coordinate`.""" +# return self.dimension_coordinate( +# *identity, key=key, default=default, item=item, **filter_kwargs +# ) +# +# def dims(self, *identities, **filter_kwargs): +# """Alias for `dimension_coordinates`.""" +# return self.dimension_coordinates(*identities, **filter_kwargs) +# +# def domain_anc( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for `domain_ancillary`.""" +# return self.domain_ancillary( +# *identity, key=key, default=default, item=item, **filter_kwargs +# ) +# +# def domain_ancs(self, *identities, **filter_kwargs): +# """Alias for `domain_ancillaries`.""" +# return self.domain_ancillaries(*identities, **filter_kwargs) def field_anc( self, @@ -21192,59 +21151,59 @@ def field_ancs(self, *identities, **filter_kwargs): """Alias for `field_ancillaries`.""" return self.field_ancillaries(*identities, **filter_kwargs) - def key(self, identity, default=ValueError(), **kwargs): - """Alias for `cf.Field.construct_key`.""" - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "key", - kwargs, - "Use 'construct' method or 'construct_key' method instead.", - ) # pragma: no cover - - return self.construct_key(identity, default=default) - - def measure( - self, - *identity, - key=False, - default=ValueError(), - item=False, - **filter_kwargs, - ): - """Alias for `cell_measure`.""" - return self.cell_measure( - *identity, - key=key, - default=default, - item=item, - **filter_kwargs, - ) - - def measures(self, *identities, **filter_kwargs): - """Alias for `cell_measures`.""" - return self.cell_measures(*identities, **filter_kwargs) - - def ref( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Alias for `coordinate_reference`.""" - return self.coordinate_reference( - *identity, - key=key, - default=default, - item=item, - **filter_kwargs, - ) - - def refs(self, *identities, **filter_kwargs): - """Alias for `coordinate_references`.""" - return self.coordinate_references(*identities, **filter_kwargs) +# def key(self, identity, default=ValueError(), **kwargs): +# """Alias for `cf.Field.construct_key`.""" +# if kwargs: +# _DEPRECATION_ERROR_KWARGS( +# self, +# "key", +# kwargs, +# "Use 'construct' method or 'construct_key' method instead.", +# ) # pragma: no cover +# +# return self.construct_key(identity, default=default) +# +# def measure( +# self, +# *identity, +# key=False, +# default=ValueError(), +# item=False, +# **filter_kwargs, +# ): +# """Alias for `cell_measure`.""" +# return self.cell_measure( +# *identity, +# key=key, +# default=default, +# item=item, +# **filter_kwargs, +# ) +# +# def measures(self, *identities, **filter_kwargs): +# """Alias for `cell_measures`.""" +# return self.cell_measures(*identities, **filter_kwargs) +# +# def ref( +# self, +# *identity, +# default=ValueError(), +# key=False, +# item=False, +# **filter_kwargs, +# ): +# """Alias for `coordinate_reference`.""" +# return self.coordinate_reference( +# *identity, +# key=key, +# default=default, +# item=item, +# **filter_kwargs, +# ) +# +# def refs(self, *identities, **filter_kwargs): +# """Alias for `coordinate_references`.""" +# return self.coordinate_references(*identities, **filter_kwargs) # ---------------------------------------------------------------- # Deprecated attributes and methods diff --git a/cf/mixin/__init__.py b/cf/mixin/__init__.py index c82f3ad54b..555f87c84d 100644 --- a/cf/mixin/__init__.py +++ b/cf/mixin/__init__.py @@ -2,3 +2,4 @@ from .propertiesdata 
import PropertiesData, Subspace from .propertiesdatabounds import PropertiesDataBounds from .coordinate import Coordinate +from .fielddomain import FieldDomain diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 1288a7e050..34c86747c0 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -1,4 +1,4 @@ -from itertools import chain +#from itertools import chain from ..decorators import ( _inplace_enabled, @@ -508,7 +508,7 @@ def identity( return default - def identities(self, generator=False, ctypes=None): + def identities(self, generator=False, ctypes=None, **kwargs): """Return all possible identities. The identities comprise: @@ -575,23 +575,28 @@ def identities(self, generator=False, ctypes=None): 'ncvar%tas'] """ - identities = super().identities(generator=True, ctypes=ctypes) - if ctypes: - g = chain(_ctypes_iter(self, ctypes), identities) + pre = (self._ctypes_iter(ctypes),) + pre0 = kwargs.pop("pre", None) + if pre0: + pre = tuple(pre0) + pre + + kwargs["pre"] = pre else: - g = chain(identities, _ctypes_iter(self, 'XTYZ')) + post = (self._ctypes_iter('XTYZ'),) + post0 = kwargs.pop("post", None) + if post0: + post += tuple(post0) + + kwargs["post"] = post + + return super().identities(generator=generator, **kwargs) + + def _ctypes_iter(self, ctypes): + """Generator for returning the coordinate type letter.""" + for c in ctypes: + if getattr(self, c): + # This coordinate construct is of this type + yield c + return - if generator: - return g - - return list(g) - - -def _ctypes_iter(coord, ctypes): - """Generator for returning the coordinate type letter.""" - for c in ctypes: - if getattr(coord, c): - # This coordinate construct is of this type - yield c - return diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py new file mode 100644 index 0000000000..7f77be8e1c --- /dev/null +++ b/cf/mixin/fielddomain.py @@ -0,0 +1,3135 @@ +import logging + +from numbers import Integral + +import numpy as np + +try: + from matplotlib.path import Path +except ImportError: + pass + +from ..query import Query +from ..data import Data +from ..units import Units + +from ..functions import ( + parse_indices, + bounds_combination_mode, + _DEPRECATION_ERROR, + _DEPRECATION_ERROR_KWARGS, + _DEPRECATION_ERROR_DICT, + _DEPRECATION_ERROR_SEQUENCE, +) + +from ..decorators import ( + _inplace_enabled, + _inplace_enabled_define_and_cleanup, + _manage_log_level_via_verbosity, + _deprecated_kwarg_check, +) + +logger = logging.getLogger(__name__) + + +_units_degrees = Units("degrees") + + +class FieldDomain: + """Mixin class for methods common to both field and domain constructs + + .. versionadded:: 3.TODO.0 + + """ + + # ---------------------------------------------------------------- + # Private methods + # ---------------------------------------------------------------- + def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): + """Create indices that define a subspace of the field or domain + construct. + + This method is intended to be called by the `indices` method. + + See the `indices` method for more details. + + .. versionadded:: 3.TODO.0 + + :Parameters: + + mode: `str` + The mode of operation. See the *mode* parameter of + `indices` for details. + + data_axes: sequence of `str`, or `None` + The domain axis identifiers of the data axes, or + `None` if there is no data array. + + auxiliary_mask: `bool` + Whether or not to create an auxiliary mask. See + `indices` for details. + + kwargs: *optional* + See the **kwargs** parameters of `indices` for + details. 
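# --- Editor's illustrative sketch; not part of this patch. It shows the
# --- kind of public `indices` call that this helper supports; the field
# --- and the construct identities used are assumptions.
import cf

f = cf.example_field(0)

# Conditions are passed as keyword arguments keyed by construct identity;
# the mode ('compress', 'envelope' or 'full') controls how matching cells
# are converted into an index for each domain axis.
indices = f.indices("compress", longitude=cf.wi(30, 120), latitude=cf.gt(0))

# The returned indices can be used directly to create a subspace
g = f[indices]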
+ + :Returns: + + `dict` + The dictionary has two keys: ``'indices'`` and + ``'mask'``. + + The ``'indices'`` key stores a dictionary in keyed by + domain axis identifiers, each of which has a value of + the index for that domain axis. + + The ``'mask'`` key stores a dictionary in keyed by + tuples of domain axis identifier combinations, each + of which has of a `Data` object containing the + auxiliary mask to apply to those domain axes at time + of the indices being used to create a subspace. This + dictionary will always be empty if *auxiliary_mask* + is False. + + """ + compress = mode == "compress" + envelope = mode == "envelope" + full = mode == "full" + + logger.debug( + f"{self.__class__.__name__}._indices:\n" + f" mode = {mode!r}\n" + f" input kwargs = {kwargs!r}" + ) # pragma: no cover + + domain_axes = self.domain_axes(todict=True) +# constructs = self.constructs.filter_by_data() + + # Initialize indices + indices = {axis: slice(None) for axis in domain_axes} + + data_axes = self.constructs.data_axes() + + parsed = {} + unique_axes = set() + n_axes = 0 + for identity, value in kwargs.items(): +# if identity in domain_axes: +# axes = (identity,) +# key = None +# construct = None +# else: + key, construct = self.construct(identity, + item=True, + default=(None, None) + ) + + if construct is None: + raise ValueError( + f"Can't find indices. Ambiguous axis or axes: " + f"{identity!r}" + ) + + if key in domain_axes: + axes = key + else: + axes = data_axes[key] + + if axes in parsed: + # The axes are the same as an exisiting key + parsed[axes].append((axes, key, construct, value)) + else: + new_key = True + y = set(axes) + for x in parsed: + if set(x) == set(y): + # The axes are the same but in a different + # order, so we don't need a new key. + parsed[x].append((axes, key, construct, value)) + new_key = False + break + + if new_key: + # The axes, taken in any order, are not the same + # as any keys, so create an new key. + n_axes += len(axes) + parsed[axes] = [(axes, key, construct, value)] + + unique_axes.update(axes) + + logger.debug( + f" parsed = {parsed!r}\n" + f" unique_axes = {unique_axes!r}\n" + f" n_axes = {n_axes!r}" + ) # pragma: no cover + + if len(unique_axes) < n_axes: + raise ValueError( + "Can't find indices: Multiple constructs with incompatible " + "domain axes" + ) + + auxiliary_mask = {} + + for canonical_axes, axes_key_construct_value in parsed.items(): + axes, keys, constructs, points = list( + zip(*axes_key_construct_value) + ) + + n_items = len(constructs) + n_axes = len(canonical_axes) + + if n_items > n_axes: + if n_axes == 1: + a = "axis" + else: + a = "axes" + + raise ValueError( + f"Error: Can't specify {n_items} conditions for " + f"{n_axes} {a}: {points}" + ) + + create_mask = False + + item_axes = axes[0] + + logger.debug( + f" item_axes = {item_axes!r}\n keys = {keys!r}" + ) # pragma: no cover + + if n_axes == 1: + # ---------------------------------------------------- + # 1-d construct + # ---------------------------------------------------- + ind = None + + axis = item_axes[0] + item = constructs[0] + value = points[0] + + logger.debug( + f" {n_items} 1-d constructs: {constructs!r}\n" + f" axis = {axis!r}\n" + f" value = {value!r}" + ) # pragma: no cover + + if isinstance(value, (list, slice, tuple, np.ndarray)): + # ------------------------------------------------ + # 1-dimensional CASE 1: Value is already an index, + # e.g. 
[0], [7,4,2], + # slice(0,4,2), + # numpy.array([2,4,7]), + # [True, False, True] + # ------------------------------------------------ + logger.debug(" 1-d CASE 1:") # pragma: no cover + + index = value + + if envelope or full: + size = domain_axes[axis].get_size() + d = self._Data(range(size)) + ind = (d[value].array,) + index = slice(None) + + elif ( + item is not None + and isinstance(value, Query) + and value.operator in ("wi", "wo") + and item.construct_type == "dimension_coordinate" + and self.iscyclic(axis) + ): + # ------------------------------------------------ + # 1-dimensional CASE 2: Axis is cyclic and + # subspace criterion is a + # 'within' or 'without' + # Query instance + # ------------------------------------------------ + logger.debug(" 1-d CASE 2:") # pragma: no cover + + if item.increasing: + anchor0 = value.value[0] + anchor1 = value.value[1] + else: + anchor0 = value.value[1] + anchor1 = value.value[0] + + a = self.anchor(axis, anchor0, dry_run=True)["roll"] + b = self.flip(axis).anchor(axis, anchor1, dry_run=True)[ + "roll" + ] + + size = item.size + if abs(anchor1 - anchor0) >= item.period(): + if value.operator == "wo": + set_start_stop = 0 + else: + set_start_stop = -a + + start = set_start_stop + stop = set_start_stop + elif a + b == size: + b = self.anchor(axis, anchor1, dry_run=True)["roll"] + if (b == a and value.operator == "wo") or not ( + b == a or value.operator == "wo" + ): + set_start_stop = -a + else: + set_start_stop = 0 + + start = set_start_stop + stop = set_start_stop + else: + if value.operator == "wo": + start = b - size + stop = -a + size + else: + start = -a + stop = b - size + + index = slice(start, stop, 1) + + if full: + # index = slice(start, start+size, 1) + d = self._Data(list(range(size))) + d.cyclic(0) + ind = (d[index].array,) + + index = slice(None) + + elif item is not None: + # ------------------------------------------------ + # 1-dimensional CASE 3: All other 1-d cases + # ------------------------------------------------ + logger.debug(" 1-d CASE 3:") # pragma: no cover + + item_match = value == item + + if not item_match.any(): + raise ValueError( + f"No {identity!r} axis indices found " + f"from: {value}" + ) + + index = np.asanyarray(item_match) + + if envelope or full: + if np.ma.isMA(index): + ind = np.ma.where(index) + else: + ind = np.where(index) + + index = slice(None) + + else: + raise ValueError( + "Must specify a domain axis construct or a " + "construct with data for which to create indices" + ) + + logger.debug(f" index = {index}") # pragma: no cover + + # Put the index into the correct place in the list of + # indices. + # + # Note that we might overwrite it later if there's an + # auxiliary mask for this axis. 
+ indices[axis] = index + + else: + # ---------------------------------------------------- + # N-dimensional constructs + # ---------------------------------------------------- + logger.debug( + f" {n_items} N-d constructs: {constructs!r}\n" + f" {len(points)} points : {points!r}\n" + ) # pragma: no cover + + # Make sure that each N-d item has the same axis order + transposed_constructs = [] + + for construct, construct_axes in zip(constructs, axes): + if construct_axes != canonical_axes: + iaxes = [ + construct_axes.index(axis) + for axis in canonical_axes + ] + construct = construct.transpose(iaxes) + + transposed_constructs.append(construct) + + logger.debug( + f" transposed N-d constructs: {transposed_constructs!r}" + ) # pragma: no cover + + item_matches = [ + (value == construct).data + for value, construct in zip(points, transposed_constructs) + ] + + item_match = item_matches.pop() + + for m in item_matches: + item_match &= m + + item_match = item_match.array # LAMA alert + + if np.ma.isMA: + ind = np.ma.where(item_match) + else: + ind = np.where(item_match) + + logger.debug( + f" item_match = {item_match}\n" f" ind = {ind}" + ) # pragma: no cover + + for i in ind: + if not i.size: + raise ValueError( + f"No {canonical_axes!r} axis indices found " + f"from: {value!r}" + ) + + bounds = [ + item.bounds.array[ind] + for item in transposed_constructs + if item.has_bounds() + ] + + contains = False + if bounds: + points2 = [] + for v, construct in zip(points, transposed_constructs): + if isinstance(v, Query): + if v.operator == "contains": + contains = True + v = v.value + elif v.operator == "eq": + v = v.value + else: + contains = False + break + + v = self._Data.asdata(v) + if v.Units: + v.Units = construct.Units + + points2.append(v.datum()) + + if contains: + # The coordinates have bounds and the condition is + # a 'contains' Query object. Check each + # potentially matching cell for actually including + # the point. + try: + Path + except NameError: + raise ImportError( + "Need to install matplotlib to create indices " + f"based on {transposed_constructs[0].ndim}-d " + "constructs and a 'contains' Query object" + ) + + if n_items != 2: + raise ValueError( + f"Can't index for cell from {n_axes}-d " + "coordinate objects" + ) + + if 0 < len(bounds) < n_items: + raise ValueError("bounds alskdaskds TODO") + + # Remove grid cells if, upon closer inspection, + # they do actually contain the point. 
+ delete = [ + n + for n, vertices in enumerate(zip(*zip(*bounds))) + if not Path(zip(*vertices)).contains_point(points2) + ] + + if delete: + ind = [np.delete(ind_1d, delete) for ind_1d in ind] + + if ind is not None: + mask_shape = [] + masked_subspace_size = 1 + ind = np.array(ind) + + logger.debug(" ind = {ind}") # pragma: no cover + + for i, (axis, start, stop) in enumerate( + zip(canonical_axes, ind.min(axis=1), ind.max(axis=1)) + ): + if data_axes and axis not in data_axes: + continue + + if indices[axis] == slice(None): + if compress: + # Create a compressed index for this axis + size = stop - start + 1 + index = sorted(set(ind[i])) + elif envelope: + # Create an envelope index for this axis + stop += 1 + size = stop - start + index = slice(start, stop) + elif full: + # Create a full index for this axis + start = 0 + stop = domain_axes[axis].get_size() + size = stop - start + index = slice(start, stop) + else: + raise ValueError( + "Must have full, envelope or compress" + ) # pragma: no cover + + indices[axis] = index + + mask_shape.append(size) + masked_subspace_size *= size + ind[i] -= start + + create_mask = data_axes and ind.shape[1] < masked_subspace_size + else: + create_mask = False + + # TODODASK - if we have 2 list of integers then we need to + # apply different auxiliary masks (if any) + # after different __getitems__. SCRUB THAT! if + # we have an auxiliary mask, then by + # definition we do _not_ have a list(s) of + # integers + + # -------------------------------------------------------- + # Create an auxiliary mask for these axes + # -------------------------------------------------------- + logger.debug(f" create_mask = {create_mask}") # pragma: no cover + + if create_mask: + mask = _create_auxiliary_mask_component( + mask_shape, ind, compress + ) + auxiliary_mask[canonical_axes] = mask + logger.debug( + f" mask_shape = {mask_shape}\n" + f" mask.shape = {mask.shape}" + ) # pragma: no cover + + for axis, index in tuple(indices.items()): + indices[axis] = parse_indices( + (domain_axes[axis].get_size(),), (index,) + )[0] + + # Include the auxiliary mask + indices = { + "indices": indices, + "mask": auxiliary_mask, + } + + logger.debug(f" indices = {indices!r}") # pragma: no cover + + # Return the indices and the auxiliary mask + return indices + + def _roll_constructs(self, axis, shift): + """Roll the metadata constructs in-place along axes. + + If a roll axis is spanned by a dimension coordinate construct + then it must be a periodic dimension coordinate construct. + + .. versionadded:: 3.TODO.0 + + :Parameters: + + axis: sequence of `str` + The axis or axes along which elements are to be + shifted, defined by their domain axis identifiers. + + shift: (sequence of) `int` + The number of places by which elements are shifted. + If a sequence, then *axis* must be a sequence of the + same size, and each of the given axes is shifted by + the corresponding number. If an `int` while *axis* is + a sequence, then the same value is used for all given + axes. + + :Returns: + + `list` + + The shifts corresponding to each rolled axis. + + **Examples:** + + """ + if isinstance(shift, Integral): + if axis: + shift = [shift] * len(axis) + else: + shift = [shift] + else: + shift = list(shift) + + if len(shift) != len(axis): + raise ValueError( + f"Can't roll {self.__class__.__name__}: " + f"Must have the same number of shifts ({len(shift)}) " + f"as axes ({len(axis)})." 
+ ) + + for a in axis: + dim = dims.filter_by_axis("exact", a).value(None) + dim = self.dimension_coordinate(filter_by_axis=(a,), todict=True) + if dim is not None and dim.period() is None: + raise ValueError( + f"Can't roll {self.__class__.__name__}. " + f"{dim.identity()!r} axis has a non-periodic " + "dimension coordinate construct" + ) + + data_axes = self.constructs.data_axes() + for key, construct in self.constructs.filter_by_data(todict=True).items(): + construct_axes = data_axes.get(key, ()) + + c_axes = [] + c_shifts = [] + for a, s in zip(axis, shift): + if a in construct_axes: + c_axes.append(construct_axes.index(a)) + c_shifts.append(s) + + if not c_axes: + # This construct does not span the roll axes + continue + + # TODODASK - remove these two lines when multiaxis rolls + # are allowed at v4.0.0 + c_axes = c_axes[0] + c_shifts = c_shifts[0] + + construct.roll(c_axes, shift=c_shifts, inplace=True) + + return shift + + # ---------------------------------------------------------------- + # Methods + # ---------------------------------------------------------------- + @_deprecated_kwarg_check("i") + @_inplace_enabled(default=False) + def anchor( + self, axis, value, inplace=False, dry_run=False, i=False, **kwargs + ): + """Roll a cyclic axis so that the given value lies in the first + coordinate cell. + + A unique axis is selected with the *axes* and *kwargs* + parameters. + + .. versionadded:: 3.TODO.0 + + .. seealso:: `axis`, `cyclic`, `iscyclic`, `roll` + + :Parameters: + + axis: + The cyclic axis to be anchored. + + {{domain axis selection}} + + value: + Anchor the dimension coordinate values for the + selected cyclic axis to the *value*. May be any + numeric scalar object that can be converted to a + `Data` object (which includes `numpy` and `Data` + objects). If *value* has units then they must be + compatible with those of the dimension coordinates, + otherwise it is assumed to have the same units as the + dimension coordinates. The coordinate values are + transformed so that *value* is "equal to or just + before" the new first coordinate value. More + specifically: + + * Increasing dimension coordinates with positive + period, P, are transformed so that *value* lies in + the half-open range (L-P, F], where F and L are + the transformed first and last coordinate values, + respectively. + + .. + + * Decreasing dimension coordinates with positive + period, P, are transformed so that *value* lies in + the half-open range (L+P, F], where F and L are + the transformed first and last coordinate values, + respectively. + + *Parameter example:* + If the original dimension coordinates are ``0, 5, + ..., 355`` (evenly spaced) and the period is ``360`` + then ``value=0`` implies transformed coordinates of + ``0, 5, ..., 355``; ``value=-12`` implies + transformed coordinates of ``-10, -5, ..., 345``; + ``value=380`` implies transformed coordinates of + ``380, 385, ..., 715``. + + *Parameter example:* + If the original dimension coordinates are ``355, + 350, ..., 0`` (evenly spaced) and the period is + ``360`` then ``value=355`` implies transformed + coordinates of ``355, 350, ..., 0``; ``value=0`` + implies transformed coordinates of ``0, -5, ..., + -355``; ``value=392`` implies transformed + coordinates of ``390, 385, ..., 30``. + + {{inplace: `bool`, optional}} + + dry_run: `bool`, optional + Return a dictionary of parameters which describe the + anchoring process. The construct is not changed, even + if *inplace* is True. 
+ + {{i: deprecated at version 3.0.0}} + + kwargs: deprecated at version 3.0.0 + + :Returns: + + `dict` + + **Examples:** + + >>> f.iscyclic('X') + True + >>> f.dimension_coordinate('X').data + TODO + >>> print(f.dimension_coordinate('X').array) + [ 0 45 90 135 180 225 270 315] + >>> g = f.anchor('X', 230) + >>> print(g.dimension_coordinate('X').array) + [270 315 0 45 90 135 180 225] + >>> g = f.anchor('X', cf.Data(590, 'degreesE')) + >>> print(g.dimension_coordinate('X').array) + [630 675 360 405 450 495 540 585] + >>> g = f.anchor('X', cf.Data(-490, 'degreesE')) + >>> print(g.dimension_coordinate('X').array) + [-450 -405 -720 -675 -630 -585 -540 -495] + + >>> f.iscyclic('X') + True + >>> f.dimension_coordinate('X').data + + >>> f.anchor('X', 10000).dimension_coordinate('X').data + + >>> d = f.anchor('X', 10000, dry_run=True) + >>> d + {'axis': 'domainaxis2', + 'nperiod': , + 'roll': 28} + >>> (f.roll(d['axis'], d['roll']).dimension_coordinate( + ... d['axis']) + d['nperiod']).data + + + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "anchor", kwargs + ) # pragma: no cover + + axis_in = axis +# axis = self._parse_axes(axis_in) + + da_key, axis = self.domain_axis(axis, item=True) + + if dry_run: + f = self + else: + f = _inplace_enabled_define_and_cleanup(self) + + dim = f.dimension_coordinate(filter_by_axis=(da_key,), default=None) + if dim is None: + raise ValueError( + "Can't shift non-cyclic " + f"{f.constructs.domain_axis_identity(da_key)!r} axis" + ) + + period = dim.period() + if period is None: + raise ValueError(f"Cyclic {dim.identity()!r} axis has no period") + + value = f._Data.asdata(value) + if not value.Units: + value = value.override_units(dim.Units) + elif not value.Units.equivalent(dim.Units): + raise ValueError( + f"Anchor value has incompatible units: {value.Units!r}" + ) + + axis_size = axis.get_size() + + if axis_size <= 1: + # Don't need to roll a size one axis + if dry_run: + return {"axis": da_key, "roll": 0, "nperiod": 0} + + return f + + c = dim.get_data(_fill_value=False) + + if dim.increasing: + # Adjust value so it's in the range [c[0], c[0]+period) + n = ((c[0] - value) / period).ceil() + value1 = value + n * period + + shift = axis_size - np.argmax((c - value1 >= 0).array) + if not dry_run: + f.roll(da_key, shift, inplace=True) + + # Re-get dim + dim = f.dimension_coordinate(filter_by_axis=(da_key,)) + # TODO CHECK n for dry run or not + n = ((value - dim.data[0]) / period).ceil() + else: + # Adjust value so it's in the range (c[0]-period, c[0]] + n = ((c[0] - value) / period).floor() + value1 = value + n * period + + shift = axis_size - np.argmax((value1 - c >= 0).array) + + if not dry_run: + f.roll(da_key, shift, inplace=True) + + # Re-get dim + dim = f.dimension_coordinate(filter_by_axis=(da_key,)) + # TODO CHECK n for dry run or not + n = ((value - dim.data[0]) / period).floor() + + if dry_run: + return {"axis": da_key, "roll": shift, "nperiod": n * period} + + if n: + with bounds_combination_mode("OR"): + dim += n * period + + return f + + @_manage_log_level_via_verbosity + def autocyclic(self, key=None, coord=None, verbose=None): + """Set dimensions to be cyclic. + + A dimension is set to be cyclic if it has a unique longitude + (or grid longitude) dimension coordinate construct with bounds + and the first and last bounds values differ by 360 degrees (or + an equivalent amount in other units). + + .. versionadded:: 1.0 + + .. 
seealso:: `cyclic`, `iscyclic`, `period` + + :Parameters: + + {{verbose: `int` or `str` or `None`, optional}} + + :Returns: + + `bool` + + **Examples:** + + >>> f.autocyclic() + + """ + if coord is None: + key, coord = self.dimension_coordinate( + "X", item=True, default=(None, None) + ) + if coord is None: + return False + elif not coord.X: + return False + + bounds = coord.get_bounds(None) + if bounds is None: + self.cyclic(key, iscyclic=False) + return False + + data = bounds.get_data(None, _fill_value=False) + if data is None: + self.cyclic(key, iscyclic=False) + return False + + units = bounds.Units + if units.islongitude: + period = Data(360.0, units="degrees_east") + elif units == _units_degrees: + period = Data(360.0, units="degrees") + else: + self.cyclic(key, iscyclic=False) + return False + + period.Units = data.Units + + if abs(data.last_element() - data.first_element()) != period.array: + self.cyclic(key, iscyclic=False) + return False + + self.cyclic(key, iscyclic=True, period=period) + + return True + + def del_construct(self, identity=None, default=ValueError()): + """Remove a metadata construct. + + If a domain axis construct is selected for removal then it + can't be spanned by any metadata construct's data. See + `del_domain_axis` for more options in this case. + + A domain ancillary construct may be removed even if it is + referenced by coordinate reference construct. In this case the + reference is replace with `None`. + + .. versionadded:: 3.TODO.0 + + .. seealso:: `constructs`, `get_construct`, `has_construct`, + `set_construct`, `del_domain_axis`, + `del_coordinate_reference` + + :Parameters: + + identity: optional + Select the construct by one of + + * A metadata construct identity. + + {{construct selection identity}} + + * The key of a metadata construct + + * `None`. This is the default, which selects the + metadata construct when there is only one of them. + + *Parameter example:* + ``identity='latitude'`` + + *Parameter example:* + ``identity='T' + + *Parameter example:* + ``identity='long_name=Cell Area'`` + + *Parameter example:* + ``identity='cellmeasure1'`` + + *Parameter example:* + ``identity='measure:area'`` + + *Parameter example:* + ``identity=cf.eq('time')'`` + + *Parameter example:* + ``identity=re.compile('^lat')`` + + Select the construct to removed. Must be + + * The identity or key of a metadata construct. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. 
+ + *Parameter example:* + ``identity='measure:area'`` + + *Parameter example:* + ``identity='cell_area'`` + + *Parameter example:* + ``identity='long_name=Cell Area'`` + + *Parameter example:* + ``identity='cellmeasure1'`` + + default: optional + Return the value of the *default* parameter if the + construct can not be removed, or does not exist. If set to + an `Exception` instance then it will be raised instead. + + :Returns: + + The removed metadata construct. + + **Examples:** + + >>> f.del_construct('X') + + + """ + key = self.construct_key(identity, default=None) + if key is None: + return self._default( + default, + f"Can't identify construct to delete from {identity!r}", + ) + + return super().del_construct(key, default=default) + + def del_coordinate_reference( + self, identity=None, construct=None, default=ValueError() + ): + """Remove a coordinate reference construct and all of its domain + ancillary constructs. + + .. versionadded:: 3.0.0 + + .. seealso:: `del_construct` + + :Parameters: + + identity: optional + Select the coordinate reference construct by one of: + + * The identity of a coordinate reference construct. + + {{construct selection identity}} + + * The key of a coordinate reference construct + + * `None`. This is the default, which selects the + coordinate reference construct when there is only + one of them. + + *Parameter example:* + ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + + *Parameter example:* + ``identity='grid_mapping_name:rotated_latitude_longitude'`` + + *Parameter example:* + ``identity='transverse_mercator'`` + + *Parameter example:* + ``identity='coordinatereference1'`` + + *Parameter example:* + ``identity='key%coordinatereference1'`` + + *Parameter example:* + ``identity='ncvar%lat_lon'`` + + *Parameter example:* + ``identity=cf.eq('rotated_pole')'`` + + *Parameter example:* + ``identity=re.compile('^rotated')`` + + construct: optional + TODO + + default: optional + Return the value of the *default* parameter if the + construct can not be removed, or does not exist. + + {{default Exception}} + + :Returns: + + The removed coordinate reference construct. + + **Examples:** + + >>> f.del_coordinate_reference('rotated_latitude_longitude') + + + """ + if construct is None: + if identity is None: + raise ValueError("TODO") + + key = self.coordinate_reference(identity, key=True, default=None) + if key is None: + return self._default( + default, + f"Can't identify construct from {identity!r}", + ) + + ref = self.del_construct(key) + + for ( + da_key + ) in ref.coordinate_conversion.domain_ancillaries().values(): + self.del_construct(da_key, default=None) + + return ref + elif identity is not None: + raise ValueError("TODO") + + out = [] + + c_key = self.construct(construct, key=True, default=None) + if c_key is None: + return self._default( + default, f"Can't identify construct from {construct!r}" + ) + + for key, ref in tuple(self.coordinate_references(todict=True).items()): + if c_key in ref.coordinates(): + self.del_coordinate_reference( + key, construct=None, default=default + ) + out.append(ref) + continue + + if ( + c_key + in ref.coordinate_conversion.domain_ancillaries().values() + ): + self.del_coordinate_reference( + key, construct=None, default=default + ) + out.append(ref) + continue + + return out + + def del_domain_axis( + self, identity=None, squeeze=False, default=ValueError() + ): + """Remove a domain axis construct. 
+ + In general, a domain axis construct can only be removed if it + is not spanned by any construct's data. However, a size 1 + domain axis construct can be removed in any case if the + *squeeze* parameter is set to `True`. In this case, a metadata + construct whose data spans only the removed domain axis + construct will also be removed. + + .. versionadded:: 3.6.0 + + .. seealso:: `del_construct` + + :Parameters: + + identity: optional + Select the domain axis construct by one of: + + * An identity or key of a 1-d dimension or auxiliary + coordinate construct that whose data spans the + domain axis construct. + + {{construct selection identity}} + + * A domain axis construct identity. + + {{domain axis selection identity}} + + * The key of a domain axis construct. + + * `None`. This is the default, which selects the + domain axis construct when there is only one of + them. ``'key%dimensioncoordinate2'`` are both + acceptable keys. + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='dimensioncoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='key%domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + squeeze: `bool`, optional + If True then allow the removal of a size 1 domain axis + construct that is spanned by any data array and + squeeze the corresponding dimension from those arrays. + + default: optional + Return the value of the *default* parameter if the + construct can not be removed, or does not exist. + + {{default Exception}} + + :Returns: + + `DomainAxis` + The removed domain axis construct. + + **Examples:** + + >>> f = cf.example_field(0) + >>> g = f[0] + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(1), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(1) = [-75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> g.del_domain_axis('Y', squeeze=True) + + >>> print(g) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(longitude(8)) 1 + Cell methods : area: mean + Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + >>> g.del_domain_axis('T', squeeze=True) + + >>> print(g) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(longitude(8)) 1 + Cell methods : area: mean + Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east + + """ + dakey, domain_axis= self.domain_axis(identity, item=True) + + if not squeeze: + return self.del_construct(dakey) + + if dakey in self.get_data_axes(default=()): + self.squeeze(dakey, inplace=True) + + for ckey, construct in self.constructs.filter_by_data( + todict=True + ).items(): + data = construct.get_data(None, _fill_value=False) + if data is None: + continue + + construct_axes = self.get_data_axes(ckey) + if dakey not in construct_axes: + continue + + i = construct_axes.index(dakey) + construct.squeeze(i, inplace=True) + construct_axes = list(construct_axes) + construct_axes.remove(dakey) + self.set_data_axes(axes=construct_axes, key=ckey) + + if not construct_axes: + self.del_construct(ckey) + + return domain_axis + + def auxiliary_coordinate( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Return an auxiliary coordinate construct, or its key. + + .. versionadded:: 3.0.0 + + .. 
seealso:: `construct`, `auxiliary_coordinates`, `cell_measure`, + `cell_method`, `coordinate`, `coordinate_reference`, + `dimension_coordinate`, `domain_ancillary`, + `domain_axis`, `field_ancillary` + + :Parameters: + + identity: optional + Select the auxiliary coordinate construct by one of: + + * `None`. This is the default, which selects the + auxiliary coordinate construct when there is only one + of them. + + * The identity or key of an auxiliary coordinate + construct. + + * The identity or key of a domain axis construct that is + spanned by a unique 1-d auxiliary coordinate + construct's data. + + * The position, in the field construct's data, of a + domain axis construct that is spanned by a unique 1-d + auxiliary coordinate construct's data. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'auxiliarycoordinate2'`` and + ``'key%auxiliarycoordinate2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='Y'`` + + *Parameter example:* + ``identity='latitude'`` + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='auxiliarycoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + *Parameter example:* + ``identity=0`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `AuxiliaryCoordinate` or `str` + The selected auxiliary coordinate construct, or its key. + + **Examples:** + + TODO + + """ + return self._filter_interface( + ("auxiliary_coordinate",), + "auxiliary_coordinate", + identity, + construct=True, + key=key, + item=item, + default=default, + **filter_kwargs, + ) + + def construct( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Select a metadata construct by its identity. + + .. seealso:: `del_construct`, `get_construct`, `has_construct`, + `set_construct` + + :Parameters: + + identity: optional + Select the construct. Must be + + * The identity or key of a metadata construct. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. 
``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='T' + + *Parameter example:* + ``identity='measure:area'`` + + *Parameter example:* + ``identity='cell_area'`` + + *Parameter example:* + ``identity='long_name=Cell Area'`` + + *Parameter example:* + ``identity='cellmeasure1'`` + + default: optional + Return the value of the *default* parameter if a construct + can not be found. + + {{default Exception}} + + If the *default* is `None`, or if *item* is True and + *default* is a 2-tuple of `Ǹone`s, then TODO + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + item: TODO + + :Returns: + + The selected coordinate construct, or its key. + + **Examples:** + + >>> f = cf.example_field(1) + >>> print(f) + Field: air_temperature (ncvar%ta) + --------------------------------- + Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K + Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum + Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K + Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] + : grid_latitude(10) = [2.2, ..., -1.76] degrees + : grid_longitude(9) = [-4.7, ..., -1.18] degrees + : time(1) = [2019-01-01 00:00:00] + Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N + : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E + : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] + Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 + Coord references: grid_mapping_name:rotated_latitude_longitude + : standard_name:atmosphere_hybrid_height_coordinate + Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m + : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] + : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m + + >>> f.construct('long_name=Grid latitude name') + + >>> f.construct('ncvar%a') + + >>> f.construct('measure:area') + + >>> f.construct('domainaxis0') + + >>> f.construct('height') + Traceback (most recent call last): + ... + ValueError: Can't return zero constructs + >>> f.construct('height', default=False) + False + >>> f.construct('height', default=TypeError("No height coordinates")) + Traceback (most recent call last): + ... 
+ TypeError: No height coordinates + + """ + return self._filter_interface( + (), + "construct", + identity, + construct=True, + key=key, + item=item, + default=default, + **filter_kwargs, + ) + + def cell_measure( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Select a cell measure construct by its identity. + + .. versionadded:: 3.0.0 + + .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measures`, + `cell_method`, `coordinate`, `coordinate_reference`, + `dimension_coordinate`, `domain_ancillary`, + `domain_axis`, `field_ancillary` + + :Parameters: + + identity: optional + Select the cell measure construct by: + + * `None`. This is the default, which selects the cell + measure construct when there is only one of them. + + * The identity or key of a cell measure construct. + + * The identity or key of a domain axis construct that is + spanned by a unique 1-d cell measure construct's data. + + * The position, in the field construct's data, of a + domain axis construct that is spanned by a unique 1-d + cell measure construct's data. + + A construct identity is specified by a string + (e.g. ``'long_name=Cell Area', ``'ncvar%areacello'``, + etc.); a `Query` object (e.g. ``cf.eq('measure:area')``); + or a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. + + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'cellmeasure2'`` and + ``'key%cellmeasure2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='measure:area'`` + + *Parameter example:* + ``identity='cell_area'`` + + *Parameter example:* + ``identity='long_name=Cell Area'`` + + *Parameter example:* + ``identity='cellmeasure1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity=0`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `CellMeasure`or `str` + The selected cell measure construct, or its key. + + **Examples:** + + TODO + + """ + return self._filter_interface( + ("cell_measure",), + "cell_meausure", + identity, + construct=True, + key=key, + default=default, + item=item, + **filter_kwargs, + ) + + def coordinate( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Return a dimension or auxiliary coordinate construct, or its + key. + + .. versionadded:: 3.0.0 + + .. 
seealso:: `construct`, `auxiliary_coordinate`, `coordinates`, + `dimension_coordinate` + + :Parameters: + + identity: optional + Select the dimension coordinate construct by one of: + + * `None`. This is the default, which selects the + coordinate construct when there is only one of them. + + * The identity or key of a dimension coordinate + construct. + + * The identity or key of a domain axis construct that is + spanned by a unique 1-d coordinate construct's data. + + * The position, in the field construct's data, of a + domain axis construct that is spanned by a unique 1-d + coordinate construct's data. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'auxiliarycoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='Y'`` + + *Parameter example:* + ``identity='latitude'`` + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='dimensioncoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `DimensionCoordinate` or `AuxiliaryCoordinate` or `str` + The selected dimension or auxiliary coordinate construct, + or its key. + + **Examples:** + + TODO + + """ + return self._filter_interface( + ("dimension_coordinate", "auxiliary_coordinate"), + "coordinate", + identity, + construct=True, + key=key, + item=item, + default=default, + **filter_kwargs, + ) + def coordinate_reference( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Return a coordinate reference construct, or its key. + + .. versionadded:: 3.0.0 + + .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, + `cell_method`, `coordinate`, `coordinate_references`, + `dimension_coordinate`, `domain_ancillary`, + `domain_axis`, `field_ancillary` + + :Parameters: + + identity: optional + Select the coordinate reference construct by one of: + + * `None`. This is the default, which selects the + coordinate reference construct when there is only one + of them. + + * The identity or key of a coordinate reference + construct. + + A construct identity is specified by a string + (e.g. 
``'grid_mapping_name:latitude_longitude'``, + ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a + `Query` object (e.g. ``cf.eq('latitude_longitude')``); or + a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. + + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + two identities: + + >>> x.identities() + ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] + + A identity's prefix of ``'grid_mapping_name:'`` or + ``'standard_name:'`` may be omitted + (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` + and ``'atmosphere_hybrid_height_coordinate'`` are both + acceptable identities). + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'coordinatereference2'`` and + ``'key%coordinatereference2'`` are both acceptable keys. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + + *Parameter example:* + ``identity='grid_mapping_name:rotated_latitude_longitude'`` + + *Parameter example:* + ``identity='transverse_mercator'`` + + *Parameter example:* + ``identity='coordinatereference1'`` + + *Parameter example:* + ``identity='key%coordinatereference1'`` + + *Parameter example:* + ``identity='ncvar%lat_lon'`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `CoordinateReference` or `str` + The selected coordinate reference construct, or its key. + + **Examples:** + + TODO + + """ + return self._filter_interface( + ("coordinate_reference",), + "coordinate_reference", + identity, + construct=True, + key=key, + default=default, + item=item, + **filter_kwargs, + ) + + def coordinate_reference_domain_axes(self, identity=None): + """Return the domain axes that apply to a coordinate reference + construct. + + :Parameters: + + identity: optional + Select the coordinate reference construct by one of: + + * The identity of a coordinate reference construct. + + {{construct selection identity}} + + * The key of a coordinate reference construct + + * `None`. This is the default, which selects the + coordinate reference construct when there is only + one of them. 
+ + *Parameter example:* + ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + + *Parameter example:* + ``identity='grid_mapping_name:rotated_latitude_longitude'`` + + *Parameter example:* + ``identity='transverse_mercator'`` + + *Parameter example:* + ``identity='coordinatereference1'`` + + *Parameter example:* + ``identity='key%coordinatereference1'`` + + *Parameter example:* + ``identity='ncvar%lat_lon'`` + + *Parameter example:* + ``identity=cf.eq('rotated_pole')'`` + + *Parameter example:* + ``identity=re.compile('^rotated')`` + + :Returns: + + `set` + The identifiers of the domain axis constructs that san + the data of all coordinate and domain ancillary + constructs used by the selected coordinate reference + construct. + + **Examples:** + + >>> f.coordinate_reference_domain_axes('coordinatereference0') + {'domainaxis0', 'domainaxis1', 'domainaxis2'} + + >>> f.coordinate_reference_domain_axes( + ... 'atmosphere_hybrid_height_coordinate') + {'domainaxis0', 'domainaxis1', 'domainaxis2'} + + """ + cr = self.coordinate_reference(identity) + + data_axes = self.constructs.data_axes() + + axes = [] + for i in cr.coordinates() | set( + cr.coordinate_conversion.domain_ancillaries().values() + ): + key = self.construct(i, key=True, default=None) + axes.extend(data_axes.get(key, ())) + + return set(axes) + + def dimension_coordinate( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Select a dimension coordinate construct. + + .. versionadded:: 3.0.0 + + .. seealso:: `construct`, `dimension_coordinates` + + :Parameters: + + identity: optional + Select dimension coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. In addition to + construct identities, the values are matched against: + + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. + + Additionly, TODOx the values are matched against the identity or + construct identifier, with or without the ``'key%'`` + prefix, of a domain axis construct that is spanned by + a dimension coordinate construct's data. + + *Parameter example:* + ``'domainaxis2'`` + + *Parameter example:* + ``'ncdim%latitude'`` + + * The integer position, in the field construct's data, + of the domain axis construct that is spanned by a + dimension coordinate construct's data. + + *Parameter example:* + ``0'`` + + *Parameter example:* + ``cf.gt(2)`` + + If no values are provided then all constructs are + selected. + + {{value match}} + + {{displayed identity}} + + *Parameter example:* + ``'Y'`` + + *Parameter example:* + ``latitude'`` + + *Parameter example:* + ``re.compile('^lat')`` + + *Parameter example:* + ``'long_name=Latitude'`` + + *Parameter example:* + ``'Z', 'altutude'`` + + key: `bool`, optional + If True then return the selected construct + identifier. By default the construct itself is + returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + item: `bool`, optional + If True then return the selected construct and its + construct identifier in a 2-tuple. By default the only + construct is returned. + + .. versionadded:: 3.9.0 + + :Returns: + + `DimensionCoordinate` or `str` or `tuple` + The selected dimension coordinate construct, or its + construct identifier, or both. 
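# --- Editor's illustrative sketch; not part of this patch. It suggests the
# --- kind of usage that the 'TODO' Examples section below could show; the
# --- field and the construct key quoted in the comment are assumptions.
import cf

f = cf.example_field(0)

x = f.dimension_coordinate("X")                    # the construct itself
xkey = f.dimension_coordinate("X", key=True)       # e.g. 'dimensioncoordinate1'
xkey, x = f.dimension_coordinate("X", item=True)   # (key, construct) 2-tuple

# A default is returned instead of raising when nothing matches
missing = f.dimension_coordinate("height", default=None)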
+ + **Examples:** + + TODO + + """ + return self._filter_interface( + ("dimension_coordinate",), + "dimension_coordinate", + identity, + construct=True, + key=key, + item=item, + default=default, + **filter_kwargs, + ) + + @_deprecated_kwarg_check("axes") + def direction(self, identity=None, axes=None, **kwargs): + """Whether or not a domain axis is increasing. + + An domain axis is considered to be increasing if its dimension + coordinate values are increasing in index space or if it has + no dimension coordinate. + + .. seealso:: `directions` + + :Parameters: + + identity: optional + Select the domain axis construct by one of: + + * An identity or key of a 1-d dimension or auxiliary + coordinate construct that whose data spans the + domain axis construct. + + {{construct selection identity}} + + * A domain axis construct identity + + The domain axis is that which would be selected by + passing the given axis description to a call of the + construct's `domain_axis` method. For example, for a + value of ``'X'``, the domain axis construct returned + by ``f.domain_axis('X')`` is selected. + + * `None`. This is the default, which selects the + domain construct when there is only one of them. + + axes: deprecated at version 3.0.0 + Use the *identity* parmeter instead. + + size: deprecated at version 3.0.0 + + kwargs: deprecated at version 3.0.0 + + :Returns: + + `bool` + Whether or not the domain axis is increasing. + + **Examples:** + + >>> print(f.dimension_coordinate('X').array) + array([ 0 30 60]) + >>> f.direction('X') + True + >>> g = f.flip('X') + >>> g.direction('X') + False + + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "direction", kwargs + ) # pragma: no cover + + axis = self.domain_axis(identity, key=True, default=None) + if axis is None: + return True + + for coord in self.dimension_coordinates(filter_by_axis=(axis,), todict=True).values(): + return coord.direction() + + return True + + def directions(self): + """Return a dictionary mapping all domain axes to their directions. + + .. seealso:: `direction` + + :Returns: + + `dict` + A dictionary whose key/value pairs are domain axis + keys and their directions. + + **Examples:** + + >>> d.directions() + {'domainaxis1': True, 'domainaxis1': False} + + """ + out = {key: True for key in self.domain_axes(todict=True)} + + data_axes = self.constructs.data_axes() + + for key, coord in self.dimension_coordinates(todict=True).items(): + axis = data_axes[key][0] + out[axis] = direction + + return out + + def domain_ancillary( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Return a domain ancillary construct, or its key. + + .. versionadded:: 3.0.0 + + .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, + `cell_method`, `coordinate`, `coordinate_reference`, + `dimension_coordinate`, `domain_ancillaries`, + `domain_axis`, `field_ancillary` + + :Parameters: + + identity: optional + Select the domain ancillary construct by one of: + + * `None`. This is the default, which selects the domain + ancillary construct when there is only one of them. + + * The identity or key of a domain ancillary construct. + + * The identity or key of a domain axis construct that is + spanned by a unique 1-d domain ancillary construct's data. + + * The position, in the field construct's data, of a domain + axis construct that is spanned by a unique 1-d domain + ancillary construct's data. + + A construct identity is specified by a string + (e.g. 
``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'domainancillary2'`` and + ``'key%domainancillary2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='Y'`` + + *Parameter example:* + ``identity='latitude'`` + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='domainancillary1'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity=0`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `DomainAncillary` or `str` + The selected domain ancillary coordinate construct, or its + key. + + **Examples:** + + TODO + + """ + return self._filter_interface( + ("domain_ancillary",), + "domain_ancillary", + identity, + construct=True, + key=key, + default=default, + item=item, + **filter_kwargs, + ) + + def domain_axis( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Return a domain axis construct, or its key. + + .. versionadded:: 3.0.0 + + .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, + `cell_method`, `coordinate`, `coordinate_reference`, + `dimension_coordinate`, `domain_ancillary`, + `domain_axes`, `field_ancillary` + + :Parameters: + + identity: + Select the domain axis construct by one of: + + * An identity or key of a 1-d coordinate construct that + whose data spans the domain axis construct. + + * A domain axis construct identity or key. + + * The position of the domain axis construct in the field + construct's data. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); or a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. + + Each construct has a number of identities, and is selected + if any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. 
For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + A position of a domain axis construct in the field + construct's data is specified by an integer index. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='dimensioncoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='key%domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + *Parameter example:* + ``identity=2`` + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. + + default: optional + Return the value of the *default* parameter if a construct + can not be found. If set to an `Exception` instance then + it will be raised instead. + + :Returns: + + `DomainAxis` or `str` + The selected domain axis construct, or its key. + + **Examples:** + + TODO + + """ + # Try for integer index + if identity: + identity2 = [] + + data_axes = self.get_data_axes(default=None) + for i in identity: + try: + identity2.append(data_axes[i]) + except TypeError: + identity2.append(i) + except IndexError: + pass + + if not identity2: + if default is None: + return default + + return self._default( + default, + "Indices do not exist for field construct data dimenions", + ) + + identity = identity2 + +# c = self._select_construct( +# ("domain_axis",), +# "domain_axis", +# identity, +# key=key, +# default=None, +# item=item, +# **filter_kwargs, +# ) + c = self._filter_interface( + ("domain_axis",), + "domain_axis", + identity, + construct=True, + key=key, + item=item, + default=None, + **filter_kwargs, + ) + if c is not None: + return c + + da_key = self.domain_axis_key(*identity, default=None) + + if da_key is not None: + if key: + return da_key + + construct = self.constructs[da_key] + + if item: + return da_key, construct + + return construct + + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.domain_axis() can't return zero " + "constructs", + ) + + def get_coordinate_reference( + self, identity=None, key=False, construct=None, default=ValueError() + ): + """TODO + + .. versionadded:: 3.0.2 + + .. seealso:: `construct` + + :Parameters: + + identity: optional + Select the coordinate reference construct by one of: + + * The identity of a coordinate reference construct. + + {{construct selection identity}} + + * The key of a coordinate reference construct + + * `None`. This is the default, which selects the + coordinate reference construct when there is only + one of them. + + *Parameter example:* + ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` + + *Parameter example:* + ``identity='grid_mapping_name:rotated_latitude_longitude'`` + + *Parameter example:* + ``identity='transverse_mercator'`` + + *Parameter example:* + ``identity='coordinatereference1'`` + + *Parameter example:* + ``identity='key%coordinatereference1'`` + + *Parameter example:* + ``identity='ncvar%lat_lon'`` + + *Parameter example:* + ``identity=cf.eq('rotated_pole')'`` + + *Parameter example:* + ``identity=re.compile('^rotated')`` + + construct: optional + TODO + + key: `bool`, optional + If True then return the selected construct key. By + default the construct itself is returned. 
+ + default: optional + Return the value of the *default* parameter if a + construct can not be found. + + {{default Exception}} + + :Returns: + + `CoordinateReference` or `str` + The selected coordinate reference construct, or its + key. + + **Examples:** + + """ + if construct is None: + return self.coordinate_reference( + identity=identity, key=key, default=default + ) + + out = [] + + c_key = self.construct(construct, key=True, default=None) + if c_key is None: + return self._default( + default, f"Can't identify construct from {construct!r}" + ) + + for cr_key, ref in tuple( + self.coordinate_references(todict=True).items() + ): + if c_key in [ + ref.coordinates(), + ref.coordinate_conversion.domain_ancillaries().values(), + ]: + if key: + if cr_key not in out: + out.append(cr_key) + elif ref not in out: + out.append(ref) + + continue + + return out + + def has_construct(self, identity=None): + """Whether a metadata construct exists. + + .. versionadded:: 3.4.0 + + .. seealso:: `construct`, `del_construct`, `get_construct`, + `set_construct` + + :Parameters: + + identity: optional + Select the construct. Must be + + * The identity or key of a metadata construct. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match via + `re.search`. + + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='T' + + *Parameter example:* + ``identity='measure:area'`` + + *Parameter example:* + ``identity='cell_area'`` + + *Parameter example:* + ``identity='long_name=Cell Area'`` + + *Parameter example:* + ``identity='cellmeasure1'`` + + :Returns: + + `bool` + `True` if the construct exists, otherwise `False`. + + **Examples:** + + >>> f = cf.example_field(0) + >>> print(f) + Field: specific_humidity (ncvar%q) + ---------------------------------- + Data : specific_humidity(latitude(5), longitude(8)) 1 + Cell methods : area: mean + Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north + : longitude(8) = [22.5, ..., 337.5] degrees_east + : time(1) = [2019-01-01 00:00:00] + + >>> f.has_construct('T') + True + >>> f.has_construct('longitude') + True + >>> f.has_construct('Z') + False + + """ + return self.construct(identity, default=None) is not None + + def iscyclic(self, identity, **kwargs): + """Returns True if the given axis is cyclic. + + .. versionadded:: 1.0 + + .. seealso:: `axis`, `cyclic`, `period` + + :Parameters: + + identity: + Select the domain axis construct by one of: + + * An identity or key of a 1-d coordinate construct that + whose data spans the domain axis construct. + + * A domain axis construct identity or key. 
+ + * The position of the domain axis construct in the field + construct's data. + + The *identity* parameter selects the domain axis as + returned by this call of the field construct's + `domain_axis` method: ``f.domain_axis(identity)``. + + kwargs: deprecated at version 3.0.0 + + :Returns: + + `bool` + True if the selected axis is cyclic, otherwise False. + + **Examples:** + + >>> f.iscyclic('X') + True + >>> f.iscyclic('latitude') + False + + >>> x = f.iscyclic('long_name=Latitude') + >>> x = f.iscyclic('dimensioncoordinate1') + >>> x = f.iscyclic('domainaxis2') + >>> x = f.iscyclic('key%domainaxis2') + >>> x = f.iscyclic('ncdim%y') + >>> x = f.iscyclic(2) + + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, "iscyclic", kwargs + ) # pragma: no cover + + axis = self.domain_axis(identity, key=True, default=None) + if axis is None: + raise ValueError( + f"Can't identify unique axis from identity {identity!r}" + ) + + return axis in self.cyclic() + + def match_by_rank(self, *ranks): + """Whether or not the number of domain axis constructs satisfies + conditions. + + .. versionadded:: 3.0.0 + + .. seealso:: `match`, `match_by_property`, + `match_by_identity`, `match_by_ncvar`, + `match_by_construct` + + :Parameters: + + ranks: optional + Define conditions on the number of domain axis + constructs. + + A condition is one of: + + * `int` + * a `Query` object + + The condition is satisfied if the number of domain + axis constructs equals the condition value. + + *Parameter example:* + To see if the field construct has 4 domain axis + constructs: ``4`` + + *Parameter example:* + To see if the field construct has at least 3 domain + axis constructs: ``cf.ge(3)`` + + :Returns: + + `bool` + Whether or not at least one of the conditions are met. + + **Examples:** + + >>> f.match_by_rank(3, 4) + + >>> f.match_by_rank(cf.wi(2, 4)) + + >>> f.match_by_rank(1, cf.gt(3)) + + """ + if not ranks: + return True + + n_domain_axes = len(self.domain_axes(todict=True)) + for rank in ranks: + ok = rank == n_domain_axes + if ok: + return True + + return False + + def _parse_axes(self, axes): + """Convert the given axes to their domain axis identifiers. + + .. versionadded:: 3.TODO:0 + + :Parameters: + + axes: + One or more axis specifications. + + If *axes* is a sequence then the returned identifiers + are in the same order. + + :Returns: + + `list` + The domain axis identifiers. + + """ + if isinstance(axes, str): + axes = (axes,) + else: + try: + len(axes) + except TypeError: + axes = (axes,) + + return [self.domain_axis(x, key=True) for x in axes] + + def replace_construct(self, *identity, new=None, copy=True, + **filter_kwargs): + """Replace a metadata construct. + + Replacement assigns the same construct key and, if applicable, the + domain axes of the original construct to the new, replacing + construct. + + .. versionadded:: 3.0.0 + + .. seealso:: `set_construct` + + :Parameters: + + identity: + Select TODO the metadata construct to be replaced by one of: + + * The identity or key of a metadata construct. + + * The identity or key of a domain axis construct that is + spanned by a metadata construct's data. + + A construct identity is specified by a string + (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, + etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or + a compiled regular expression + (e.g. ``re.compile('^atmosphere')``) that selects the + relevant constructs whose identities match via + `re.search`. 
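As a rough sketch of the matching rule just described, using only `re.search` over a hypothetical list of identities (standalone illustration, not a call into the library):

import re

# Identities as might be returned by a construct's `identities` method.
identities = ["time", "long_name=Time", "standard_name=time", "ncvar%t", "T"]

# A compiled regular expression value selects the construct if
# `re.search` matches any one of its identities.
pattern = re.compile("^tim")
selected = any(pattern.search(identity) for identity in identities)
print(selected)  # True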
+ + A construct has a number of identities, and is selected if + any of them match any of those provided. A construct's + identities are those returned by its `!identities` + method. In the following example, the construct ``x`` has + six identities: + + >>> x.identities() + ['time', + 'long_name=Time', + 'foo=bar', + 'standard_name=time', + 'ncvar%t', + 'T'] + + A construct key may optionally have the ``'key%'`` + prefix. For example ``'dimensioncoordinate2'`` and + ``'key%dimensioncoordinate2'`` are both acceptable keys. + + Note that in the output of a `print` call or `!dump` + method, a construct is always described by one of its + identities, and so this description may always be used as + an *identity* argument. + + *Parameter example:* + ``identity='Y'`` + + *Parameter example:* + ``identity='latitude'`` + + *Parameter example:* + ``identity='long_name=Latitude'`` + + *Parameter example:* + ``identity='dimensioncoordinate1'`` + + *Parameter example:* + ``identity='domainaxis2'`` + + *Parameter example:* + ``identity='ncdim%y'`` + + construct: + The new construct to replace that selected by the + *identity* parameter. + + copy: `bool`, optional + If True then set a copy of the new construct. By default + the construct is copied. + + :Returns: + + The construct that was replaced. + + **Examples:** + + >>> f.replace_construct('X', new=X_construct) + + """ + key, c = self.construct(*identity, item=True, **filter_kwargs) + + if not isinstance(new, c.__class__): + raise ValueError( + f"Can't replace a {c.__class__.__name__} construct " + f"with a {new.__class__.__name__} object" + ) + + axes = self.get_data_axes(key, None) + if axes is not None: + shape0 = getattr(c, "shape", None) + shape1 = getattr(new, "shape", None) + if shape0 != shape1: + raise ValueError("TODO bb") + + self.set_construct(new, key=key, axes=axes, copy=copy) + + return c + + def set_coordinate_reference( + self, coordinate_reference, key=None, parent=None, strict=True + ): + """Set a coordinate reference construct. + + By default, this is equivalent to using the `set_construct` + method. If, however, the *parent* parameter has been set to be + a field or domain construct that contains the new coordinate + reference construct then copies of its coordinate and domain + ancillary constructs will be referenced by the inserted + coordinate reference construct. + + .. versionadded:: 3.0.0 + + .. seealso:: `set_construct` + + :Parameters: + + coordinate_reference: `CoordinateReference` + The coordinate reference construct to be inserted. + + key: `str`, optional + The construct identifier to be used for the + construct. If not set then a new, unique identifier is + created automatically. If the identifier already + exisits then the exisiting construct will be replaced. + + *Parameter example:* + ``key='coordinatereference1'`` + + parent: `Field` or `Domain`, optional + A field or domain construct that contains the new + coordinate reference construct. + + strict: `bool`, optional + If False then allow non-strict identities for + identifying coordinate and domain ancillary metadata + constructs. + + :Returns: + + `str` + The construct identifier for the coordinate refernece + construct. + + """ + if parent is None: + return self.set_construct(coordinate_reference, key=key, copy=True) + + # Still here? 
+ ref = coordinate_reference.copy() + + coordinates = parent.coordinates(todict=True) + domain_ancillaries = parent.domain_ancillaries(todict=True) + + ckeys = [] + for value in coordinate_reference.coordinates(): + if value in coordinates: + identity = coordinates[value].identity(strict=strict) + ckeys.append(self.coordinate(identity, key=True, default=None)) + + ref.clear_coordinates() + ref.set_coordinates(ckeys) + + coordinate_conversion = coordinate_reference.coordinate_conversion + + dakeys = {} + for term, value in coordinate_conversion.domain_ancillaries().items(): + if value in domain_ancillaries: + identity = domain_ancillaries[value].identity(strict=strict) + dakeys[term] = self.domain_ancillary( + identity, key=True, default=None + ) + else: + dakeys[term] = None + + ref.coordinate_conversion.clear_domain_ancillaries() + ref.coordinate_conversion.set_domain_ancillaries(dakeys) + + return self.set_construct(ref, key=key, copy=False) + + # ---------------------------------------------------------------- + # Aliases + # ---------------------------------------------------------------- + def aux( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `auxiliary_coordinate`.""" + return self.auxiliary_coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs + ) + + def auxs(self, *identities, **filter_kwargs): + """Alias for `coordinates`.""" + return self.auxiliary_coordinates(*identities, **filter_kwargs) + + def axes(self, *identities, **filter_kwargs): + """Alias for `domain_axes`.""" + return self.domain_axes(*identities, **filter_kwargs) + + def axis( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `domain_axis`.""" + return self.domain_axis( + *identity, key=key, default=default, item=item, **filter_kwargs + ) + + def coord( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `coordinate`.""" + return self.coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs + ) + + def coords(self, *identities, **filter_kwargs): + """Alias for `coordinates`.""" + return self.coordinates(*identities, **filter_kwargs) + + def dim( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `dimension_coordinate`.""" + return self.dimension_coordinate( + *identity, key=key, default=default, item=item, **filter_kwargs + ) + + def dims(self, *identities, **filter_kwargs): + """Alias for `dimension_coordinates`.""" + return self.dimension_coordinates(*identities, **filter_kwargs) + + def domain_anc( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `domain_ancillary`.""" + return self.domain_ancillary( + *identity, key=key, default=default, item=item, **filter_kwargs + ) + + def domain_ancs(self, *identities, **filter_kwargs): + """Alias for `domain_ancillaries`.""" + return self.domain_ancillaries(*identities, **filter_kwargs) + + def key(self, *identity, default=ValueError(), **filter_kwargs): + """Alias for `construct_key`.""" + return self.construct(*identity, default=default, key=True, + **filter_kwargs) + + def measure( + self, + *identity, + key=False, + default=ValueError(), + item=False, + **filter_kwargs, + ): + """Alias for `cell_measure`.""" + return self.cell_measure( + *identity, + key=key, + default=default, + item=item, + **filter_kwargs, + ) + + def 
measures(self, *identities, **filter_kwargs): + """Alias for `cell_measures`.""" + return self.cell_measures(*identities, **filter_kwargs) + + def ref( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Alias for `coordinate_reference`.""" + return self.coordinate_reference( + *identity, + key=key, + default=default, + item=item, + **filter_kwargs, + ) + + def refs(self, *identities, **filter_kwargs): + """Alias for `coordinate_references`.""" + return self.coordinate_references(*identities, **filter_kwargs) + + +def _create_auxiliary_mask_component(mask_shape, ind, compress): + """Create an auxiliary mask component + + .. versionadded:: 3.TODO.0 + + :Parameters: + + mask_shape: `tuple` + The shape of the mask component to be created. + + *Parameter example* + ``mask_shape=(3,)`` + + *Parameter example* + ``mask_shape=(9, 10)`` + + ind: sequnce of `list` + As returned by a single argument call of + ``np[.ma].where(....)``. + + compress: `bool` + If True then remove whole slices which only contain masked + points. + + :Returns: + + `Data` + The mask array. + + **Examples:** + + >>> f = cf.{{class}}() + >>> d = _create_auxiliary_mask_component( + ... (4,), ([0, 3, 1],) + ... ) + >>> print(d.array) + [False False True False] + >>> d = f._create_auxiliary_mask_component( + ... (4, 6), ([0, 3, 1], [5, 3, 2]) + ... ) + >>> print(d.array) + [[ True True True True True False] + [ True True False True True True] + [ True True True True True True] + [ True True True False True True]] + + """ + # Note that, for now, auxiliary_mask has to be numpy array (rather + # than a cf.Data object) because we're going to index it with + # fancy indexing which a cf.Data object might not support - namely + # a non-monotonic list of integers. + auxiliary_mask = np.ones(mask_shape, dtype=bool) + + auxiliary_mask[tuple(ind)] = False + + # For compressed indices, remove slices which only contain masked + # points. + if compress: + for i, (index, n) in enumerate(zip(ind, mask_shape)): + index = set(index) + if len(index) == n: + continue + + auxiliary_mask = auxiliary_mask.take(sorted(index), axis=i) + + return Data(auxiliary_mask) diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index 76f5373382..3e5a8c6133 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -932,7 +932,6 @@ def match_by_property(self, *mode, **properties): "Positional argument, if provided, must one of 'or', " "'and'" ) - # --- End: if if not properties: return True @@ -1131,7 +1130,6 @@ def match(self, *identities, **kwargs): _DEPRECATION_ERROR_DICT( "Use 'match_by_*' methods instead." ) # pragma: no cover - # --- End: for return self.match_by_identity(*identities) @@ -1166,5 +1164,3 @@ def getprop(self, prop): self, "getprop", "Use method 'get_property' instead" ) # pragma: no cover - -# --- End: class diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 691e1c8dcc..0f61930ced 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -4763,21 +4763,22 @@ def identity( def identities(self, generator=False, **kwargs): """Return all possible identities. - The identities comprise: + The identities comprise: - * The "standard_name" property. - * The "id" attribute, preceded by ``'id%'``. - * The "cf_role" property, preceded by ``'cf_role='``. - * The "axis" property, preceded by ``'axis='``. - * The "long_name" property, preceded by ``'long_name='``. 
- * All other properties (including "standard_name"), preceded by - the property name and an ``'='``. - * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). - * The netCDF variable name, preceded by ``'ncvar%'``. + * The "standard_name" property. + * The "id" attribute, preceded by ``'id%'``. + * The "cf_role" property, preceded by ``'cf_role='``. + * The "axis" property, preceded by ``'axis='``. + * The "long_name" property, preceded by ``'long_name='``. + * All other properties (including "standard_name"), preceded by + the property name and an ``'='``. + * The coordinate type (``'X'``, ``'Y'``, ``'Z'`` or ``'T'``). + * The netCDF variable name, preceded by ``'ncvar%'``. - .. versionadded:: 3.0.0 + .. versionadded:: 3.0.0 + + .. seealso:: `id`, `identity` - .. seealso:: `id`, `identity` TODO :Returns: @@ -4800,14 +4801,14 @@ def identities(self, generator=False, **kwargs): 'ncvar%tas'] """ - id_identity = "" - i = getattr(self, "id", None) - if i is not None: - id_identity = (f"id%{i}",) - identities = super().identities(generator=True, **kwargs) + + i = getattr(self, "id", None) + if i is None: + g = identities + else: + g = chain((f"id%{i}",), identities) - g = chain(id_identity, identities) if generator: return g diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index 55f06c0d14..97d699d4cb 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -440,13 +440,13 @@ def test_Field_replace_construct(self): "ncvar%a", ): for copy in (True, False): - f.replace_construct(x, f.construct(x), copy=copy) + f.replace_construct(x, new=f.construct(x), copy=copy) with self.assertRaises(Exception): - f.replace_construct("grid_longitude", f.construct("latitude")) + f.replace_construct("grid_longitude", new=f.construct("latitude")) with self.assertRaises(Exception): - f.replace_construct("grid_longitude", f.construct("grid_latitude")) + f.replace_construct("grid_longitude", new=f.construct("grid_latitude")) def test_Field_allclose(self): f = self.f.copy() @@ -1953,22 +1953,12 @@ def test_Field_coordinate(self): f = self.f for identity in ( - "domainaxis2", "latitude", "grid_longitude", "auxiliarycoordinate1", "dimensioncoordinate1", ): - if identity == "domainaxis2": - key, c = f.dimension_coordinates( - filter_by_axis=(identity,), axis_mode="exact", todict=True - ).popitem() - - else: - key, c = f.construct_item(identity) - - self.assertTrue(f.coordinate(identity).equals(c, verbose=2)) - self.assertEqual(f.coordinate(identity, key=True), key) + key, c = f.construct(identity, item=True) with self.assertRaises(ValueError): f.coord("long_name:qweRty") @@ -2038,7 +2028,7 @@ def test_Field_coordinate_reference(self): cr = g.coordinate_reference( "grid_mapping_name:rotated_latitude_longitude" ) - f.set_coordinate_reference(cr, field=g) + f.set_coordinate_reference(cr, parent=g) self.assertEqual(len(f.coordinate_references()), 1) cr = g.coordinate_reference( @@ -2048,7 +2038,7 @@ def test_Field_coordinate_reference(self): cr.coordinate_conversion.set_domain_ancillary( "foo", "domainancillary99" ) - f.set_coordinate_reference(cr, field=g) + f.set_coordinate_reference(cr, parent=g) self.assertEqual(len(f.coordinate_references()), 2) self.assertEqual(len(f.domain_ancillaries()), 3) @@ -2068,28 +2058,23 @@ def test_Field_dimension_coordinate(self): f = self.f for identity in ( - "domainaxis2", "grid_latitude", "X", "dimensioncoordinate1", ): - if identity == "domainaxis2": - key, c = f.dimension_coordinates( - filter_by_axis=(identity,), axis_mode="exact", todict=True - ).popitem() - 
elif identity == "X": - key, c = f.construct_item("grid_longitude") + if identity == "X": + key, c = f.construct("grid_longitude", item=True) else: - key, c = f.construct_item(identity) + key, c = f.construct(identity, item=True) self.assertTrue( f.dimension_coordinate(identity).equals(c, verbose=2) ) self.assertEqual(f.dimension_coordinate(identity, key=True), key) - i = f.dimension_coordinate(identity, item=True) - self.assertEqual(i[0], key) - self.assertTrue(i[1].equals(c)) + k, v = f.dimension_coordinate(identity, item=True) + self.assertEqual(k, key) + self.assertTrue(v.equals(c)) self.assertIsNone( f.dimension_coordinate("long_name=qwerty:asd", default=None) From 57998b3872d2d569ff025ef2b90094c1d398ca2a Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 13 Apr 2021 23:06:57 +0100 Subject: [PATCH 28/53] devs --- cf/constructs.py | 22 +- cf/docstring/docstring.py | 59 +- cf/field.py | 2982 ++---------------------------------- cf/mixin/coordinate.py | 5 +- cf/mixin/fielddomain.py | 661 +++----- cf/mixin/properties.py | 1 - cf/mixin/propertiesdata.py | 2 +- cf/test/test_Field.py | 4 +- 8 files changed, 415 insertions(+), 3321 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index eab1b48ffa..fece7ecfe3 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -168,7 +168,7 @@ def _filter_by_identity(self, arg, todict, _config, identities): if len(ctypes) == len(identities): # All identities are coordinate types (X, T, Y or Z) return self._filter_by_coordinate_type(arg, todict, ctypes) - + config = {"identities_kwargs": {"ctypes": ctypes}} if _config: config.update(_config) @@ -213,19 +213,19 @@ def _short_iteration(cls, x): If this method returns True then only ther first identity return by the construct's `!identities` method will be checked. - + See `_filter_by_identity` for details. - + .. versionadded:: (cfdm) 1.8.9.0 - + :Parameters: - + x: `str` The value against which the construct's identities are being compared. 
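For illustration, a standalone sketch of this short-circuit rule (the function name `short_iteration` is hypothetical; it mirrors the comparison rules applied by the class method):

def short_iteration(x):
    # Only plain, name-like strings allow checking just the first identity.
    if not isinstance(x, str):
        return False

    if x in "XTYZ" or x.startswith("measure:") or x.startswith("id%"):
        return True

    return "=" not in x and ":" not in x and "%" not in x

print(short_iteration("T"))                   # True
print(short_iteration("latitude"))            # True
print(short_iteration("long_name=Latitude"))  # False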
- + :Returns: - + `bool` Returns `True` if a construct's `identities` method is to short circuit after the first identity is @@ -234,8 +234,8 @@ def _short_iteration(cls, x): """ if not isinstance(x, str): return False - - if x in "XTYZ" or x.startswith('measure:') or x.startswith('id%'): + + if x in "XTYZ" or x.startswith("measure:") or x.startswith("id%"): return True - - return "=" not in x and ":" not in x and "%" not in x + + return "=" not in x and ":" not in x and "%" not in x diff --git a/cf/docstring/docstring.py b/cf/docstring/docstring.py index aba58aebc1..b4c6c92cf8 100644 --- a/cf/docstring/docstring.py +++ b/cf/docstring/docstring.py @@ -28,7 +28,7 @@ """ _docstring_substitution_definitions = { # ---------------------------------------------------------------- - # General docstring susbstitutions + # General susbstitutions (not indent-dependent) # ---------------------------------------------------------------- "{{repr}}": "CF ", "{{formula terms links}}": """See the parametric vertical coordinate sections of the CF @@ -40,31 +40,54 @@ `Appendix D: Parametric Vertical Coordinates `_""", # ---------------------------------------------------------------- - # Parameter description substitutions + # Class description susbstitutions (1 level of indentation) # ---------------------------------------------------------------- + # ---------------------------------------------------------------- + # Method description susbstitutions (2 levels of indentation) + # ---------------------------------------------------------------- + # ---------------------------------------------------------------- + # Method description susbstitutions (3 levels of indentataion) + # ---------------------------------------------------------------- + # i: deprecated at version 3.0.0 "{{i: deprecated at version 3.0.0}}": """i: deprecated at version 3.0.0 - Use the *inplace* parameter instead.""", + Use the *inplace* parameter instead.""", + # default_to_zero: `bool`, optional "{{default_to_zero: `bool`, optional}}": """default_to_zero: `bool`, optional - If False then do not assume that missing terms have a - value of zero. By default a missing term is assumed to be - zero.""", + If False then do not assume that missing terms have a + value of zero. By default a missing term is assumed to + be zero.""", + # key: `bool`, optional}} + "{{key: `bool`, optional}}": """key: `bool`, optional + If True then return the selected construct + identifier. By default the construct itself is + returned.""", + # item: `bool`, optional + "{{item: `bool`, optional}}": """item: `bool`, optional + If True then return the selected construct identifier + and the construct itself. By default the construct + itself is returned.""", # ---------------------------------------------------------------- - # Returns substitutions + # Method description susbstitutions (4 levels of indentataion) # ---------------------------------------------------------------- + # Returns formula} "{{Returns formula}}": """5-`tuple` - * The standard name of the parametric coordinates. + * The standard name of the parametric coordinates. - * The standard name of the computed non-parametric - coordinates. + * The standard name of the computed non-parametric + coordinates. - * The computed non-parametric coordinates in a - `DomainAncillary` construct. + * The computed non-parametric coordinates in a + `DomainAncillary` construct. - * A tuple of the domain axis construct keys for the - dimensions of the computed non-parametric coordinates. 
+ * A tuple of the domain axis construct keys for the + dimensions of the computed non-parametric + coordinates. - * A tuple containing the construct key of the vertical - domain axis. If the vertical axis does not appear in the - computed non-parametric coodinates then this an empty - tuple.""", + * A tuple containing the construct key of the vertical + domain axis. If the vertical axis does not appear in + the computed non-parametric coodinates then this an + empty tuple.""", + # Returns construct + "{{Returns construct}}": """The selected construct, or its identifier if *key* is + True, or a tuple of both if *item* is True.""", } diff --git a/cf/field.py b/cf/field.py index 0d983c693e..fbf78f82df 100644 --- a/cf/field.py +++ b/cf/field.py @@ -99,7 +99,7 @@ # -------------------------------------------------------------------- # Commonly used units # -------------------------------------------------------------------- -#_units_degrees = Units("degrees") +# _units_degrees = Units("degrees") _units_radians = Units("radians") _units_metres = Units("m") _units_1 = Units("1") @@ -876,6 +876,44 @@ def _is_broadcastable(self, shape): return True + def _axis_positions(self, axes, parse=True, return_axes=False): + """Convert the given axes to their positions in the data. + Any domain axes that are not spanned by the data are ignored. + If there is no data then an empty list is returned. + .. versionadded:: 3.TODO.0 + :Parameters: + axes: (sequence of) `str` or `int` + The axes to be converted. + {{domain axis selection}} + parse: `bool`, optional + If False then do not parse the *axes*. Parsing should + always occur unless the given *axes* are the output of + a previous call to `parse_axes`. By default *axes* is + parsed by `_parse_axes`. + return_axes: `bool`, optional + If True then also return the domain axis identifiers + corresponding to the positions. + :Returns: + `list` [, `list`] + The domain axis identifiers. If *return_axes* is True + then also return the corresponding domain axis + identifiers. + """ + data_axes = self.get_data_axes(default=None) + if data_axes is None: + return [] + + if parse: + axes = self._parse_axes(axes) + + axes = [a for a in axes if a in data_axes] + positions = [data_axes.index(a) for a in axes] + + if return_axes: + return positions, axes + + return positions + def _binary_operation_old(self, other, method): """Implement binary arithmetic and comparison operations on the master data array with metadata-aware broadcasting. @@ -3900,7 +3938,7 @@ def _regrid_copy_coordinate_references(self, dst, dst_axis_keys): if axes and set(axes).issubset(dst_axis_keys): # This coordinate reference's coordinates span the X # and/or Y axes - self.set_coordinate_reference(ref, field=dst, strict=True) + self.set_coordinate_reference(ref, parent=dst, strict=True) @classmethod def _regrid_use_bounds(cls, method): @@ -8245,590 +8283,6 @@ def histogram(self, digitized): """ raise RuntimeError("Use cf.histogram instead.") - # def del_construct(self, identity, default=ValueError()): - # """Remove a metadata construct. - # - # If a domain axis construct is selected for removal then it can't - # be spanned by any metadata construct data, nor the field - # construct's data; nor be referenced by any cell method constructs. - # - # However, a domain ancillary construct may be removed even if it is - # referenced by coordinate reference construct. In this case the - # reference is replace with `None`. - # - # .. versionadded:: 3.0.0 - # - # .. 
seealso:: `constructs`, `get_construct`, `has_construct`, - # `set_construct`, `del_domain_axis`, - # `del_coordinate_reference` - # - # :Parameters: - # - # identity: - # Select the construct to removed. Must be - # - # * The identity or key of a metadata construct. - # - # A construct identity is specified by a string - # (e.g. ``'latitude'``, ``'long_name=time'``, - # ``'ncvar%lat'``, etc.); a `Query` object - # (e.g. ``cf.eq('longitude')``); or a compiled regular - # expression (e.g. ``re.compile('^atmosphere')``) that - # selects the relevant constructs whose identities match via - # `re.search`. - # - # A construct has a number of identities, and is selected if - # any of them match any of those provided. A construct's - # identities are those returned by its `!identities` - # method. In the following example, the construct ``x`` has - # six identities: - # - # >>> x.identities() - # ['time', - # 'long_name=Time', - # 'foo=bar', - # 'standard_name=time', - # 'ncvar%t', - # 'T'] - # - # A construct key may optionally have the ``'key%'`` - # prefix. For example ``'dimensioncoordinate2'`` and - # ``'key%dimensioncoordinate2'`` are both acceptable keys. - # - # Note that in the output of a `print` call or `!dump` - # method, a construct is always described by one of its - # identities, and so this description may always be used as - # an *identity* argument. - # - # *Parameter example:* - # ``identity='measure:area'`` - # - # *Parameter example:* - # ``identity='cell_area'`` - # - # *Parameter example:* - # ``identity='long_name=Cell Area'`` - # - # *Parameter example:* - # ``identity='cellmeasure1'`` - # - # default: optional - # Return the value of the *default* parameter if the - # construct can not be removed, or does not exist. If set to - # an `Exception` instance then it will be raised instead. - # - # :Returns: - # - # The removed metadata construct. - # - # **Examples:** - # - # >>> f.del_construct('X') - # - # - # """ - # key = self.construct_key(identity, default=None) - # if key is None: - # return self._default( - # default, - # "Can't identify construct to delete from identity " - # f"{identity!r}", - # ) - # - # return super().del_construct(key, default=default) - -# def del_coordinate_reference( -# self, identity=None, construct=None, default=ValueError() -# ): -# """Remove a coordinate reference construct and all of its domain -# ancillary constructs. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `del_construct` -# -# :Parameters: -# -# identity: optional -# Select the coordinate reference construct by one of: -# -# * The identity or key of a coordinate reference -# construct. -# -# A construct identity is specified by a string -# (e.g. ``'grid_mapping_name:latitude_longitude'``, -# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a -# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or -# a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# two identities: -# -# >>> x.identities() -# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] -# -# A identity's prefix of ``'grid_mapping_name:'`` or -# ``'standard_name:'`` may be omitted -# (e.g. 
``'standard_name:atmosphere_hybrid_height_coordinate'`` -# and ``'atmosphere_hybrid_height_coordinate'`` are both -# acceptable identities). -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'coordinatereference2'`` and -# ``'key%coordinatereference2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` -# -# *Parameter example:* -# ``identity='grid_mapping_name:rotated_latitude_longitude'`` -# -# *Parameter example:* -# ``identity='transverse_mercator'`` -# -# *Parameter example:* -# ``identity='coordinatereference1'`` -# -# *Parameter example:* -# ``identity='key%coordinatereference1'`` -# -# *Parameter example:* -# ``identity='ncvar%lat_lon'`` -# -# construct: optional -# The coordinate reference construct to remove. This may -# alternatively be specified via the *identity* parameter. -# -# default: optional -# Return the value of the *default* parameter if the -# construct can not be removed, or does not exist. If set to -# an `Exception` instance then it will be raised instead. -# -# :Returns: -# -# The removed coordinate reference construct. -# -# **Examples:** -# -# >>> f.del_coordinate_reference('rotated_latitude_longitude') -# -# -# """ -# if construct is None: -# if identity is None: -# raise ValueError("TODO") -# -# key = self.coordinate_reference(identity, key=True, default=None) -# if key is None: -# return self._default( -# default, -# f"Can't identify construct from {identity!r}", -# ) -# -# ref = self.del_construct(key) -# -# for ( -# da_key -# ) in ref.coordinate_conversion.domain_ancillaries().values(): -# self.del_construct(da_key, default=None) -# -# return ref -# elif identity is not None: -# raise ValueError("TODO") -# -# out = [] -# -# c_key = self.construct(construct, key=True, default=None) -# if c_key is None: -# return self._default( -# default, f"Can't identify construct from {construct!r}" -# ) -# -# for key, ref in tuple(self.coordinate_references(todict=True).items()): -# if c_key in ref.coordinates(): -# self.del_coordinate_reference( -# key, construct=None, default=default -# ) -# out.append(ref) -# continue -# -# if ( -# c_key -# in ref.coordinate_conversion.domain_ancillaries().values() -# ): -# self.del_coordinate_reference( -# key, construct=None, default=default -# ) -# out.append(ref) -# continue -# -# return out -# -# def del_domain_axis( -# self, identity=None, squeeze=False, default=ValueError() -# ): -# """Remove a domain axis construct. -# -# In general, a domain axis construct can only be removed if it is -# not spanned by any construct's data. However, a size 1 domain axis -# construct can be removed in any case if the *squeeze* parameter is -# set to `True`. In this case, a metadata construct whose data spans -# only the removed domain axis construct will also be removed. -# -# .. versionadded:: 3.6.0 -# -# .. seealso:: `del_construct` -# -# :Parameters: -# -# identity: -# Select the domain axis construct by one of: -# -# * An identity or key of a 1-d coordinate construct that -# whose data spans the domain axis construct. -# -# * A domain axis construct identity or key. -# -# * The position of the domain axis construct in the field -# construct's data. -# -# A construct identity is specified by a string -# (e.g. 
``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); or a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time' -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time' -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'dimensioncoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='dimensioncoordinate1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity='key%domainaxis2'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# *Parameter example:* -# ``identity=2`` -# -# squeeze: `bool`, optional -# If True then allow the removal of a size 1 domain axis -# construct that is spanned by any data array and squeeze -# the corresponding dimension from those arrays. -# -# default: optional -# Return the value of the *default* parameter if the -# construct can not be removed, or does not exist. If set to -# an `Exception` instance then it will be raised instead. -# -# :Returns: -# -# `DomainAxis` -# The removed domain axis construct. 
-# -# **Examples:** -# -# >>> f = cf.example_field(0) -# >>> g = f[0] -# Field: specific_humidity (ncvar%q) -# ---------------------------------- -# Data : specific_humidity(latitude(1), longitude(8)) 1 -# Cell methods : area: mean -# Dimension coords: latitude(1) = [-75.0] degrees_north -# : longitude(8) = [22.5, ..., 337.5] degrees_east -# : time(1) = [2019-01-01 00:00:00] -# >>> g.del_domain_axis('Y', squeeze=True) -# -# >>> print(g) -# Field: specific_humidity (ncvar%q) -# ---------------------------------- -# Data : specific_humidity(longitude(8)) 1 -# Cell methods : area: mean -# Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east -# : time(1) = [2019-01-01 00:00:00] -# >>> g.del_domain_axis('T', squeeze=True) -# -# >>> print(g) -# Field: specific_humidity (ncvar%q) -# ---------------------------------- -# Data : specific_humidity(longitude(8)) 1 -# Cell methods : area: mean -# Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east -# -# """ -# dakey = self.domain_axis(identity, key=True) -# domain_axis = self.constructs[dakey] -# -# if not squeeze: -# return self.del_construct(dakey) -# -# if dakey in self.get_data_axes(default=()): -# self.squeeze(dakey, inplace=True) -# -# for ckey, construct in self.constructs.filter_by_data( -# todict=True -# ).items(): -# data = construct.get_data(None, _fill_value=False) -# if data is None: -# continue -# -# construct_axes = self.get_data_axes(ckey) -# if dakey not in construct_axes: -# continue -# -# i = construct_axes.index(dakey) -# construct.squeeze(i, inplace=True) -# construct_axes = list(construct_axes) -# construct_axes.remove(dakey) -# self.set_data_axes(axes=construct_axes, key=ckey) -# -# if not construct_axes: -# self.del_construct(ckey) -# -# return domain_axis - -# def get_coordinate_reference( -# self, identity=None, key=False, construct=None, default=ValueError() -# ): -# """Returns selected coordinate reference constructs. -# -# .. versionadded:: 3.0.2 -# -# .. seealso:: `construct` -# -# :Parameters: -# -# identity: -# Select the coordinate reference construct by one of: -# -# * The identity or key of a coordinate reference -# construct. -# -# A construct identity is specified by a string -# (e.g. ``'grid_mapping_name:latitude_longitude'``, -# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a -# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or -# a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# two identities: -# -# >>> x.identities() -# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] -# -# A identity's prefix of ``'grid_mapping_name:'`` or -# ``'standard_name:'`` may be omitted -# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` -# and ``'atmosphere_hybrid_height_coordinate'`` are both -# acceptable identities). -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'coordinatereference2'`` and -# ``'key%coordinatereference2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. 
-# -# *Parameter example:* -# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` -# -# *Parameter example:* -# ``identity='grid_mapping_name:rotated_latitude_longitude'`` -# -# *Parameter example:* -# ``identity='transverse_mercator'`` -# -# *Parameter example:* -# ``identity='coordinatereference1'`` -# -# *Parameter example:* -# ``identity='key%coordinatereference1'`` -# -# *Parameter example:* -# ``identity='ncvar%lat_lon'`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `CoordinateReference` or `str` -# The selected coordinate reference construct, or its key. -# -# **Examples:** -# -# TODO -# -# """ -# if construct is None: -# return self.coordinate_reference( -# identity=identity, key=key, default=default -# ) -# -# out = [] -# -# c_key = self.construct(construct, key=True, default=None) -# if c_key is None: -# return self._default( -# default, f"Can't identify construct from {construct!r}" -# ) -# -# for cr_key, ref in tuple( -# self.coordinate_references(todict=True).items() -# ): -# if c_key in [ -# ref.coordinates(), -# ref.coordinate_conversion.domain_ancillaries().values(), -# ]: -# if key: -# if cr_key not in out: -# out.append(cr_key) -# elif ref not in out: -# out.append(ref) -# -# continue -# -# return out -# -# def set_coordinate_reference( -# self, coordinate_reference, key=None, field=None, strict=True -# ): -# """Set a coordinate reference construct. -# -# By default, this is equivalent to using the `set_construct` -# method. If, however, the *field* parameter has been set then it is -# assumed to be a field construct that contains the new coordinate -# reference construct. In this case, existing coordinate and domain -# ancillary constructs will be referenced by the inserted coordinate -# reference construct, based on those which are referenced from the -# other parent field construct (given by the *field* parameter). -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `set_construct` -# -# :Parameters: -# -# coordinate_reference: `CoordinateReference` -# The coordinate reference construct to be inserted. -# -# key: `str`, optional -# The construct identifier to be used for the construct. If -# not set then a new, unique identifier is created -# automatically. If the identifier already exists then the -# existing construct will be replaced. -# -# *Parameter example:* -# ``key='coordinatereference1'`` -# -# field: `Field`, optional -# A parent field construct that contains the new coordinate -# reference construct. -# -# strict: `bool`, optional -# If False then allow non-strict identities for -# identifying coordinate and domain ancillary metadata -# constructs. -# -# :Returns: -# -# `str` -# The construct identifier for the coordinate reference -# construct. -# -# """ -# if field is None: -# return self.set_construct(coordinate_reference, key=key, copy=True) -# -# # Still here? 
-# ref = coordinate_reference.copy() -# -# coordinates = field.coordinates(todict=True) -# domain_ancillaries = field.domain_ancillaries(todict=True) -# -# ckeys = [] -# for value in coordinate_reference.coordinates(): -# if value in coordinates: -# identity = coordinates[value].identity(strict=strict) -# ckeys.append(self.coordinate(identity, key=True, default=None)) -# -# ref.clear_coordinates() -# ref.set_coordinates(ckeys) -# -# coordinate_conversion = coordinate_reference.coordinate_conversion -# -# dakeys = {} -# for term, value in coordinate_conversion.domain_ancillaries().items(): -# if value in domain_ancillaries: -# identity = domain_ancillaries[value].identity(strict=strict) -# dakeys[term] = self.domain_ancillary( -# identity, key=True, default=None -# ) -# else: -# dakeys[term] = None -# -# ref.coordinate_conversion.clear_domain_ancillaries() -# ref.coordinate_conversion.set_domain_ancillaries(dakeys) -# -# return self.set_construct(ref, key=key, copy=False) - @_deprecated_kwarg_check("i") @_manage_log_level_via_verbosity def collapse( @@ -12374,99 +11828,6 @@ def _update_cell_methods( f" Modified cell methods = {self.cell_methods().ordered()}" ) # pragma: no cover - @_deprecated_kwarg_check("axes") - def direction(self, identity, axes=None, **kwargs): - """Whether or not a domain axis is increasing. - - An domain axis is considered to be increasing if its dimension - coordinate values are increasing in index space or if it has no - dimension coordinate. - - .. seealso:: `directions` - - :Parameters: - - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - The *identity* parameter selects the domain axis as - returned by this call of the field construct's - `domain_axis` method: ``f.domain_axis(identity)``. - - axes: deprecated at version 3.0.0 - Use the *identity* parameter instead. - - size: deprecated at version 3.0.0 - - kwargs: deprecated at version 3.0.0 - - :Returns: - - `bool` - Whether or not the domain axis is increasing. - - **Examples:** - - >>> print(f.dimension_coordinate('X').array) - array([ 0 30 60]) - >>> f.direction('X') - True - >>> g = f.flip('X') - >>> g.direction('X') - False - - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "direction", kwargs - ) # pragma: no cover - - axis = self.domain_axis(identity, key=True, default=None) - if axis is None: - return True - - for key, coord in self.dimension_coordinates(todict=True).items(): - if axis == self.get_data_axes(key)[0]: - return coord.direction() - - return True - - def directions(self): - """Return a dictionary mapping all domain axes to their - directions. - - .. seealso:: `direction` - - :Returns: - - `dict` - A dictionary whose key/value pairs are domain axis keys - and their directions. - - **Examples:** - - >>> d.directions() - {'dim1': True, 'dim0': False} - - """ - out = {key: True for key in self.domain_axes(todict=True)} - - for key, dc in self.dimension_coordinates(todict=True).items(): - direction = dc.direction() - if not direction: - axis = self.get_data_axes(key)[0] - out[axis] = dc.direction() - - return out - @_inplace_enabled(default=False) def insert_dimension(self, axis, position=0, inplace=False): """Insert a size 1 axis into the data array. 
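For orientation, a hedged usage sketch of the `direction` and `directions` behaviour documented above, assuming a field like ``cf.example_field(0)`` (axis keys and dictionary ordering are indicative):

>>> import cf
>>> f = cf.example_field(0)
>>> f.direction('X')        # longitude coordinate values increase
True
>>> f.flip('X').direction('X')
False
>>> f.directions()          # every domain axis mapped to a boolean
{'domainaxis0': True, 'domainaxis1': True, 'domainaxis2': True}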
@@ -13863,62 +13224,6 @@ def match_by_construct(self, *identities, OR=False, **conditions): return True - def match_by_rank(self, *ranks): - """Whether or not the number of domain axis constructs satisfies - conditions. - - .. versionadded:: 3.0.0 - - .. seealso:: `match`, `match_by_property`, `match_by_identity`, - `match_by_ncvar`, `match_by_units`, - `match_by_construct` - - :Parameters: - - ranks: optional - Define conditions on the number of domain axis constructs. - - A condition is one of: - - * `int` - * a `Query` object - - The condition is satisfied if the number of domain axis - constructs equals the condition value. - - *Parameter example:* - To see if the field construct has 4 domain axis - constructs: ``4`` - - *Parameter example:* - To see if the field construct has at least 3 domain axis - constructs: ``cf.ge(3)`` - - :Returns: - - `bool` - Whether or not at least one of the conditions are met. - - **Examples:** - - >>> f.match_by_rank(3, 4) - - >>> f.match_by_rank(cf.wi(2, 4)) - - >>> f.match_by_rank(1, cf.gt(3)) - - """ - if not ranks: - return True - - n_domain_axes = len(self.domain_axes(todict=True)) - for rank in ranks: - ok = rank == n_domain_axes - if ok: - return True - - return False - @_inplace_enabled(default=False) def moving_window( self, @@ -15014,205 +14319,6 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): return f - @_deprecated_kwarg_check("i") - @_inplace_enabled(default=False) - def anchor( - self, axis, value, inplace=False, dry_run=False, i=False, **kwargs - ): - """Roll a cyclic axis so that the given value lies in the first - coordinate cell. - - A unique axis is selected with the *axes* and *kwargs* parameters. - - .. versionadded:: 1.0 - - .. seealso:: `axis`, `cyclic`, `iscyclic`, `period`, `roll` - - :Parameters: - - axis: - The cyclic axis to be rolled, defined by that which would - be selected by passing the given axis description to a - call of the field construct's `domain_axis` method. For - example, for a value of ``'X'``, the domain axis construct - returned by ``f.domain_axis('X')`` is selected. - - value: - Anchor the dimension coordinate values for the selected - cyclic axis to the *value*. May be any numeric scalar - object that can be converted to a `Data` object (which - includes `numpy` and `Data` objects). If *value* has units - then they must be compatible with those of the dimension - coordinates, otherwise it is assumed to have the same - units as the dimension coordinates. The coordinate values - are transformed so that *value* is "equal to or just - before" the new first coordinate value. More specifically: - - * Increasing dimension coordinates with positive period, - P, are transformed so that *value* lies in the - half-open range (L-P, F], where F and L are the - transformed first and last coordinate values, - respectively. - - .. - - * Decreasing dimension coordinates with positive period, - P, are transformed so that *value* lies in the - half-open range (L+P, F], where F and L are the - transformed first and last coordinate values, - respectively. - - *Parameter example:* - If the original dimension coordinates are ``0, 5, ..., - 355`` (evenly spaced) and the period is ``360`` then - ``value=0`` implies transformed coordinates of ``0, 5, - ..., 355``; ``value=-12`` implies transformed - coordinates of ``-10, -5, ..., 345``; ``value=380`` - implies transformed coordinates of ``380, 385, ..., - 715``. 
- - *Parameter example:* - If the original dimension coordinates are ``355, 350, - ..., 0`` (evenly spaced) and the period is ``360`` then - ``value=355`` implies transformed coordinates of ``355, - 350, ..., 0``; ``value=0`` implies transformed - coordinates of ``0, -5, ..., -355``; ``value=392`` - implies transformed coordinates of ``390, 385, ..., - 30``. - - {{inplace: `bool`, optional}} - - dry_run: `bool`, optional - Return a dictionary of parameters which describe the - anchoring process. The field is not changed, even if *i* - is True. - - {{i: deprecated at version 3.0.0}} - - kwargs: deprecated at version 3.0.0 - - :Returns: - - `Field` - The rolled field. - - **Examples:** - - >>> f.iscyclic('X') - True - >>> f.dimension_coordinate('X').data - TODO - >>> print(f.dimension_coordinate('X').array) - [ 0 45 90 135 180 225 270 315] - >>> g = f.anchor('X', 230) - >>> print(g.dimension_coordinate('X').array) - [270 315 0 45 90 135 180 225] - >>> g = f.anchor('X', cf.Data(590, 'degreesE')) - >>> print(g.dimension_coordinate('X').array) - [630 675 360 405 450 495 540 585] - >>> g = f.anchor('X', cf.Data(-490, 'degreesE')) - >>> print(g.dimension_coordinate('X').array) - [-450 -405 -720 -675 -630 -585 -540 -495] - - >>> f.iscyclic('X') - True - >>> f.dimension_coordinate('X').data - - >>> f.anchor('X', 10000).dimension_coordinate('X').data - - >>> d = f.anchor('X', 10000, dry_run=True) - >>> d - {'axis': 'domainaxis2', - 'nperiod': , - 'roll': 28} - >>> (f.roll(d['axis'], d['roll']).dimension_coordinate( - ... d['axis']) + d['nperiod']).data - - - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "anchor", kwargs - ) # pragma: no cover - - axis = self.domain_axis(axis, key=True) - - if dry_run: - f = self - else: - f = _inplace_enabled_define_and_cleanup(self) - - dim = f.dimension_coordinate(filter_by_axis=(axis,), default=None) - if dim is None: - raise ValueError( - "Can't shift non-cyclic " - f"{f.constructs.domain_axis_identity(axis)!r} axis" - ) - - period = dim.period() - if period is None: - raise ValueError(f"Cyclic {dim.identity()!r} axis has no period") - - value = Data.asdata(value) - if not value.Units: - value = value.override_units(dim.Units) - elif not value.Units.equivalent(dim.Units): - raise ValueError( - f"Anchor value has incompatible units: {value.Units!r}" - ) - - axis_size = f.domain_axes(todict=True)[axis].get_size() - if axis_size <= 1: - # Don't need to roll a size one axis - if dry_run: - return {"axis": axis, "roll": 0, "nperiod": 0} - else: - if inplace: - f = None - - return f - - c = dim.get_data(_fill_value=False) - - if dim.increasing: - # Adjust value so it's in the range [c[0], c[0]+period) - n = ((c[0] - value) / period).ceil() - value1 = value + n * period - - shift = axis_size - numpy_argmax((c - value1 >= 0).array) - if not dry_run: - f.roll(axis, shift, inplace=True) - - dim = f.dimension_coordinate(filter_by_axis=(axis,)) - - n = ((value - dim.data[0]) / period).ceil() - else: - # Adjust value so it's in the range (c[0]-period, c[0]] - n = ((c[0] - value) / period).floor() - value1 = value + n * period - - shift = axis_size - numpy_argmax((value1 - c >= 0).array) - - if not dry_run: - f.roll(axis, shift, inplace=True) - - # TODO should this call be like the one above? 
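# Worked sketch (illustration only) of the anchoring arithmetic removed above,
# for the increasing-coordinate branch: bring the anchor value into
# [c[0], c[0] + period), roll, then add whole periods so that the value is
# "equal to or just before" the new first coordinate.  The expected outputs
# are the ones given in the docstring above.
import numpy as np

def anchor_1d(c, value, period):
    n = int(np.ceil((c[0] - value) / period))
    value1 = value + n * period                       # now in [c[0], c[0] + period)
    shift = int(c.size - np.argmax(c - value1 >= 0))  # roll that puts value1 first
    rolled = np.roll(c, shift)
    m = int(np.ceil((value - rolled[0]) / period))    # whole periods still to add
    return rolled + m * period

c = np.arange(0, 360, 45)          # [0, 45, ..., 315]
print(anchor_1d(c, 230, 360))      # [ 270  315    0   45   90  135  180  225]
print(anchor_1d(c, 590, 360))      # [ 630  675  360  405  450  495  540  585]
print(anchor_1d(c, -490, 360))     # [-450 -405 -720 -675 -630 -585 -540 -495]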
- dim = f.dimension_coordinate(filter_by_axis=(axis,)) - - n = ((value - dim.data[0]) / period).floor() - - if dry_run: - return {"axis": axis, "roll": shift, "nperiod": n * period} - - if n: - np = n * period - dim += np - bounds = dim.get_bounds(None) - if bounds is not None: - bounds += np - - return f - def argmax(self, axis=None): """Return the indices of the maximum values along an axis. @@ -15311,65 +14417,65 @@ def argmax(self, axis=None): # # return out -# @_manage_log_level_via_verbosity -# def autocyclic(self, key=None, coord=None, verbose=None): -# """Set dimensions to be cyclic. -# -# A dimension is set to be cyclic if it has a unique longitude (or -# grid longitude) dimension coordinate construct with bounds and the -# first and last bounds values differ by 360 degrees (or an -# equivalent amount in other units). -# -# .. versionadded:: 1.0 -# -# .. seealso:: `cyclic`, `iscyclic`, `period` -# -# :Parameters: -# -# {{verbose: `int` or `str` or `None`, optional}} -# -# :Returns: -# -# `bool` -# -# """ -# if coord is None: -# key, coord = self.dimension_coordinate( -# "X", item=True, default=(None, None) -# ) -# if coord is None: -# return False -# elif not coord.X: -# return False -# -# bounds = coord.get_bounds(None) -# if bounds is None: -# self.cyclic(key, iscyclic=False) -# return False -# -# data = bounds.get_data(None, _fill_value=False) -# if data is None: -# self.cyclic(key, iscyclic=False) -# return False -# -# units = bounds.Units -# if units.islongitude: -# period = Data(360.0, units="degrees_east") -# elif units == _units_degrees: -# period = Data(360.0, units="degrees") -# else: -# self.cyclic(key, iscyclic=False) -# return False -# -# period.Units = data.Units -# -# if abs(data.last_element() - data.first_element()) != period.array: -# self.cyclic(key, iscyclic=False) -# return False -# -# self.cyclic(key, iscyclic=True, period=period) -# -# return True + # @_manage_log_level_via_verbosity + # def autocyclic(self, key=None, coord=None, verbose=None): + # """Set dimensions to be cyclic. + # + # A dimension is set to be cyclic if it has a unique longitude (or + # grid longitude) dimension coordinate construct with bounds and the + # first and last bounds values differ by 360 degrees (or an + # equivalent amount in other units). + # + # .. versionadded:: 1.0 + # + # .. 
seealso:: `cyclic`, `iscyclic`, `period` + # + # :Parameters: + # + # {{verbose: `int` or `str` or `None`, optional}} + # + # :Returns: + # + # `bool` + # + # """ + # if coord is None: + # key, coord = self.dimension_coordinate( + # "X", item=True, default=(None, None) + # ) + # if coord is None: + # return False + # elif not coord.X: + # return False + # + # bounds = coord.get_bounds(None) + # if bounds is None: + # self.cyclic(key, iscyclic=False) + # return False + # + # data = bounds.get_data(None, _fill_value=False) + # if data is None: + # self.cyclic(key, iscyclic=False) + # return False + # + # units = bounds.Units + # if units.islongitude: + # period = Data(360.0, units="degrees_east") + # elif units == _units_degrees: + # period = Data(360.0, units="degrees") + # else: + # self.cyclic(key, iscyclic=False) + # return False + # + # period.Units = data.Units + # + # if abs(data.last_element() - data.first_element()) != period.array: + # self.cyclic(key, iscyclic=False) + # return False + # + # self.cyclic(key, iscyclic=True, period=period) + # + # return True @_deprecated_kwarg_check("i") def squeeze(self, axes=None, inplace=False, i=False, **kwargs): @@ -15718,529 +14824,6 @@ def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): return f -# def auxiliary_coordinate( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Return an auxiliary coordinate construct, or its key. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinates`, `cell_measure`, -# `cell_method`, `coordinate`, `coordinate_reference`, -# `dimension_coordinate`, `domain_ancillary`, -# `domain_axis`, `field_ancillary` -# -# :Parameters: -# -# identity: optional -# Select the auxiliary coordinate construct by one of: -# -# * `None`. This is the default, which selects the -# auxiliary coordinate construct when there is only one -# of them. -# -# * The identity or key of an auxiliary coordinate -# construct. -# -# * The identity or key of a domain axis construct that is -# spanned by a unique 1-d auxiliary coordinate -# construct's data. -# -# * The position, in the field construct's data, of a -# domain axis construct that is spanned by a unique 1-d -# auxiliary coordinate construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); a `Query` object -# (e.g. ``cf.eq('longitude')``); or a compiled regular -# expression (e.g. ``re.compile('^atmosphere')``) that -# selects the relevant constructs whose identities match via -# `re.search`. -# -# A construct has a number of identities, and is selected if -# any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'auxiliarycoordinate2'`` and -# ``'key%auxiliarycoordinate2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. 
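# Sketch (illustration only) of the cyclicity test encoded in the commented-out
# autocyclic() earlier in this hunk: an 'X' axis is treated as cyclic when its
# coordinate bounds span exactly one period.  Assumes, as in the example
# fields, that the 'X' dimension coordinate carries bounds in longitude units.
import cf

f = cf.example_field(0)
x = f.dimension_coordinate("X")
bounds = x.get_bounds(None)
if bounds is not None:
    data = bounds.data
    period = cf.Data(360.0, units="degrees_east")
    period.Units = data.Units
    if abs(data.last_element() - data.first_element()) == period.array:
        f.cyclic("X", iscyclic=True, period=period)

print(f.iscyclic("X"))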
-# -# *Parameter example:* -# ``identity='Y'`` -# -# *Parameter example:* -# ``identity='latitude'`` -# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='auxiliarycoordinate1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# *Parameter example:* -# ``identity=0`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `AuxiliaryCoordinate` or `str` -# The selected auxiliary coordinate construct, or its key. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("auxiliary_coordinate",), -# "auxiliary_coordinate", -# identity, -# construct=True, -# key=key, -# item=item, -# default=default, -# **filter_kwargs, -# ) -# if c is not None: -# return c -# -# if identity: -# da_key = self.domain_axis(*identity, key=True, default=None) -# if da_key is not None: -# return self._select_construct( -# ("auxiliary_coordinate",), -# "auxiliary_coordinate", -# (), -# construct=True, -# key=key, -# item=item, -# default=default, -# filter_by_axis=(da_key,), -# axis_mode="exact", -# ) -# -# if default is None: -# return default -# -# return self._default( -# default, -# f"{self.__class__.__name__}.auxiliary_coordinate() can only " -# "return a unique construct", -# ) -# -# def construct( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Select a metadata construct by its identity. -# -# .. seealso:: `del_construct`, `get_construct`, `has_construct`, -# `set_construct` -# -# :Parameters: -# -# identity: optional -# Select the construct. Must be -# -# * The identity or key of a metadata construct. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); a `Query` object -# (e.g. ``cf.eq('longitude')``); or a compiled regular -# expression (e.g. ``re.compile('^atmosphere')``) that -# selects the relevant constructs whose identities match via -# `re.search`. -# -# A construct has a number of identities, and is selected if -# any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'dimensioncoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='T' -# -# *Parameter example:* -# ``identity='measure:area'`` -# -# *Parameter example:* -# ``identity='cell_area'`` -# -# *Parameter example:* -# ``identity='long_name=Cell Area'`` -# -# *Parameter example:* -# ``identity='cellmeasure1'`` -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. 
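# Sketch (illustration only) of construct selection by identity, by construct
# key, and via the item=True 2-tuple form described in the docstrings above,
# using cf.example_field(1), whose dump appears just below in this patch.
import cf

f = cf.example_field(1)
lat = f.auxiliary_coordinate("latitude")                  # by standard name
lat_key = f.auxiliary_coordinate("latitude", key=True)    # construct identifier
key, lat = f.auxiliary_coordinate("latitude", item=True)  # (identifier, construct)
area = f.construct("measure:area")                        # generic selection
missing = f.construct("height", default=None)             # default instead of an error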
-# -# {{default Exception}} -# -# If the *default* is `None`, or if *item* is True and -# *default* is a 2-tuple of `Ǹone`s, then TODO -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# item: TODO -# -# :Returns: -# -# The selected coordinate construct, or its key. -# -# **Examples:** -# -# >>> f = cf.example_field(1) -# >>> print(f) -# Field: air_temperature (ncvar%ta) -# --------------------------------- -# Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K -# Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum -# Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K -# Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] -# : grid_latitude(10) = [2.2, ..., -1.76] degrees -# : grid_longitude(9) = [-4.7, ..., -1.18] degrees -# : time(1) = [2019-01-01 00:00:00] -# Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N -# : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E -# : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] -# Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 -# Coord references: grid_mapping_name:rotated_latitude_longitude -# : standard_name:atmosphere_hybrid_height_coordinate -# Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m -# : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] -# : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m -# -# >>> f.construct('long_name=Grid latitude name') -# -# >>> f.construct('ncvar%a') -# -# >>> f.construct('measure:area') -# -# >>> f.construct('domainaxis0') -# -# >>> f.construct('height') -# Traceback (most recent call last): -# ... -# ValueError: Can't return zero constructs -# >>> f.construct('height', default=False) -# False -# >>> f.construct('height', default=TypeError("No height coordinates")) -# Traceback (most recent call last): -# ... -# TypeError: No height coordinates -# -# """ -# return self._filter_interface( -# (), -# "construct", -# identity, -# construct=True, -# key=key, -# item=item, -# default=default, -# **filter_kwargs, -# ) -# -# def domain_ancillary( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Return a domain ancillary construct, or its key. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, -# `cell_method`, `coordinate`, `coordinate_reference`, -# `dimension_coordinate`, `domain_ancillaries`, -# `domain_axis`, `field_ancillary` -# -# :Parameters: -# -# identity: optional -# Select the domain ancillary construct by one of: -# -# * `None`. This is the default, which selects the domain -# ancillary construct when there is only one of them. -# -# * The identity or key of a domain ancillary construct. -# -# * The identity or key of a domain axis construct that is -# spanned by a unique 1-d domain ancillary construct's data. -# -# * The position, in the field construct's data, of a domain -# axis construct that is spanned by a unique 1-d domain -# ancillary construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); a `Query` object -# (e.g. 
``cf.eq('longitude')``); or a compiled regular -# expression (e.g. ``re.compile('^atmosphere')``) that -# selects the relevant constructs whose identities match via -# `re.search`. -# -# A construct has a number of identities, and is selected if -# any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'domainancillary2'`` and -# ``'key%domainancillary2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='Y'`` -# -# *Parameter example:* -# ``identity='latitude'`` -# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='domainancillary1'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity=0`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `DomainAncillary` or `str` -# The selected domain ancillary coordinate construct, or its -# key. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("domain_ancillary",), -# "domain_ancillary", -# identity, -# construct=True, -# key=key, -# default=default, -# item=item, -# **filter_kwargs, -# ) -# -# def cell_measure( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Select a cell measure construct by its identity. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measures`, -# `cell_method`, `coordinate`, `coordinate_reference`, -# `dimension_coordinate`, `domain_ancillary`, -# `domain_axis`, `field_ancillary` -# -# :Parameters: -# -# identity: optional -# Select the cell measure construct by: -# -# * `None`. This is the default, which selects the cell -# measure construct when there is only one of them. -# -# * The identity or key of a cell measure construct. -# -# * The identity or key of a domain axis construct that is -# spanned by a unique 1-d cell measure construct's data. -# -# * The position, in the field construct's data, of a -# domain axis construct that is spanned by a unique 1-d -# cell measure construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'long_name=Cell Area', ``'ncvar%areacello'``, -# etc.); a `Query` object (e.g. ``cf.eq('measure:area')``); -# or a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. 
In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'cellmeasure2'`` and -# ``'key%cellmeasure2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='measure:area'`` -# -# *Parameter example:* -# ``identity='cell_area'`` -# -# *Parameter example:* -# ``identity='long_name=Cell Area'`` -# -# *Parameter example:* -# ``identity='cellmeasure1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity=0`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `CellMeasure`or `str` -# The selected cell measure construct, or its key. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("cell_measure",), -# "cell_meausure", -# identity, -# construct=True, -# key=key, -# default=default, -# item=item, -# **filter_kwargs, -# ) - def cell_method( self, *identity, @@ -16249,88 +14832,54 @@ def cell_method( item=False, **filter_kwargs, ): - """Select a cell method construct by its identity. + """Select a cell method construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_methods`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + .. seealso:: `construct`, `cell_methods` :Parameters: identity: optional - Select the cell method construct by: - - * `None`. This is the default, which selects the cell - method construct when there is only one of them. + Select cell method constructs that have an identity, + defined by their `!identities` methods, that matches + any of the given values. - * The identity or key of a cell method construct. + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - * The identity or key of a domain axis construct that a - unique cell method construct applies to. + Additionally, if a value would select a unique domain + axis construct with ``f.domain_axis(value)`` then any + cell method constructs that span exactly that axis are + selected. - * The position, in the field construct's data, of a - domain axis construct that a unique cell method - construct applies to. + If no values are provided then all cell method + constructs are selected. - A construct identity is specified by a string - (e.g. ``'method:mean'``, etc.); a `Query` object - (e.g. ``cf.eq('method:maximum')``); or a compiled regular - expression (e.g. ``re.compile('^m')``) that selects the - relevant constructs whose identities match via - `re.search`. + {{value match}} - Each construct has a number of identities, and is selected - if any of them match any of those provided. 
A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``c`` has - two identities: + {{displayed identity}} - >>> c.identities() - ['method:minimum', - 'over:sea'] + {{key: `bool`, optional}} - A construct key may optionally have the ``'key%'`` - prefix. For example ``'cellmethod2'`` and - ``'key%cellmethod2'`` are both acceptable keys. + {{item: `bool`, optional}} - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='method:variance'`` - - *Parameter example:* - ``identity='cellmethod1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity=0`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `CellMethod`or `str` - The selected cell method construct, or its key. + {{Returns construct}} **Examples:** - TODO - """ c = self._filter_interface( ("cell_method",), @@ -16373,259 +14922,6 @@ def cell_method( "return a unique construct", ) -# def coordinate( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Return a dimension or auxiliary coordinate construct, or its -# key. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinate`, `coordinates`, -# `dimension_coordinate` -# -# :Parameters: -# -# identity: optional -# Select the dimension coordinate construct by one of: -# -# * `None`. This is the default, which selects the -# coordinate construct when there is only one of them. -# -# * The identity or key of a dimension coordinate -# construct. -# -# * The identity or key of a domain axis construct that is -# spanned by a unique 1-d coordinate construct's data. -# -# * The position, in the field construct's data, of a -# domain axis construct that is spanned by a unique 1-d -# coordinate construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); a `Query` object -# (e.g. ``cf.eq('longitude')``); or a compiled regular -# expression (e.g. ``re.compile('^atmosphere')``) that -# selects the relevant constructs whose identities match via -# `re.search`. -# -# A construct has a number of identities, and is selected if -# any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'auxiliarycoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. 
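# Sketch (illustration only) of cell method selection as described by the
# rewritten cell_method() docstring above.  cf.example_field(1) has the two
# cell methods shown in the field dump earlier in this patch.
import cf

f = cf.example_field(1)
maximum = f.cell_method("method:maximum")          # by identity
mean_key = f.cell_method("method:mean", key=True)  # construct identifier
print(f.cell_methods())                            # all cell method constructs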
-# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='Y'`` -# -# *Parameter example:* -# ``identity='latitude'`` -# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='dimensioncoordinate1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `DimensionCoordinate` or `AuxiliaryCoordinate` or `str` -# The selected dimension or auxiliary coordinate construct, -# or its key. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("dimension_coordinate", "auxiliary_coordinate"), -# "coordinate", -# identity, -# construct=True, -# key=key, -# item=item, -# default=default, -# **filter_kwargs, -# ) -# if c is not None: -# return c -# -# if identity: -# da_key = self.domain_axis(*identity, key=True, default=None) -# if da_key is not None: -# return self._filter_interface( -# ("dimension_coordinate", "auxiliary_coordinate"), -# "coordinate", -# (), -# construct=True, -# key=key, -# item=item, -# default=default, -# filter_by_axis=(da_key,), -# axis_mode="exact", -# ) -# -# if default is None: -# return default -# -# return self._default( -# default, -# f"{self.__class__.__name__}.coordinate() can only " -# "return a unique construct", -# ) -# -# def coordinate_reference( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Return a coordinate reference construct, or its key. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, -# `cell_method`, `coordinate`, `coordinate_references`, -# `dimension_coordinate`, `domain_ancillary`, -# `domain_axis`, `field_ancillary` -# -# :Parameters: -# -# identity: optional -# Select the coordinate reference construct by one of: -# -# * `None`. This is the default, which selects the -# coordinate reference construct when there is only one -# of them. -# -# * The identity or key of a coordinate reference -# construct. -# -# A construct identity is specified by a string -# (e.g. ``'grid_mapping_name:latitude_longitude'``, -# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a -# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or -# a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# two identities: -# -# >>> x.identities() -# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] -# -# A identity's prefix of ``'grid_mapping_name:'`` or -# ``'standard_name:'`` may be omitted -# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` -# and ``'atmosphere_hybrid_height_coordinate'`` are both -# acceptable identities). 
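# Sketch (illustration only) of coordinate reference selection, including the
# optional 'grid_mapping_name:'/'standard_name:' prefix described above, using
# cf.example_field(1) and assuming coordinate_reference() remains available on
# the field construct after this refactor.
import cf

f = cf.example_field(1)
cr = f.coordinate_reference("grid_mapping_name:rotated_latitude_longitude")
same = f.coordinate_reference("rotated_latitude_longitude")  # prefix omitted
key = f.coordinate_reference(
    "standard_name:atmosphere_hybrid_height_coordinate", key=True
)
print(cr)  # both of the first two calls select the same construct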
-# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'coordinatereference2'`` and -# ``'key%coordinatereference2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` -# -# *Parameter example:* -# ``identity='grid_mapping_name:rotated_latitude_longitude'`` -# -# *Parameter example:* -# ``identity='transverse_mercator'`` -# -# *Parameter example:* -# ``identity='coordinatereference1'`` -# -# *Parameter example:* -# ``identity='key%coordinatereference1'`` -# -# *Parameter example:* -# ``identity='ncvar%lat_lon'`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `CoordinateReference` or `str` -# The selected coordinate reference construct, or its key. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("coordinate_reference",), -# "coordinate_reference", -# identity, -# construct=True, -# key=key, -# default=default, -# item=item, -# **filter_kwargs, -# ) - def field_ancillary( self, *identity, @@ -16634,107 +14930,49 @@ def field_ancillary( item=False, **filter_kwargs, ): - """Return a field ancillary construct, or its key. + """Select a field ancillary construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillaries` + .. seealso:: `construct`, `field_ancillaries` :Parameters: identity: optional - Select the field ancillary construct by one of: - - * `None`. This is the default, which selects the field - ancillary construct when there is only one of them. - - * The identity or key of an field ancillary construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d field ancillary construct's - data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - field ancillary construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, - etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'fieldancillary2'`` and - ``'key%fieldancillary2'`` are both acceptable keys. - - A position of a domain axis construct in the field construct's - data is specified by an integer index. 
- - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Select field ancillary constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. - *Parameter example:* - ``identity='Y'`` + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - *Parameter example:* - ``identity='latitude'`` + If no values are provided then all field ancillary + constructs are selected. - *Parameter example:* - ``identity='long_name=Latitude'`` + {{value match}} - *Parameter example:* - ``identity='fieldancillary1'`` + {{displayed identity}} - *Parameter example:* - ``identity='domainaxis2'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='ncdim%y'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity=0`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `FieldAncillary` or `str` - The selected field ancillary coordinate construct, or its - key. + {{Returns construct}} **Examples:** - TODO - """ return self._filter_interface( ("field_ancillary",), @@ -16747,319 +14985,6 @@ def field_ancillary( **filter_kwargs, ) -# def dimension_coordinate( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Select a dimension coordinate construct. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `dimension_coordinates` -# -# :Parameters: -# -# identity: optional -# Select dimension coordinate constructs that have an -# identity, defined by their `!identities` methods, that -# matches any of the given values. In addition to -# construct identities, the values are matched against: -# -# Additionally, the values are matched against construct -# identifiers, with or without the ``'key%'`` prefix. -# -# Additionly, TODOx the values are matched against the identity or -# construct identifier, with or without the ``'key%'`` -# prefix, of a domain axis construct that is spanned by -# a dimension coordinate construct's data. -# -# *Parameter example:* -# ``'domainaxis2'`` -# -# *Parameter example:* -# ``'ncdim%latitude'`` -# -# * The integer position, in the field construct's data, -# of the domain axis construct that is spanned by a -# dimension coordinate construct's data. -# -# *Parameter example:* -# ``0'`` -# -# *Parameter example:* -# ``cf.gt(2)`` -# -# If no values are provided then all constructs are -# selected. -# -# {{value match}} -# -# {{displayed identity}} -# -# *Parameter example:* -# ``'Y'`` -# -# *Parameter example:* -# ``latitude'`` -# -# *Parameter example:* -# ``re.compile('^lat')`` -# -# *Parameter example:* -# ``'long_name=Latitude'`` -# -# *Parameter example:* -# ``'Z', 'altutude'`` -# -# key: `bool`, optional -# If True then return the selected construct -# identifier. By default the construct itself is -# returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. 
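# Sketch (illustration only) of field ancillary selection as described by the
# rewritten field_ancillary() docstring above, using cf.example_field(1)
# (see its dump earlier in this patch).
import cf

f = cf.example_field(1)
fa = f.field_ancillary("air_temperature standard_error")
fa_key = f.field_ancillary("air_temperature standard_error", key=True)
key, fa = f.field_ancillary("air_temperature standard_error", item=True)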
If set to an `Exception` instance then -# it will be raised instead. -# -# item: `bool`, optional -# If True then return the selected construct and its -# construct identifier in a 2-tuple. By default the only -# construct is returned. -# -# .. versionadded:: 3.9.0 -# -# :Returns: -# -# `DimensionCoordinate` or `str` or `tuple` -# The selected dimension coordinate construct, or its -# construct identifier, or both. -# -# **Examples:** -# -# TODO -# -# """ -# return self._filter_interface( -# ("dimension_coordinate",), -# "dimension_coordinate", -# identity, -# construct=True, -# key=key, -# item=item, -# default=default, -## _identity_config={"identities_kwargs": {"ctype": False}}, -# **filter_kwargs, -# ) -# if c is not None: -# return c -# -# c = self._select_construct( -# ("dimension_coordinate",), -# "dimension_coordinate", -# identity, -# key=key, -# item=item, -# default=None, -# _last_filter=last_filter, -# _identity_config={"identities_kwargs": {"ctype": False}}, -# **filter_kwargs, -# ) -# if c is not None: -# return c -# -# if not filter_kwargs and len(identity) == 1 and identity in self.domain_axes(todict=True): -# raise DeprecationError() -# da_key = self.domain_axis(*identity, key=True, default=None) -# if da_key is not None: -# return self._filter_interface( -# ("dimension_coordinate",), -# "dimension_coordinate", -# (), -# construct=True, -# key=key, -# item=item, -# default=default, -# filter_by_axis=(da_key,), -# axis_mode="exact", -# ) -# -# if default is None: -# return None -# -# return self._default( -# default, -# f"{self.__class__.__name__}.dimension_coordinate() can only " -# "return a unique construct", -# ) -# -# -# def domain_axis( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Return a domain axis construct, or its key. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, -# `cell_method`, `coordinate`, `coordinate_reference`, -# `dimension_coordinate`, `domain_ancillary`, -# `domain_axes`, `field_ancillary` -# -# :Parameters: -# -# identity: -# Select the domain axis construct by one of: -# -# * An identity or key of a 1-d coordinate construct that -# whose data spans the domain axis construct. -# -# * A domain axis construct identity or key. -# -# * The position of the domain axis construct in the field -# construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, -# ``'ncvar%lat'``, etc.); or a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'dimensioncoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# A position of a domain axis construct in the field -# construct's data is specified by an integer index. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. 
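# Sketch (illustration only) of dimension coordinate and domain axis selection
# as described in the (removed) docstrings above; the same calls are used
# throughout the new code in this patch, so both methods are assumed to remain
# available on the field construct.
import cf

f = cf.example_field(1)
time = f.dimension_coordinate("T")                    # by coordinate type
key, glat = f.dimension_coordinate("grid_latitude", item=True)
axis_key = f.domain_axis("grid_latitude", key=True)   # e.g. 'domainaxis1'
axis0 = f.domain_axis(0)                              # by position in the field's data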
-# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='dimensioncoordinate1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity='key%domainaxis2'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# *Parameter example:* -# ``identity=2`` -# -# key: `bool`, optional -# If True then return the selected construct key. By -# default the construct itself is returned. -# -# default: optional -# Return the value of the *default* parameter if a construct -# can not be found. If set to an `Exception` instance then -# it will be raised instead. -# -# :Returns: -# -# `DomainAxis` or `str` -# The selected domain axis construct, or its key. -# -# **Examples:** -# -# TODO -# -# """ -# # Try for integer index -# if identity: -# identity2 = [] -# -# data_axes = self.get_data_axes(default=None) -# for i in identity: -# try: -# identity2.append(data_axes[i]) -# except TypeError: -# identity2.append(i) -# except IndexError: -# pass -# -# if not identity2: -# if default is None: -# return default -# -# return self._default( -# default, -# "Indices do not exist for field construct data dimenions", -# ) -# -# identity = identity2 -# -## c = self._select_construct( -## ("domain_axis",), -## "domain_axis", -## identity, -## key=key, -## default=None, -## item=item, -## **filter_kwargs, -## ) -# c = self._filter_interface( -# ("domain_axis",), -# "domain_axis", -# identity, -# construct=True, -# key=key, -# item=item, -# default=None, -# **filter_kwargs, -# ) -# if c is not None: -# return c -# -# da_key = self.domain_axis_key(*identity, default=None) -# -# if da_key is not None: -# if key: -# return da_key -# -# construct = self.constructs[da_key] -# -# if item: -# return da_key, construct -# -# return construct -# -# if default is None: -# return default -# -# return self._default( -# default, -# f"{self.__class__.__name__}.domain_axis() can't return zero " -# "constructs", -# ) - def domain_axis_position(self, *identity): """Return the position in the data of a domain axis construct. @@ -17153,71 +15078,6 @@ def domain_axis_position(self, *identity): key = self.domain_axis(*identity, key=True) return self.get_data_axes().index(key) -# def auxiliary_coordinates(self, *identities, **filter_kwargs): -# """Return auxiliary coordinate constructs. -# -# .. versionadded:: 3.0.0 -# -# .. seealso:: `constructs` -# -# :Parameters: -# -# identities: optional -# Select auxiliary coordinate constructs that have an -# identity, defined by their `!identities` methods, that -# matches any of the given values. -# -# If no identities are provided then all auxiliary -# coordinate constructs are selected. -# -# {{value match}} -# -# {{displayed identity}} -# -# {{filter_kwargs: optional}} -# -# :Returns: -# -# `Constructs` -# The selected constructs, unless modified by any -# *filter_kwargs* parameters. -# -# **Examples:** -# -# >>> f.auxiliary_coordinates() -# Constructs: -# {} -# -# >>> f.auxiliary_coordinates() -# Constructs: -# {'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, -# 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, -# 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name:Grid latitude name(10) >} -# -# """ -# filter_by_identity = filter_kwargs.pop("filter_by_identity", None) -# if identities: -# if filter_by_identity is not None: -# raise TypeError( -# f"Can't set {self.__class__.__name__}." 
-# "auxiliary_coordinates() " -# "keyword argument 'filter_by_identity' when " -# "positional *identities arguments are also set" -# ) -# elif filter_by_identity is not None: -# identities = filter_by_identity -# -# ctypes = [i for i in "XTYZ" if i in identities] -# if len(ctypes) == len(identities): -# filter_kwargs["filter_by_coordinate_type"] = ctypes -# return super().auxiliary_coordinates( -# _last_filter=("filter_by_coordinate_type",), -# **filter_kwargs -# ) -# -# return super().auxiliary_coordinates( *identities, -# **filter_kwargs) - def axes_names(self, *identities, **kwargs): """Return canonical identities for each domain axis construct. @@ -18278,205 +16138,6 @@ def percentile( return out - # def period(self, *value, **kwargs): - # '''Return the period of the data, or of the data of a metadata - # construct. - # - # Note that a non-cyclic axis may have a period data. - # - # .. versionadded:: 1.0 - # - # .. seealso:: `axis`, `cyclic`, `iscyclic`, `isperiodic` - # - # :Parameters: - # - # identity: optional - # Select the construct for which to return the period of the - # data. By default the field construct itself is - # selected. May be: - # - # * `None` to select the field construct. This is the - # default. - # - # * The identity or key of a metadata construct. - # - # A construct identity is specified by a string - # (e.g. ``'latitude'``, ``'long_name=time'``, - # ``'ncvar%lat'``, etc.); or a compiled regular expression - # (e.g. ``re.compile('^atmosphere')``) that selects the - # relevant constructs whose identities match via - # `re.search`. - # - # Each construct has a number of identities, and is selected - # if any of them match any of those provided. A construct's - # identities are those returned by its `!identities` - # method. In the following example, the construct ``x`` has - # six identities: - # - # >>> x.identities() - # ['time', - # 'long_name=Time', - # 'foo=bar', - # 'standard_name=time', - # 'ncvar%t', - # 'T'] - # - # A construct key may optionally have the ``'key%'`` - # prefix. For example ``'dimensioncoordinate2'`` and - # ``'key%dimensioncoordinate2'`` are both acceptable keys. - # - # Note that in the output of a `print` call or `!dump` - # method, a construct is always described by one of its - # identities, and so this description may always be used as - # an *identity* argument. - # - # axes: deprecated at version 3.0.0 - # - # axis: deprecated at version 3.4.1 - # - # kwargs: deprecated at version 3.0.0 - # - # :Returns: - # - # `Data` or `None` - # The period of the cyclic axis's dimension coordinates, or - # `None` if no period has been set. - # - # **Examples:** - # - # >>> f.cyclic() - # {} - # >>> print(f.period('X')) - # None - # >>> f.dimension_coordinate('X').Units - # - # >>> f.cyclic('X', period=360) - # {} - # >>> print(f.period('X')) - # - # >>> f.cyclic('X', False) - # {'dim3'} - # >>> print(f.period('X')) - # - # >>> f.dimension_coordinate('X').period(None) - # - # >>> print(f.period('X')) - # None - # - # ''' - # if kwargs: - # _DEPRECATION_ERROR_KWARGS( - # self, 'period', kwargs) # pragma: no cover - # - # return super().period(*value) - -# def replace_construct(self, *identity, construct=None, copy=True, -# **filter_kwargs): -# """Replace a metadata construct. -# -# Replacement assigns the same construct key and, if applicable, the -# domain axes of the original construct to the new, replacing -# construct. -# -# .. versionadded:: 3.0.0 -# -# .. 
seealso:: `set_construct` -# -# :Parameters: -# -# identity: -# Select the metadata construct to be replaced by one of: -# -# * The identity or key of a metadata construct. -# -# * The identity or key of a domain axis construct that is -# spanned by a metadata construct's data. -# -# A construct identity is specified by a string -# (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, -# etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or -# a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# A construct has a number of identities, and is selected if -# any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# six identities: -# -# >>> x.identities() -# ['time', -# 'long_name=Time', -# 'foo=bar', -# 'standard_name=time', -# 'ncvar%t', -# 'T'] -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'dimensioncoordinate2'`` and -# ``'key%dimensioncoordinate2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. -# -# *Parameter example:* -# ``identity='Y'`` -# -# *Parameter example:* -# ``identity='latitude'`` -# -# *Parameter example:* -# ``identity='long_name=Latitude'`` -# -# *Parameter example:* -# ``identity='dimensioncoordinate1'`` -# -# *Parameter example:* -# ``identity='domainaxis2'`` -# -# *Parameter example:* -# ``identity='ncdim%y'`` -# -# construct: -# The new construct to replace that selected by the -# *identity* parameter. -# -# copy: `bool`, optional -# If True then set a copy of the new construct. By default -# the construct is copied. -# -# :Returns: -# -# The construct that was replaced. -# -# **Examples:** -# -# >>> f.replace_construct('X', new_X_construct) -# -# """ -# key, c = self.construct(*identity, item=True, **filter_kwargs) -# -# if not isinstance(construct, c.__class__): -# raise ValueError( -# f"Can't replace a {c.__class__.__name__} construct " -# f"with a {construct.__class__.__name__} construct" -# ) -# -# axes = self.get_data_axes(key, None) -# if axes is not None: -# shape0 = getattr(c, "shape", None) -# shape1 = getattr(construct, "shape", None) -# if shape0 != shape1: -# raise ValueError("TODO bb") -# -# self.set_construct(construct, key=key, axes=axes, copy=copy) -# -# return c - @_inplace_enabled(default=False) def flatten(self, axes=None, return_axis=False, inplace=False): """Flatten axes of the field. 
@@ -18797,6 +16458,8 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): >>> f.roll('X', -3) """ + # TODODASK - allow multiple roll axes + axis = self.domain_axis( axis, key=True, @@ -18804,36 +16467,22 @@ def roll(self, axis, shift, inplace=False, i=False, **kwargs): f"Can't roll: Bad axis specification: {axis!r}" ), ) - f = _inplace_enabled_define_and_cleanup(self) - domain_axes = self.domain_axes(todict=True) - if domain_axes[axis].get_size() <= 1: - if inplace: - f = None - - return f - - dim = self.dimension_coordinate(filter_by_axis=(axis,), default=None) - if dim is not None and dim.period() is None: - raise ValueError( - f"Can't roll: {dim.identity()!r} axis has non-periodic " - "dimension coordinates" - ) + f = _inplace_enabled_define_and_cleanup(self) - try: - iaxis = self.get_data_axes().index(axis) - except ValueError: - if inplace: - f = None + axis = f._parse_axes(axis) - return f + # Roll the metadata constructs in-place + shift = f._roll_constructs(axis, shift) - super(Field, f).roll(iaxis, shift, inplace=True) + iaxes = self._axis_positions(axis, parse=False) + if iaxes: + # TODODASK - remove these two lines when multiaxis rolls + # are allowed at v4.0.0 + iaxis = iaxes[0] + shift = shift[0] - for key, construct in f.constructs.filter_by_data(todict=True).items(): - axes = f.get_data_axes(key, default=()) - if axis in axes: - construct.roll(axes.index(axis), shift, inplace=True) + super(Field, f).roll(iaxis, shift, inplace=True) return f @@ -19330,166 +16979,6 @@ def subspace(self): """ return SubspaceField(self) -# def coordinates(self, *identities, **filter_kwargs): -# """Return dimension and auxiliary coordinate constructs. -# -# . versionadded:: 3.0.0 -# -# . seealso:: `auxiliary_coordinates`, `constructs`, -# `dimension_coordinates` -# -# :Parameters: -# -# identities: optional -# Select coordinate constructs that have an identity, -# defined by their `!identities` methods, that matches -# any of the given values. -# -# If no identities are provided then all coordinate -# constructs are selected. -# -# {{value match}} -# -# {{displayed identity}} -# -# {{filter_kwargs: optional}} -# -# :Returns: -# -# `Constructs` -# The selected constructs, unless modified by any -# *filter_kwargs* parameters. 
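# Sketch (illustration only) of the reworked roll() above: rolling a cyclic
# axis rolls the data and every metadata construct that spans that axis.  The
# period is set explicitly here in case the example field's 'X' axis is not
# already flagged as cyclic.
import cf

f = cf.example_field(0)
f.cyclic("X", iscyclic=True, period=cf.Data(360.0, "degrees_east"))
g = f.roll("X", -3)
print(f.dimension_coordinate("X").array)   # original longitudes
print(g.dimension_coordinate("X").array)   # rolled by -3 positions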
-# -# *Examples:** -# -# >> f.coordinates() -# onstructs: -# } -# -# >> f.coordinates() -# onstructs: -# 'auxiliarycoordinate0': <{{repr}}AuxiliaryCoordinate: latitude(10, 9) degrees_N>, -# 'auxiliarycoordinate1': <{{repr}}AuxiliaryCoordinate: longitude(9, 10) degrees_E>, -# 'auxiliarycoordinate2': <{{repr}}AuxiliaryCoordinate: long_name=Grid latitude name(10) >, -# 'dimensioncoordinate0': <{{repr}}DimensionCoordinate: atmosphere_hybrid_height_coordinate(1) >, -# 'dimensioncoordinate1': <{{repr}}DimensionCoordinate: grid_latitude(10) degrees>, -# 'dimensioncoordinate2': <{{repr}}DimensionCoordinate: grid_longitude(9) degrees>, -# 'dimensioncoordinate3': <{{repr}}DimensionCoordinate: time(1) days since 2018-12-01 >} -# -# """ -# filter_by_identity = filter_kwargs.pop("filter_by_identity", None) -# if identities: -# if filter_by_identity is not None: -# raise TypeError( -# f"Can't set {self.__class__.__name__}.coordinates() " -# "keyword argument 'filter_by_identity' when " -# "positional *identities arguments are also set" -# ) -# elif filter_by_identity is not None: -# identities = filter_by_identity -# -# ctypes = [i for i in "XTYZ" if i in identities] -# if ctypes and len(ctypes) == len(identities): -# filter_kwargs["filter_by_coordinate_type"] = ctypes -# return super().coordinates( -# _last_filter=("filter_by_coordinate_type",), -# **filter_kwargs -# ) -# -# return super().coordinates(*identities, **filter_kwargs) -# -# def coordinate_reference_domain_axes(self, identity): -# """Return the domain axes that apply to a coordinate reference -# construct. -# -# :Parameters: -# -# identity: -# Select the coordinate reference construct by one of: -# -# * The identity or key of a coordinate reference construct. -# -# A construct identity is specified by a string -# (e.g. ``'grid_mapping_name:latitude_longitude'``, -# ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a -# `Query` object (e.g. ``cf.eq('latitude_longitude')``); or -# a compiled regular expression -# (e.g. ``re.compile('^atmosphere')``) that selects the -# relevant constructs whose identities match via -# `re.search`. -# -# Each construct has a number of identities, and is selected -# if any of them match any of those provided. A construct's -# identities are those returned by its `!identities` -# method. In the following example, the construct ``x`` has -# two identities: -# -# >>> x.identities() -# ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] -# -# A identity's prefix of ``'grid_mapping_name:'`` or -# ``'standard_name:'`` may be omitted -# (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` -# and ``'atmosphere_hybrid_height_coordinate'`` are both -# acceptable identities). -# -# A construct key may optionally have the ``'key%'`` -# prefix. For example ``'coordinatereference2'`` and -# ``'key%coordinatereference2'`` are both acceptable keys. -# -# Note that in the output of a `print` call or `!dump` -# method, a construct is always described by one of its -# identities, and so this description may always be used as -# an *identity* argument. 
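# Sketch (illustration only): the domain axes spanned by the constructs of a
# coordinate reference, as described by the (removed) docstring above, and
# assuming coordinate_reference_domain_axes() remains available on the field
# construct.
import cf

f = cf.example_field(1)
axes = f.coordinate_reference_domain_axes(
    "standard_name:atmosphere_hybrid_height_coordinate"
)
print(axes)   # a set of domain axis identifiers, e.g. {'domainaxis0', ...}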
-# -# *Parameter example:* -# ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` -# -# *Parameter example:* -# ``identity='grid_mapping_name:rotated_latitude_longitude'`` -# -# *Parameter example:* -# ``identity='transverse_mercator'`` -# -# *Parameter example:* -# ``identity='coordinatereference1'`` -# -# *Parameter example:* -# ``identity='key%coordinatereference1'`` -# -# *Parameter example:* -# ``identity='ncvar%lat_lon'`` -# -# :Returns: -# -# `set` -# The identifiers of the domain axis constructs that span -# the data of all coordinate and domain ancillary constructs -# used by the selected coordinate reference construct. -# -# **Examples:** -# -# >>> f.coordinate_reference_domain_axes('coordinatereference0') -# {'domainaxis0', 'domainaxis1', 'domainaxis2'} -# -# >>> f.coordinate_reference_domain_axes( -# ... 'atmosphere_hybrid_height_coordinate') -# {'domainaxis0', 'domainaxis1', 'domainaxis2'} -# -# """ -# cr = self.coordinate_reference(identity) -# -# data_axes = self.constructs.data_axes() -# -# axes = [] -# for i in cr.coordinates() | set( -# cr.coordinate_conversion.domain_ancillaries().values() -# ): -# i = self.construct_key(i, None) -# axes.extend(data_axes.get(i, ())) -# -# return set(axes) - def section(self, axes=None, stop=None, **kwargs): """Return a FieldList of m dimensional sections of a Field of n dimensions, where M <= N. @@ -20263,7 +17752,7 @@ def regrids( x = f.dimension_coordinate("X", default=None) if x is not None and x.Units.equivalent(Units("degrees")): f.cyclic("X", iscyclic=dst_cyclic, period=Data(360, "degrees")) - + # Release old memory from ESMF (this ought to happen garbage # collection, but it doesn't seem to work there!) regridSrc2Dst.destroy() @@ -20277,7 +17766,7 @@ def regrids( # if f.data.fits_in_one_chunk_in_memory(f.data.dtype.itemsize): # f.varray -# f.autocyclic() + # f.autocyclic() return f @@ -21049,91 +18538,6 @@ def derivative( # ---------------------------------------------------------------- # Aliases # ---------------------------------------------------------------- -# def aux( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `auxiliary_coordinate`.""" -# return self.auxiliary_coordinate( -# *identity, key=key, default=default, item=item, **filter_kwargs -# ) -# -# def auxs(self, *identities, **filter_kwargs): -# """Alias for `coordinates`.""" -# return self.auxiliary_coordinates(*identities, **filter_kwargs) -# -# def axes(self, *identities, **filter_kwargs): -# """Alias for `domain_axes`.""" -# return self.domain_axes(*identities, **filter_kwargs) -# -# def axis( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `domain_axis`.""" -# return self.domain_axis( -# *identity, key=key, default=default, item=item, **filter_kwargs -# ) -# -# def coord( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `coordinate`.""" -# return self.coordinate( -# *identity, key=key, default=default, item=item, **filter_kwargs -# ) -# -# def coords(self, *identities, **filter_kwargs): -# """Alias for `coordinates`.""" -# return self.coordinates(*identities, **filter_kwargs) -# -# def dim( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `dimension_coordinate`.""" -# return self.dimension_coordinate( -# *identity, key=key, default=default, item=item, **filter_kwargs -# ) 
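# Sketch (illustration only) of the cyclicity bookkeeping done at the end of
# regrids() above: if the destination grid is cyclic in longitude, the
# regridded field's 'X' axis is marked as cyclic with a period of 360 degrees.
# Here `g` merely stands in for a regridded field.
import cf

g = cf.example_field(0)
dst_cyclic = True
x = g.dimension_coordinate("X", default=None)
if dst_cyclic and x is not None and x.Units.equivalent(cf.Units("degrees")):
    g.cyclic("X", iscyclic=True, period=cf.Data(360, "degrees"))
print(g.iscyclic("X"))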
-# -# def dims(self, *identities, **filter_kwargs): -# """Alias for `dimension_coordinates`.""" -# return self.dimension_coordinates(*identities, **filter_kwargs) -# -# def domain_anc( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `domain_ancillary`.""" -# return self.domain_ancillary( -# *identity, key=key, default=default, item=item, **filter_kwargs -# ) -# -# def domain_ancs(self, *identities, **filter_kwargs): -# """Alias for `domain_ancillaries`.""" -# return self.domain_ancillaries(*identities, **filter_kwargs) - def field_anc( self, *identity, @@ -21151,60 +18555,6 @@ def field_ancs(self, *identities, **filter_kwargs): """Alias for `field_ancillaries`.""" return self.field_ancillaries(*identities, **filter_kwargs) -# def key(self, identity, default=ValueError(), **kwargs): -# """Alias for `cf.Field.construct_key`.""" -# if kwargs: -# _DEPRECATION_ERROR_KWARGS( -# self, -# "key", -# kwargs, -# "Use 'construct' method or 'construct_key' method instead.", -# ) # pragma: no cover -# -# return self.construct_key(identity, default=default) -# -# def measure( -# self, -# *identity, -# key=False, -# default=ValueError(), -# item=False, -# **filter_kwargs, -# ): -# """Alias for `cell_measure`.""" -# return self.cell_measure( -# *identity, -# key=key, -# default=default, -# item=item, -# **filter_kwargs, -# ) -# -# def measures(self, *identities, **filter_kwargs): -# """Alias for `cell_measures`.""" -# return self.cell_measures(*identities, **filter_kwargs) -# -# def ref( -# self, -# *identity, -# default=ValueError(), -# key=False, -# item=False, -# **filter_kwargs, -# ): -# """Alias for `coordinate_reference`.""" -# return self.coordinate_reference( -# *identity, -# key=key, -# default=default, -# item=item, -# **filter_kwargs, -# ) -# -# def refs(self, *identities, **filter_kwargs): -# """Alias for `coordinate_references`.""" -# return self.coordinate_references(*identities, **filter_kwargs) - # ---------------------------------------------------------------- # Deprecated attributes and methods # ---------------------------------------------------------------- @@ -21213,7 +18563,8 @@ def _Axes(self): """""" raise DeprecationError( f"{self.__class__.__name__} attribute '_Axes' has been deprecated " - "at version 3.0.0 and is no longer available" + "at version 3.0.0 and is no longer available and will be removed" + "at v4.0.0" "Use 'domain_axes' instead." ) @@ -21222,7 +18573,8 @@ def CellMethods(self): """""" raise DeprecationError( f"{self.__class__.__name__} attribute 'CellMethods' has been " - "deprecated at version 3.0.0 and is no longer available" + "deprecated at version 3.0.0 and is no longer available " + "and will be removed at v4.0.0. " "Use 'cell_methods' instead." ) @@ -21231,7 +18583,8 @@ def Items(self): """""" raise DeprecationError( f"{self.__class__.__name__} attribute 'Items' has been deprecated " - "at version 3.0.0 and is no longer available" + "at version 3.0.0 and is no longer available " + "and will be removed at v4.0.0. " "Use 'constructs' instead." ) @@ -21239,7 +18592,8 @@ def CM(self, xxx): """""" raise DeprecationError( f"{self.__class__.__name__} method 'CM' has been deprecated " - "at version 3.0.0 and is no longer available" + "at version 3.0.0 and is no longer available " + "and will be removed at v4.0.0. 
" ) def axis_name(self, *args, **kwargs): diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 34c86747c0..f4b1e61e0c 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -1,4 +1,4 @@ -#from itertools import chain +# from itertools import chain from ..decorators import ( _inplace_enabled, @@ -583,7 +583,7 @@ def identities(self, generator=False, ctypes=None, **kwargs): kwargs["pre"] = pre else: - post = (self._ctypes_iter('XTYZ'),) + post = (self._ctypes_iter("XTYZ"),) post0 = kwargs.pop("post", None) if post0: post += tuple(post0) @@ -599,4 +599,3 @@ def _ctypes_iter(self, ctypes): # This coordinate construct is of this type yield c return - diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 7f77be8e1c..2c9db7e938 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -103,33 +103,32 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): ) # pragma: no cover domain_axes = self.domain_axes(todict=True) -# constructs = self.constructs.filter_by_data() + # constructs = self.constructs.filter_by_data() # Initialize indices indices = {axis: slice(None) for axis in domain_axes} data_axes = self.constructs.data_axes() - + parsed = {} unique_axes = set() n_axes = 0 for identity, value in kwargs.items(): -# if identity in domain_axes: -# axes = (identity,) -# key = None -# construct = None -# else: - key, construct = self.construct(identity, - item=True, - default=(None, None) + # if identity in domain_axes: + # axes = (identity,) + # key = None + # construct = None + # else: + key, construct = self.construct( + identity, item=True, default=(None, None) ) - + if construct is None: raise ValueError( f"Can't find indices. Ambiguous axis or axes: " f"{identity!r}" ) - + if key in domain_axes: axes = key else: @@ -579,7 +578,7 @@ def _roll_constructs(self, axis, shift): ) for a in axis: - dim = dims.filter_by_axis("exact", a).value(None) + # dim = dims.filter_by_axis("exact", a).value(None) dim = self.dimension_coordinate(filter_by_axis=(a,), todict=True) if dim is not None and dim.period() is None: raise ValueError( @@ -589,7 +588,9 @@ def _roll_constructs(self, axis, shift): ) data_axes = self.constructs.data_axes() - for key, construct in self.constructs.filter_by_data(todict=True).items(): + for key, construct in self.constructs.filter_by_data( + todict=True + ).items(): construct_axes = data_axes.get(key, ()) c_axes = [] @@ -737,7 +738,7 @@ def anchor( ) # pragma: no cover axis_in = axis -# axis = self._parse_axes(axis_in) + # axis = self._parse_axes(axis_in) da_key, axis = self.domain_axis(axis, item=True) @@ -773,7 +774,7 @@ def anchor( return {"axis": da_key, "roll": 0, "nperiod": 0} return f - + c = dim.get_data(_fill_value=False) if dim.increasing: @@ -1117,7 +1118,7 @@ def del_coordinate_reference( continue return out - + def del_domain_axis( self, identity=None, squeeze=False, default=ValueError() ): @@ -1217,7 +1218,7 @@ def del_domain_axis( Dimension coords: longitude(8) = [22.5, ..., 337.5] degrees_east """ - dakey, domain_axis= self.domain_axis(identity, item=True) + dakey, domain_axis = self.domain_axis(identity, item=True) if not squeeze: return self.del_construct(dakey) @@ -1255,103 +1256,46 @@ def auxiliary_coordinate( item=False, **filter_kwargs, ): - """Return an auxiliary coordinate construct, or its key. + """Select an auxiliary coordinate construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. 
seealso:: `construct`, `auxiliary_coordinates`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + .. seealso:: `construct`, `auxiliary_coordinates` :Parameters: identity: optional - Select the auxiliary coordinate construct by one of: - - * `None`. This is the default, which selects the - auxiliary coordinate construct when there is only one - of them. - - * The identity or key of an auxiliary coordinate - construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d auxiliary coordinate - construct's data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - auxiliary coordinate construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'auxiliarycoordinate2'`` and - ``'key%auxiliarycoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Select auxiliary coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. - *Parameter example:* - ``identity='Y'`` + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - *Parameter example:* - ``identity='latitude'`` + If no values are provided then all auxiliary + coordinate constructs are selected. - *Parameter example:* - ``identity='long_name=Latitude'`` + {{value match}} - *Parameter example:* - ``identity='auxiliarycoordinate1'`` + {{displayed identity}} - *Parameter example:* - ``identity='domainaxis2'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='ncdim%y'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity=0`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `AuxiliaryCoordinate` or `str` - The selected auxiliary coordinate construct, or its key. + {{Returns construct}} **Examples:** @@ -1516,17 +1460,51 @@ def cell_measure( item=False, **filter_kwargs, ): - """Select a cell measure construct by its identity. + """Select a cell measure construct. + + {{unique construct}} .. 
versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measures`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axis`, `field_ancillary` + .. seealso:: `construct`, `cell_measures` :Parameters: + identity: optional + Select dimension coordinate constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. + + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. + + If no values are provided then all dimension + coordinate constructs are selected. + + {{value match}} + + {{displayed identity}} + + {{key: `bool`, optional}} + + {{item: `bool`, optional}} + + default: optional + Return the value of the *default* parameter if there + is no unique construct. + + {{default Exception}} + + {{filter_kwargs: optional}} + + :Returns: + + {{Returns construct}} + + **Examples:** + + + identity: optional Select the cell measure construct by: @@ -1571,53 +1549,13 @@ def cell_measure( A position of a domain axis construct in the field construct's data is specified by an integer index. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='cell_area'`` - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity=0`` - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. - - :Returns: - - `CellMeasure`or `str` - The selected cell measure construct, or its key. - - **Examples:** - - TODO """ return self._filter_interface( ("cell_measure",), "cell_meausure", identity, - construct=True, + construct=True, key=key, default=default, item=item, @@ -1632,98 +1570,46 @@ def coordinate( item=False, **filter_kwargs, ): - """Return a dimension or auxiliary coordinate construct, or its - key. + """Select a dimension or auxiliary coordinate construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `coordinates`, - `dimension_coordinate` + .. seealso:: `construct`, `coordinates` :Parameters: identity: optional - Select the dimension coordinate construct by one of: - - * `None`. This is the default, which selects the - coordinate construct when there is only one of them. - - * The identity or key of a dimension coordinate - construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d coordinate construct's data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - coordinate construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. 
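For reference, a minimal sketch of the shared selection interface described above (string identities, `cf.Query` objects or compiled regular expressions matched with `re.search`, plus the `key`, `item` and `default` keywords), assuming the field returned by `cf.example_field(1)`:

import re
import cf

f = cf.example_field(1)

# Select by identity string
lat = f.coordinate('latitude')

# Return the construct key only, or the (key, construct) pair
key = f.coordinate('latitude', key=True)
key, lat = f.coordinate('latitude', item=True)

# Identities may also be compiled regular expressions (or cf.Query objects),
# matched against each construct identity with re.search
area = f.cell_measure(re.compile('^measure:area'))

# If there is no unique matching construct, the default is returned
# (or raised, if it is an exception instance)
nothing = f.coordinate('altitude', default=None)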
- - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'auxiliarycoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + Select dimension or auxiliary coordinate constructs + that have an identity, defined by their `!identities` + methods, that matches any of the given values. - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - *Parameter example:* - ``identity='Y'`` + If no values are provided then all dimension or + auxiliary coordinate constructs are selected. - *Parameter example:* - ``identity='latitude'`` + {{value match}} - *Parameter example:* - ``identity='long_name=Latitude'`` + {{displayed identity}} - *Parameter example:* - ``identity='dimensioncoordinate1'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='domainaxis2'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity='ncdim%y'`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `DimensionCoordinate` or `AuxiliaryCoordinate` or `str` - The selected dimension or auxiliary coordinate construct, - or its key. + {{Returns construct}} **Examples:** @@ -1740,6 +1626,7 @@ def coordinate( default=default, **filter_kwargs, ) + def coordinate_reference( self, *identity, @@ -1934,6 +1821,8 @@ def dimension_coordinate( ): """Select a dimension coordinate construct. + {{unique construct}} + .. versionadded:: 3.0.0 .. seealso:: `construct`, `dimension_coordinates` @@ -1943,82 +1832,36 @@ def dimension_coordinate( identity: optional Select dimension coordinate constructs that have an identity, defined by their `!identities` methods, that - matches any of the given values. In addition to - construct identities, the values are matched against: + matches any of the given values. Additionally, the values are matched against construct identifiers, with or without the ``'key%'`` prefix. - Additionly, TODOx the values are matched against the identity or - construct identifier, with or without the ``'key%'`` - prefix, of a domain axis construct that is spanned by - a dimension coordinate construct's data. - - *Parameter example:* - ``'domainaxis2'`` - - *Parameter example:* - ``'ncdim%latitude'`` - - * The integer position, in the field construct's data, - of the domain axis construct that is spanned by a - dimension coordinate construct's data. 
- - *Parameter example:* - ``0'`` - - *Parameter example:* - ``cf.gt(2)`` - - If no values are provided then all constructs are - selected. + If no values are provided then all dimension + coordinate constructs are selected. {{value match}} {{displayed identity}} - *Parameter example:* - ``'Y'`` - - *Parameter example:* - ``latitude'`` - - *Parameter example:* - ``re.compile('^lat')`` - - *Parameter example:* - ``'long_name=Latitude'`` - - *Parameter example:* - ``'Z', 'altutude'`` + {{key: `bool`, optional}} - key: `bool`, optional - If True then return the selected construct - identifier. By default the construct itself is - returned. + {{item: `bool`, optional}} default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + Return the value of the *default* parameter if there + is no unique construct. - item: `bool`, optional - If True then return the selected construct and its - construct identifier in a 2-tuple. By default the only - construct is returned. + {{default Exception}} - .. versionadded:: 3.9.0 + {{filter_kwargs: optional}} :Returns: - `DimensionCoordinate` or `str` or `tuple` - The selected dimension coordinate construct, or its - construct identifier, or both. + {{Returns construct}} **Examples:** - TODO - """ return self._filter_interface( ("dimension_coordinate",), @@ -2095,7 +1938,9 @@ def direction(self, identity=None, axes=None, **kwargs): if axis is None: return True - for coord in self.dimension_coordinates(filter_by_axis=(axis,), todict=True).values(): + for coord in self.dimension_coordinates( + filter_by_axis=(axis,), todict=True + ).values(): return coord.direction() return True @@ -2120,11 +1965,11 @@ def directions(self): out = {key: True for key in self.domain_axes(todict=True)} data_axes = self.constructs.data_axes() - + for key, coord in self.dimension_coordinates(todict=True).items(): axis = data_axes[key][0] - out[axis] = direction - + out[axis] = coord.direction() + return out def domain_ancillary( @@ -2135,106 +1980,49 @@ def domain_ancillary( item=False, **filter_kwargs, ): - """Return a domain ancillary construct, or its key. + """Select a domain ancillary construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillaries`, - `domain_axis`, `field_ancillary` + .. seealso:: `construct`, `domain_ancillaries` :Parameters: identity: optional - Select the domain ancillary construct by one of: - - * `None`. This is the default, which selects the domain - ancillary construct when there is only one of them. - - * The identity or key of a domain ancillary construct. - - * The identity or key of a domain axis construct that is - spanned by a unique 1-d domain ancillary construct's data. - - * The position, in the field construct's data, of a domain - axis construct that is spanned by a unique 1-d domain - ancillary construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. 
A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'domainancillary2'`` and - ``'key%domainancillary2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Select domain ancillary constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. - *Parameter example:* - ``identity='Y'`` + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - *Parameter example:* - ``identity='latitude'`` + If no values are provided then all domain ancillary + constructs are selected. - *Parameter example:* - ``identity='long_name=Latitude'`` + {{value match}} - *Parameter example:* - ``identity='domainancillary1'`` + {{displayed identity}} - *Parameter example:* - ``identity='ncdim%y'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='domainaxis2'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity=0`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `DomainAncillary` or `str` - The selected domain ancillary coordinate construct, or its - key. + {{Returns construct}} **Examples:** - TODO - """ return self._filter_interface( ("domain_ancillary",), @@ -2255,149 +2043,76 @@ def domain_axis( item=False, **filter_kwargs, ): - """Return a domain axis construct, or its key. + """Select a domain axis construct. + + {{unique construct}} .. versionadded:: 3.0.0 - .. seealso:: `construct`, `auxiliary_coordinate`, `cell_measure`, - `cell_method`, `coordinate`, `coordinate_reference`, - `dimension_coordinate`, `domain_ancillary`, - `domain_axes`, `field_ancillary` + .. seealso:: `construct`, `domain_axes` :Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. 
In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] + identity: optional + Select domain axis constructs that have an identity, + defined by their `!identities` methods, that matches + any of the given values. - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - A position of a domain axis construct in the field - construct's data is specified by an integer index. + Additionally, if a domain axis construct is spanned by + the data of a unique 1-d dimension or auxiliary + coordinate construct, then if a value matches any + identity of that coordinate construct, defined by its + `!identities` method, then that domain axis construct + is selected. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Additionally, if there are `Field` data and a value + matches the positions of the domain axis construct in + that data then the corresponding domain axis + constructs are selected. - *Parameter example:* - ``identity='long_name=Latitude'`` + If no values are provided then all domain axis + constructs are selected. - *Parameter example:* - ``identity='dimensioncoordinate1'`` + {{value match}} - *Parameter example:* - ``identity='domainaxis2'`` + {{displayed identity}} - *Parameter example:* - ``identity='key%domainaxis2'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='ncdim%y'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity=2`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `DomainAxis` or `str` - The selected domain axis construct, or its key. 
+ {{Returns construct}} - **Examples:** - TODO + **Examples:** """ - # Try for integer index - if identity: - identity2 = [] - - data_axes = self.get_data_axes(default=None) - for i in identity: - try: - identity2.append(data_axes[i]) - except TypeError: - identity2.append(i) - except IndexError: - pass - - if not identity2: - if default is None: - return default - - return self._default( - default, - "Indices do not exist for field construct data dimenions", - ) - - identity = identity2 - -# c = self._select_construct( -# ("domain_axis",), -# "domain_axis", -# identity, -# key=key, -# default=None, -# item=item, -# **filter_kwargs, -# ) - c = self._filter_interface( - ("domain_axis",), - "domain_axis", - identity, - construct=True, - key=key, - item=item, - default=None, - **filter_kwargs, - ) - if c is not None: - return c + filter_kwargs["todict"] = True - da_key = self.domain_axis_key(*identity, default=None) + c = self.domain_axes(*identity, **filter_kwargs) - if da_key is not None: + # Return construct, or key, or both, or default + n = len(c) + if n == 1: + k, construct = c.popitem() if key: - return da_key - - construct = self.constructs[da_key] + return k if item: - return da_key, construct + return k, construct return construct @@ -2406,7 +2121,7 @@ def domain_axis( return self._default( default, - f"{self.__class__.__name__}.domain_axis() can't return zero " + f"{self.__class__.__name__}.domain_axis() can't return {n} " "constructs", ) @@ -2743,8 +2458,9 @@ def _parse_axes(self, axes): return [self.domain_axis(x, key=True) for x in axes] - def replace_construct(self, *identity, new=None, copy=True, - **filter_kwargs): + def replace_construct( + self, *identity, new=None, copy=True, **filter_kwargs + ): """Replace a metadata construct. Replacement assigns the same construct key and, if applicable, the @@ -3021,8 +2737,9 @@ def domain_ancs(self, *identities, **filter_kwargs): def key(self, *identity, default=ValueError(), **filter_kwargs): """Alias for `construct_key`.""" - return self.construct(*identity, default=default, key=True, - **filter_kwargs) + return self.construct( + *identity, default=default, key=True, **filter_kwargs + ) def measure( self, diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index 3e5a8c6133..09515555b8 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -1163,4 +1163,3 @@ def getprop(self, prop): _DEPRECATION_ERROR_METHOD( self, "getprop", "Use method 'get_property' instead" ) # pragma: no cover - diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 0f61930ced..76ef47b5ca 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -4802,7 +4802,7 @@ def identities(self, generator=False, **kwargs): """ identities = super().identities(generator=True, **kwargs) - + i = getattr(self, "id", None) if i is None: g = identities diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index 97d699d4cb..2de19a42ce 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -446,7 +446,9 @@ def test_Field_replace_construct(self): f.replace_construct("grid_longitude", new=f.construct("latitude")) with self.assertRaises(Exception): - f.replace_construct("grid_longitude", new=f.construct("grid_latitude")) + f.replace_construct( + "grid_longitude", new=f.construct("grid_latitude") + ) def test_Field_allclose(self): f = self.f.copy() From eb2e09caef13538ebaf0c95e4f537825cabcf9e7 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 14 Apr 2021 23:51:20 +0100 Subject: [PATCH 29/53] devs --- 
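As an illustration of the reworked `indices` interface exercised by the tests in this patch, a minimal sketch (the file name is a placeholder, and `f` is assumed to have a cyclic `grid_longitude` dimension coordinate, as in the test file used below):

import cf

f = cf.read('file.nc')[0]   # placeholder file name

# mode may be 'compress' (the default), 'envelope' or 'full'
indices = f.indices('full', grid_longitude=cf.wi(310, 450))

# When auxiliary masks are required, the returned tuple starts with the
# string 'mask' followed by a tuple of mask arrays; the remaining elements
# are the per-dimension indices
if indices[0] == 'mask':
    masks = indices[1]

# The returned indices can be used directly to subspace the field
g = f[indices]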
cf/field.py | 164 +++++++++++++++++- cf/mixin/fielddomain.py | 325 +++++++++--------------------------- cf/test/test_Field.py | 358 +++++++++++++++++++++++++++++++++++++--- 3 files changed, 570 insertions(+), 277 deletions(-) diff --git a/cf/field.py b/cf/field.py index fbf78f82df..e6047e5b47 100644 --- a/cf/field.py +++ b/cf/field.py @@ -12088,6 +12088,65 @@ def indices(self, *mode, **kwargs): """ + if "exact" in mode: + _DEPRECATION_ERROR_ARG( + self, + "indices", + "exact", + "Keywords are now never interpreted as regular expressions.", + ) # pragma: no cover + + if len(mode) > 1: + raise ValueError( + "Can't provide more than one positional argument. " + f"Got: {', '.join(repr(x) for x in mode)}" + ) + + if not mode or "compress" in mode: + mode = "compress" + elif "envelope" in mode: + mode = "envelope" + elif "full" in mode: + mode = "full" + else: + raise ValueError(f"Invalid value for 'mode' argument: {mode[0]!r}") + + data_axes = self.get_data_axes() + + # ------------------------------------------------------------ + # Get the indices for every domain axis in the domain, + # including any auxiliary masks + # ------------------------------------------------------------ + domain_indices = self._indices(mode, data_axes, True, **kwargs) + + # Initialise the output indices with any auxiliary masks + auxiliary_mask = domain_indices["mask"] + if auxiliary_mask: + # Ensure that each auxiliary mask is broadcastable to the + # data + masks = [] + for axes, mask in auxiliary_mask.items(): + axes = list(axes) + for i, axis in enumerate(data_axes): + if axis not in axes: + axes.insert(0, axis) + mask.insert_dimension(0, inplace=True) + + new_order = [axes.index(axis) for axis in data_axes] + mask.transpose(new_order, inplace=True) + masks.append(mask) + + indices = ["mask", tuple(masks)] + else: + indices = [] + + # Add the indices that apply to the field's data dimensions + indices.extend([domain_indices["indices"][axis] for axis in data_axes]) + + return tuple(indices) + + # iiiiiiiiiiiiiiiiiiii + if "exact" in mode: _DEPRECATION_ERROR_ARG( self, @@ -12773,6 +12832,95 @@ def set_data( return f + def domain_axis( + self, + *identity, + default=ValueError(), + key=False, + item=False, + **filter_kwargs, + ): + """Select a domain axis construct. + + {{unique construct}} + + .. versionadded:: 1.8.9.0 + + .. seealso:: `construct`, `domain_axes` + + :Parameters: + + identity: optional + Select domain axis constructs that have an identity, + defined by their `!identities` methods, that matches + any of the given values. + + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. + + Additionally, if for a given value + ``f.coordinates(value, filter_by_naxes=(1,))`` returns + 1-d coordinate constructs that all span the same + domain axis construct then that domain axis construct + is selected. See `coordinates` for details. + + Additionally, if there is a `Field` data array and a + value matches the integer position of an array + dimension, then the corresponding domain axis + construct is selected. + + If no values are provided then all domain axis + constructs are selected. + + {{value match}} + + {{displayed identity}} + + {{key: `bool`, optional}} + + {{item: `bool`, optional}} + + default: optional + Return the value of the *default* parameter if there + is no unique construct. 
+ + {{default Exception}} + + {{filter_kwargs: optional}} + + :Returns: + + {{Returns construct}} + + + **Examples:** + + """ + filter_kwargs["todict"] = True + + c = self.domain_axes(*identity, **filter_kwargs) + + # Return construct, or key, or both, or default + n = len(c) + if n == 1: + k, construct = c.popitem() + if key: + return k + + if item: + return k, construct + + return construct + + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.domain_axis() can't return {n} " + "constructs", + ) + def domain_mask(self, **kwargs): """Return a boolean field that is True where criteria are met. @@ -14705,12 +14853,13 @@ def transpose( data_axes = self.get_data_axes(default=()) if isinstance(axes, (str, int)): axes = (axes,) + axes2 = [self.domain_axis(x, key=True) for x in axes] + if sorted(axes2) != sorted(data_axes): raise ValueError( - "Can't transpose {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axes - ) + f"Can't transpose {self.__class__.__name__}: " + f"Bad axis specification: {axes!r}" ) iaxes = [data_axes.index(axis) for axis in axes2] @@ -14850,10 +14999,11 @@ def cell_method( Additionally, the values are matched against construct identifiers, with or without the ``'key%'`` prefix. - Additionally, if a value would select a unique domain - axis construct with ``f.domain_axis(value)`` then any - cell method constructs that span exactly that axis are - selected. + Additionally, if for a given value + ``f.domain_axes(value)`` returns a unique domain axis + construct then any cell method constructs that span + exactly that axis are selected. See `domain_axes` for + details. If no values are provided then all cell method constructs are selected. diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 2c9db7e938..dfcb68a86c 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -42,9 +42,6 @@ class FieldDomain: """ - # ---------------------------------------------------------------- - # Private methods - # ---------------------------------------------------------------- def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): """Create indices that define a subspace of the field or domain construct. @@ -108,31 +105,32 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): # Initialize indices indices = {axis: slice(None) for axis in domain_axes} - data_axes = self.constructs.data_axes() + construct_data_axes = self.constructs.data_axes() parsed = {} unique_axes = set() n_axes = 0 + for identity, value in kwargs.items(): - # if identity in domain_axes: - # axes = (identity,) - # key = None - # construct = None - # else: key, construct = self.construct( - identity, item=True, default=(None, None) + identity, + filter_by_data=True, + item=True, + default=(None, None), ) - - if construct is None: - raise ValueError( - f"Can't find indices. Ambiguous axis or axes: " - f"{identity!r}" - ) - - if key in domain_axes: - axes = key + if construct is not None: + axes = self.get_data_axes(key) else: - axes = data_axes[key] + da_key = self.domain_axis(identity, key=True, default=None) + if da_key is not None: + axes = (da_key,) + key = None + construct = None + else: + raise ValueError( + f"Can't find indices. 
Ambiguous axis or axes " + f"defined by {identity!r}" + ) if axes in parsed: # The axes are the same as an exisiting key @@ -194,7 +192,7 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): item_axes = axes[0] logger.debug( - f" item_axes = {item_axes!r}\n keys = {keys!r}" + f" item_axes = {item_axes!r}\n keys = {keys!r}" ) # pragma: no cover if n_axes == 1: @@ -227,8 +225,9 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): if envelope or full: size = domain_axes[axis].get_size() - d = self._Data(range(size)) - ind = (d[value].array,) + # TODODASK - consider using dask.arange here + d = np.arange(size) # self._Data(range(size)) + ind = (d[value],) # .array,) index = slice(None) elif ( @@ -289,11 +288,11 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): index = slice(start, stop, 1) if full: - # index = slice(start, start+size, 1) + # TODODASK - consider using some sort of + # dask.arange here d = self._Data(list(range(size))) d.cyclic(0) ind = (d[index].array,) - index = slice(None) elif item is not None: @@ -326,7 +325,9 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): "construct with data for which to create indices" ) - logger.debug(f" index = {index}") # pragma: no cover + logger.debug( + f" index = {index}\n" f" ind = {ind}" + ) # pragma: no cover # Put the index into the correct place in the list of # indices. @@ -454,8 +455,6 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): masked_subspace_size = 1 ind = np.array(ind) - logger.debug(" ind = {ind}") # pragma: no cover - for i, (axis, start, stop) in enumerate( zip(canonical_axes, ind.min(axis=1), ind.max(axis=1)) ): @@ -494,11 +493,10 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): create_mask = False # TODODASK - if we have 2 list of integers then we need to - # apply different auxiliary masks (if any) - # after different __getitems__. SCRUB THAT! if - # we have an auxiliary mask, then by - # definition we do _not_ have a list(s) of - # integers + # apply different auxiliary masks (if any) + # after different __getitems__. SCRUB THAT! if + # we have an auxiliary mask, then by definition + # we do _not_ have a list(s) of integers # -------------------------------------------------------- # Create an auxiliary mask for these axes @@ -1329,117 +1327,38 @@ def construct( :Parameters: identity: optional - Select the construct. Must be - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + Select constructs that have an identity, defined by + their `!identities` methods, that matches any of the + given values. 
- Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - *Parameter example:* - ``identity='T' + If no values are provided then all constructs are + selected. - *Parameter example:* - ``identity='measure:area'`` + {{value match}} - *Parameter example:* - ``identity='cell_area'`` + {{displayed identity}} - *Parameter example:* - ``identity='long_name=Cell Area'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='cellmeasure1'`` + {{item: `bool`, optional}} default: optional - Return the value of the *default* parameter if a construct - can not be found. + Return the value of the *default* parameter if there + is no unique construct. {{default Exception}} - If the *default* is `None`, or if *item* is True and - *default* is a 2-tuple of `Ǹone`s, then TODO - - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. - - item: TODO + {{filter_kwargs: optional}} :Returns: - The selected coordinate construct, or its key. + {{Returns construct}} **Examples:** - >>> f = cf.example_field(1) - >>> print(f) - Field: air_temperature (ncvar%ta) - --------------------------------- - Data : air_temperature(atmosphere_hybrid_height_coordinate(1), grid_latitude(10), grid_longitude(9)) K - Cell methods : grid_latitude(10): grid_longitude(9): mean where land (interval: 0.1 degrees) time(1): maximum - Field ancils : air_temperature standard_error(grid_latitude(10), grid_longitude(9)) = [[0.76, ..., 0.32]] K - Dimension coords: atmosphere_hybrid_height_coordinate(1) = [1.5] - : grid_latitude(10) = [2.2, ..., -1.76] degrees - : grid_longitude(9) = [-4.7, ..., -1.18] degrees - : time(1) = [2019-01-01 00:00:00] - Auxiliary coords: latitude(grid_latitude(10), grid_longitude(9)) = [[53.941, ..., 50.225]] degrees_N - : longitude(grid_longitude(9), grid_latitude(10)) = [[2.004, ..., 8.156]] degrees_E - : long_name=Grid latitude name(grid_latitude(10)) = [--, ..., b'kappa'] - Cell measures : measure:area(grid_longitude(9), grid_latitude(10)) = [[2391.9657, ..., 2392.6009]] km2 - Coord references: grid_mapping_name:rotated_latitude_longitude - : standard_name:atmosphere_hybrid_height_coordinate - Domain ancils : ncvar%a(atmosphere_hybrid_height_coordinate(1)) = [10.0] m - : ncvar%b(atmosphere_hybrid_height_coordinate(1)) = [20.0] - : surface_altitude(grid_latitude(10), grid_longitude(9)) = [[0.0, ..., 270.0]] m - - >>> f.construct('long_name=Grid latitude name') - - >>> f.construct('ncvar%a') - - >>> f.construct('measure:area') - - >>> f.construct('domainaxis0') - - >>> f.construct('height') - Traceback (most recent call last): - ... - ValueError: Can't return zero constructs - >>> f.construct('height', default=False) - False - >>> f.construct('height', default=TypeError("No height coordinates")) - Traceback (most recent call last): - ... - TypeError: No height coordinates - """ return self._filter_interface( (), @@ -1503,53 +1422,6 @@ def cell_measure( **Examples:** - - - identity: optional - Select the cell measure construct by: - - * `None`. This is the default, which selects the cell - measure construct when there is only one of them. - - * The identity or key of a cell measure construct. 
- - * The identity or key of a domain axis construct that is - spanned by a unique 1-d cell measure construct's data. - - * The position, in the field construct's data, of a - domain axis construct that is spanned by a unique 1-d - cell measure construct's data. - - A construct identity is specified by a string - (e.g. ``'long_name=Cell Area', ``'ncvar%areacello'``, - etc.); a `Query` object (e.g. ``cf.eq('measure:area')``); - or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'cellmeasure2'`` and - ``'key%cellmeasure2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - """ return self._filter_interface( ("cell_measure",), @@ -1613,8 +1485,6 @@ def coordinate( **Examples:** - TODO - """ return self._filter_interface( ("dimension_coordinate", "auxiliary_coordinate"), @@ -1646,85 +1516,39 @@ def coordinate_reference( :Parameters: - identity: optional - Select the coordinate reference construct by one of: - - * `None`. This is the default, which selects the - coordinate reference construct when there is only one - of them. - - * The identity or key of a coordinate reference - construct. - - A construct identity is specified by a string - (e.g. ``'grid_mapping_name:latitude_longitude'``, - ``'latitude_longitude'``, ``'ncvar%lat_lon'``, etc.); a - `Query` object (e.g. ``cf.eq('latitude_longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - two identities: - - >>> x.identities() - ['grid_mapping_name:latitude_longitude', 'ncvar%lat_lon'] - - A identity's prefix of ``'grid_mapping_name:'`` or - ``'standard_name:'`` may be omitted - (e.g. ``'standard_name:atmosphere_hybrid_height_coordinate'`` - and ``'atmosphere_hybrid_height_coordinate'`` are both - acceptable identities). + identities: optional + Select coordinate reference constructs that have an + identity, defined by their `!identities` methods, that + matches any of the given values. - A construct key may optionally have the ``'key%'`` - prefix. For example ``'coordinatereference2'`` and - ``'key%coordinatereference2'`` are both acceptable keys. + Additionally, the values are matched against construct + identifiers, with or without the ``'key%'`` prefix. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + If no identities are provided then all coordinate + reference constructs are selected. 
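A minimal sketch of coordinate reference selection, assuming the field returned by `cf.example_field(1)`, which has two coordinate reference constructs; as noted in the original docstring, the `'grid_mapping_name:'` and `'standard_name:'` prefixes may be omitted:

import cf

f = cf.example_field(1)

# These two calls select the same construct
cr = f.coordinate_reference('grid_mapping_name:rotated_latitude_longitude')
cr = f.coordinate_reference('rotated_latitude_longitude')

# Return the construct key instead of the construct
key = f.coordinate_reference(
    'standard_name:atmosphere_hybrid_height_coordinate', key=True
)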
- *Parameter example:* - ``identity='standard_name:atmosphere_hybrid_height_coordinate'`` - - *Parameter example:* - ``identity='grid_mapping_name:rotated_latitude_longitude'`` + {{value match}} - *Parameter example:* - ``identity='transverse_mercator'`` + {{displayed identity}} - *Parameter example:* - ``identity='coordinatereference1'`` + {{key: `bool`, optional}} - *Parameter example:* - ``identity='key%coordinatereference1'`` + {{item: `bool`, optional}} - *Parameter example:* - ``identity='ncvar%lat_lon'`` + default: optional + Return the value of the *default* parameter if there + is no unique construct. - key: `bool`, optional - If True then return the selected construct key. By - default the construct itself is returned. + {{default Exception}} - default: optional - Return the value of the *default* parameter if a construct - can not be found. If set to an `Exception` instance then - it will be raised instead. + {{filter_kwargs: optional}} :Returns: - `CoordinateReference` or `str` - The selected coordinate reference construct, or its key. + {{Returns construct}} **Examples:** - TODO - """ return self._filter_interface( ("coordinate_reference",), @@ -2053,7 +1877,7 @@ def domain_axis( :Parameters: - identity: optional + identities: `tuple`, optional Select domain axis constructs that have an identity, defined by their `!identities` methods, that matches any of the given values. @@ -2061,17 +1885,16 @@ def domain_axis( Additionally, the values are matched against construct identifiers, with or without the ``'key%'`` prefix. - Additionally, if a domain axis construct is spanned by - the data of a unique 1-d dimension or auxiliary - coordinate construct, then if a value matches any - identity of that coordinate construct, defined by its - `!identities` method, then that domain axis construct - is selected. + Additionally, if for a given value + ``f.coordinates(value, filter_by_naxes=(1,))`` returns + 1-d coordinate constructs that all span the same + domain axis construct then that domain axis construct + is selected. See `coordinates` for details. - Additionally, if there are `Field` data and a value - matches the positions of the domain axis construct in - that data then the corresponding domain axis - constructs are selected. + Additionally, if there is a `Field` data array and a + value matches the integer position of an array + dimension, then the corresponding domain axis + construct is selected. If no values are provided then all domain axis constructs are selected. 
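A minimal sketch of the domain axis selection paths described above, assuming the field returned by `cf.example_field(1)`, whose data spans the height, grid latitude and grid longitude axes:

import cf

f = cf.example_field(1)

# Via a 1-d coordinate construct that spans the axis
axis = f.domain_axis('grid_latitude')

# Via the integer position of the corresponding data dimension
axis = f.domain_axis(1)

# Return the construct identifier, or the (key, construct) pair
key = f.domain_axis('grid_latitude', key=True)
key, axis = f.domain_axis('grid_latitude', item=True)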
diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index 2de19a42ce..9f5567e6b9 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -77,17 +77,17 @@ class FieldTest(unittest.TestCase): atol = cf.atol() rtol = cf.rtol() - test_only = [] + f = cf.read(filename)[0] - def setUp(self): - self.f = cf.read(self.filename)[0] + f0 = cf.example_field(0) + f1 = cf.example_field(1) def test_Field_creation_commands(self): for i in range(7): f = cf.example_field(i) f.creation_commands() - f = cf.example_field(1) + f = self.f1 for rd in (False, True): f.creation_commands(representative_data=rd) @@ -102,7 +102,7 @@ def test_Field_creation_commands(self): f.creation_commands(namespace=ns) def test_Field_get_filenames(self): - f = cf.example_field(0) + f = self.f0 cf.write(f, tmpfile) g = cf.read(tmpfile)[0] @@ -149,7 +149,7 @@ def test_Field_halo(self): ) def test_Field_has_construct(self): - f = cf.example_field(1) + f = self.f1 self.assertTrue(f.has_construct("T")) self.assertTrue(f.has_construct("long_name=Grid latitude name")) @@ -205,7 +205,7 @@ def test_Field_compress_uncompress(self): self.assertTrue(f.equals(c, verbose=2), message) def test_Field_apply_masking(self): - f = cf.example_field(0) + f = self.f0.copy() for prop in ( "missing_value", @@ -300,7 +300,7 @@ def test_Field_flatten(self): self.assertIsNone(f.flatten(inplace=True)) def test_Field_bin(self): - f = self.f.copy() + f = self.f d = f.digitize(10) b = f.bin("sample_size", digitized=d) @@ -451,7 +451,7 @@ def test_Field_replace_construct(self): ) def test_Field_allclose(self): - f = self.f.copy() + f = self.f g = f.copy() self.assertTrue(f.allclose(f)) @@ -1264,8 +1264,301 @@ def test_Field_insert_dimension(self): with self.assertRaises(ValueError): f.insert_dimension(1, "qwerty") + # i# def test_Field_indices(self): + # i# f = self.f.copy() + # i# + # i# array = numpy.ma.array(f.array) + # i# + # i# x = f.dimension_coordinate("X") + # i# a = x.varray + # i# a[...] 
= numpy.arange(0, 360, 40) + # i# x.set_bounds(x.create_bounds()) + # i# f.cyclic("X", iscyclic=True, period=360) + # i# + # i# f0 = f.copy() + # i# + # i# # wi (increasing) + # i# indices = f.indices(grid_longitude=cf.wi(50, 130)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 2), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [80, 120]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(-90, 50)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-80, -40, 0, 40]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(310, 450)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(310 - 1080, 450 - 1080)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(310 + 720, 450 + 720)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(-90, 370)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue( + # i# (x == [-80, -40, 0, 40, 80, 120, 160, 200, 240.0]).all() + # i# ) + # i# + # i# with self.assertRaises(IndexError): + # i# f.indices(grid_longitude=cf.wi(90, 100)) + # i# + # i# indices = f.indices("full", grid_longitude=cf.wi(310, 450)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertEqual(x.shape, (9,), x.shape) + # i# self.assertTrue( + # i# (x == [0, 40, 80, 120, 160, 200, 240, 280, 320]).all(), x + # i# ) + # i# a = array.copy() + # i# a[..., [3, 4, 5, 6, 7]] = numpy.ma.masked + # i# self.assertTrue(cf.functions._numpy_allclose(g.array, a), g.array) + # i# + # i# indices = f.indices("full", grid_longitude=cf.wi(70, 200)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertEqual(x.shape, (9,), x.shape) + # i# self.assertTrue( + # i# (x == [0, 40, 80, 120, 160, 200, 240, 280, 320]).all(), x + # i# ) + # i# a = array.copy() + # i# a[..., [0, 1, 6, 7, 8]] = numpy.ma.masked + # i# self.assertTrue(cf.functions._numpy_allclose(g.array, a), g.array) + # i# + # i# # wi (decreasing) + # i# f.flip("X", inplace=True) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(50, 130)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 2), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [80, 120][::-1]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(-90, 50)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-80, -40, 0, 40][::-1]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(310, 450)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80][::-1]).all()) + # i# + 
# i# indices = f.indices(grid_longitude=cf.wi(310 - 1080, 450 - 1080)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80][::-1]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.wi(310 + 720, 450 + 720)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 4), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-40, 0, 40, 80][::-1]).all()) + # i# + # i# with self.assertRaises(IndexError): + # i# f.indices(grid_longitude=cf.wi(90, 100)) + # i# + # i# indices = f.indices("full", grid_longitude=cf.wi(310, 450)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertEqual(x.shape, (9,), x.shape) + # i# self.assertTrue( + # i# (x == [0, 40, 80, 120, 160, 200, 240, 280, 320][::-1]).all(), x + # i# ) + # i# + # i# indices = f.indices("full", grid_longitude=cf.wi(70, 200)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertEqual(x.shape, (9,), x.shape) + # i# self.assertTrue( + # i# (x == [0, 40, 80, 120, 160, 200, 240, 280, 320][::-1]).all(), x + # i# ) + # i# + # i# # wo + # i# f = f0.copy() + # i# + # i# indices = f.indices(grid_longitude=cf.wo(50, 130)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 7), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [-200, -160, -120, -80, -40, 0, 40]).all()) + # i# + # i# with self.assertRaises(IndexError): + # i# f.indices(grid_longitude=cf.wo(-90, 370)) + # i# + # i# # set + # i# indices = f.indices(grid_longitude=cf.set([320, 40, 80, 99999])) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 3), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [40, 80, 320]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.lt(90)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 3), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [0, 40, 80]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.gt(90)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 6), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [120, 160, 200, 240, 280, 320]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.le(80)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 3), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [0, 40, 80]).all()) + # i# + # i# indices = f.indices(grid_longitude=cf.ge(80)) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 7), g.shape) + # i# x = g.dimension_coordinate("X").array + # i# self.assertTrue((x == [80, 120, 160, 200, 240, 280, 320]).all()) + # i# + # i# # 2-d + # i# lon = f.construct("longitude").array + # i# lon = numpy.transpose(lon) + # i# lon = numpy.expand_dims(lon, 0) + # i# + # i# lat = f.construct("latitude").array + # i# lat = numpy.expand_dims(lat, 0) + # i# + # i# array = numpy.ma.where( + # i# (lon >= 92) & (lon <= 134), f.array, numpy.ma.masked + # i# ) + # i# + # i# for mode in ("", "compress", "full", "envelope"): + # i# indices = f.indices(mode, longitude=cf.wi(92, 134)) + # i# g = f[indices] + # i# if mode == "full": + # i# shape = (1, 10, 9) + # i# array2 = array + # i# elif mode == "envelope": + # i# shape = (1, 10, 5) + # i# array2 = 
array[..., 3:8] + # i# else: + # i# shape = (1, 10, 5) + # i# array2 = array[..., 3:8] + # i# + # i# self.assertEqual(g.shape, shape, str(g.shape) + "!=" + str(shape)) + # i# self.assertTrue( + # i# cf.functions._numpy_allclose(array2, g.array), g.array + # i# ) + # i# + # i# array = numpy.ma.where( + # i# ((lon >= 72) & (lon <= 83)) | (lon >= 118), + # i# f.array, + # i# numpy.ma.masked, + # i# ) + # i# + # i# for mode in ("", "compress", "full", "envelope"): + # i# indices = f.indices(mode, longitude=cf.wi(72, 83) | cf.gt(118)) + # i# g = f[indices] + # i# if mode == "full": + # i# shape = (1, 10, 9) + # i# elif mode == "envelope": + # i# shape = (1, 10, 8) + # i# else: + # i# shape = (1, 10, 6) + # i# + # i# self.assertEqual(g.shape, shape, str(g.shape) + "!=" + str(shape)) + # i# + # i# indices = f.indices( + # i# "full", + # i# longitude=cf.wi(92, 134), + # i# latitude=cf.wi(-26, -20) | cf.ge(30), + # i# ) + # i# g = f[indices] + # i# self.assertEqual(g.shape, (1, 10, 9), g.shape) + # i# array = numpy.ma.where( + # i# ( + # i# ((lon >= 92) & (lon <= 134)) + # i# & (((lat >= -26) & (lat <= -20)) | (lat >= 30)) + # i# ), + # i# f.array, + # i# numpy.ma.masked, + # i# ) + # i# self.assertTrue(cf.functions._numpy_allclose(array, g.array), g.array) + # i# + # i# for mode in ("", "compress", "full", "envelope"): + # i# indices = f.indices(mode, grid_longitude=cf.contains(23.2)) + # i# g = f[indices] + # i# if mode == "full": + # i# shape = f.shape + # i# else: + # i# shape = (1, 10, 1) + # i# + # i# self.assertEqual(g.shape, shape, g.shape) + # i# + # i# if mode != "full": + # i# self.assertEqual( + # i# g.construct("grid_longitude").array, 40 + # i# ) # TODO + # i# + # i# for mode in ("", "compress", "full", "envelope"): + # i# indices = f.indices(mode, grid_latitude=cf.contains(3)) + # i# g = f[indices] + # i# if mode == "full": + # i# shape = f.shape + # i# else: + # i# shape = (1, 1, 9) + # i# + # i# self.assertEqual(g.shape, shape, g.shape) + # i# + # i# if mode != "full": + # i# self.assertEqual(g.construct("grid_latitude").array, 3) + # i# + # i# for mode in ("", "compress", "full", "envelope"): + # i# indices = f.indices(mode, longitude=cf.contains(83)) + # i# g = f[indices] + # i# if mode == "full": + # i# shape = f.shape + # i# else: + # i# shape = (1, 1, 1) + # i# + # i# self.assertEqual(g.shape, shape, g.shape) + # i# + # i# if mode != "full": + # i# self.assertEqual(g.construct("longitude").array, 83) + # i# + # i# # Calls that should fail + # i# with self.assertRaises(Exception): + # i# f.indices(longitude=cf.gt(23), grid_longitude=cf.wi(92, 134)) + # i# with self.assertRaises(Exception): + # i# f.indices(grid_longitude=cf.gt(23), longitude=cf.wi(92, 134)) + # i# with self.assertRaises(Exception): + # i# f.indices(grid_latitude=cf.contains(-23.2)) + def test_Field_indices(self): - f = self.f.copy() + filename = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "test_file.nc" + ) + f = cf.read(self.filename)[0] array = numpy.ma.array(f.array) @@ -1320,18 +1613,44 @@ def test_Field_indices(self): f.indices(grid_longitude=cf.wi(90, 100)) indices = f.indices("full", grid_longitude=cf.wi(310, 450)) + self.assertTrue(indices[0], "mask") + self.assertTrue( + ( + indices[1][0].array + == [ + [ + [ + False, + False, + False, + True, + True, + True, + True, + True, + False, + ] + ] + ] + ).all() + ) g = f[indices] self.assertEqual(g.shape, (1, 10, 9), g.shape) + x = g.dimension_coordinate("X").array self.assertEqual(x.shape, (9,), x.shape) + self.assertTrue( (x == [0, 40, 80, 
120, 160, 200, 240, 280, 320]).all(), x ) + a = array.copy() - a[..., [3, 4, 5, 6, 7]] = numpy.ma.masked + a[..., 3:8] = numpy.ma.masked + self.assertTrue(cf.functions._numpy_allclose(g.array, a), g.array) indices = f.indices("full", grid_longitude=cf.wi(70, 200)) + self.assertTrue(indices[0], "mask") g = f[indices] self.assertEqual(g.shape, (1, 10, 9), g.shape) x = g.dimension_coordinate("X").array @@ -1347,6 +1666,7 @@ def test_Field_indices(self): f.flip("X", inplace=True) indices = f.indices(grid_longitude=cf.wi(50, 130)) + self.assertTrue(indices[0], "mask") g = f[indices] self.assertEqual(g.shape, (1, 10, 2), g.shape) x = g.dimension_coordinate("X").array @@ -1452,7 +1772,7 @@ def test_Field_indices(self): (lon >= 92) & (lon <= 134), f.array, numpy.ma.masked ) - for mode in ("", "compress", "full", "envelope"): + for mode in ("compress", "full", "envelope"): indices = f.indices(mode, longitude=cf.wi(92, 134)) g = f[indices] if mode == "full": @@ -1476,12 +1796,12 @@ def test_Field_indices(self): numpy.ma.masked, ) - for mode in ("", "compress", "full", "envelope"): - indices = f.indices(mode, longitude=cf.wi(72, 83) | cf.gt(118)) + for mode in ((), ("compress",), ("full",), ("envelope",)): + indices = f.indices(*mode, longitude=cf.wi(72, 83) | cf.gt(118)) g = f[indices] - if mode == "full": + if mode == ("full",): shape = (1, 10, 9) - elif mode == "envelope": + elif mode == ("envelope",): shape = (1, 10, 8) else: shape = (1, 10, 6) @@ -1505,7 +1825,7 @@ def test_Field_indices(self): ) self.assertTrue(cf.functions._numpy_allclose(array, g.array), g.array) - for mode in ("", "compress", "full", "envelope"): + for mode in ("compress", "full", "envelope"): indices = f.indices(mode, grid_longitude=cf.contains(23.2)) g = f[indices] if mode == "full": @@ -1520,7 +1840,7 @@ def test_Field_indices(self): g.construct("grid_longitude").array, 40 ) # TODO - for mode in ("", "compress", "full", "envelope"): + for mode in ("compress", "full", "envelope"): indices = f.indices(mode, grid_latitude=cf.contains(3)) g = f[indices] if mode == "full": @@ -1533,7 +1853,7 @@ def test_Field_indices(self): if mode != "full": self.assertEqual(g.construct("grid_latitude").array, 3) - for mode in ("", "compress", "full", "envelope"): + for mode in ("compress", "full", "envelope"): indices = f.indices(mode, longitude=cf.contains(83)) g = f[indices] if mode == "full": From 98b1ebddc437a243e0e66ffbde0daeadebcec358 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 15 Apr 2021 20:30:41 +0100 Subject: [PATCH 30/53] devs --- cf/constructs.py | 66 +++++----- cf/field.py | 265 ++++++++++++++++++---------------------- cf/mixin/fielddomain.py | 195 ++++++++++++++++++----------- cf/test/test_Data.py | 2 +- cf/test/test_Regrid.py | 2 +- 5 files changed, 279 insertions(+), 251 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index fece7ecfe3..bdf9bb5812 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -165,9 +165,9 @@ def _filter_by_identity(self, arg, todict, _config, identities): """ ctypes = [i for i in "XTYZ" if i in identities] - if len(ctypes) == len(identities): - # All identities are coordinate types (X, T, Y or Z) - return self._filter_by_coordinate_type(arg, todict, ctypes) + # if len(ctypes) == len(identities): + # # All identities are coordinate types (X, T, Y or Z) + # return self._filter_by_coordinate_type(arg, todict, ctypes) config = {"identities_kwargs": {"ctypes": ctypes}} if _config: @@ -175,36 +175,36 @@ def _filter_by_identity(self, arg, todict, _config, identities): return 
super()._filter_by_identity(arg, todict, config, identities) - def _filter_by_coordinate_type(self, arg, todict, ctypes): - """Worker function for `filter_by_identity` and `filter`. - - See `filter_by_identity` for details. - - .. versionadded:: 3.9.0 - - """ - out, pop = self._filter_preprocess( - arg, - filter_applied={"filter_by_identity": ctypes}, - todict=todict, - ) - - if not ctypes: - # Return all constructs if no coordinate types have been - # provided - return out - - for cid, construct in tuple(out.items()): - ok = False - for ctype in ctypes: - if getattr(construct, ctype, False): - ok = True - break - - if not ok: - pop(cid) - - return out + # def _filter_by_coordinate_type(self, arg, todict, ctypes): + # """Worker function for `filter_by_identity` and `filter`. + # + # See `filter_by_identity` for details. + # + # .. versionadded:: 3.9.0 + # + # """ + # out, pop = self._filter_preprocess( + # arg, + # filter_applied={"filter_by_identity": ctypes}, + # todict=todict, + # ) + # + # if not ctypes: + # # Return all constructs if no coordinate types have been + # # provided + # return out + # + # for cid, construct in tuple(out.items()): + # ok = False + # for ctype in ctypes: + # if getattr(construct, ctype, False): + # ok = True + # break + # + # if not ok: + # pop(cid) + # + # return out @classmethod def _short_iteration(cls, x): diff --git a/cf/field.py b/cf/field.py index e6047e5b47..b8b972324c 100644 --- a/cf/field.py +++ b/cf/field.py @@ -878,26 +878,36 @@ def _is_broadcastable(self, shape): def _axis_positions(self, axes, parse=True, return_axes=False): """Convert the given axes to their positions in the data. + Any domain axes that are not spanned by the data are ignored. + If there is no data then an empty list is returned. - .. versionadded:: 3.TODO.0 + + .. versionadded:: 3.9.0 + :Parameters: axes: (sequence of) `str` or `int` - The axes to be converted. - {{domain axis selection}} + The axes to be converted. TODO domain axis selection + parse: `bool`, optional + If False then do not parse the *axes*. Parsing should always occur unless the given *axes* are the output of a previous call to `parse_axes`. By default *axes* is parsed by `_parse_axes`. + return_axes: `bool`, optional + If True then also return the domain axis identifiers corresponding to the positions. + :Returns: + `list` [, `list`] The domain axis identifiers. If *return_axes* is True then also return the corresponding domain axis identifiers. 
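The axis-to-position mapping described above is also available through the public interface; a minimal sketch, again assuming `cf.example_field(0)` as a stand-in field:

    import cf

    f = cf.example_field(0)

    # Domain axis keys in the order in which they span the data
    data_axes = f.get_data_axes()

    # Position of the X axis in the field's data array
    axis_key = f.domain_axis("X", key=True)
    print(data_axes.index(axis_key))  # e.g. 1 for a (latitude, longitude) field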
+ """ data_axes = self.get_data_axes(default=None) if data_axes is None: @@ -4083,7 +4093,7 @@ def _regrid_update_coordinates( for aux_key, aux in dst.auxiliary_coordinates( filter_by_axis=dst_axis_keys, - axis_mode="exact", + axis_mode="subset", todict=True, ).items(): aux_axes = dst.get_data_axes(aux_key) @@ -11129,7 +11139,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over days' collapse" ) - cell_methods = self.cell_methods().ordered() + cell_methods = self.cell_methods(todict=True) w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -11248,7 +11258,7 @@ def _group_weights(weights, iaxis, index): "required for an 'over years' collapse" ) - cell_methods = self.cell_methods().ordered() + cell_methods = self.cell_methods(todict=True) w = [ cm.get_qualifier("within", None) for cm in cell_methods.values() @@ -11745,7 +11755,7 @@ def _update_cell_methods( `None` """ - original_cell_methods = self.cell_methods().ordered() + original_cell_methods = self.cell_methods(todict=True) # .ordered() logger.info(" Update cell methods:") # pragma: no cover logger.info( " Original cell methods = {}".format(original_cell_methods) @@ -11825,7 +11835,7 @@ def _update_cell_methods( self.set_construct(cell_method) logger.info( - f" Modified cell methods = {self.cell_methods().ordered()}" + f" Modified cell methods = {self.cell_methods()}" ) # pragma: no cover @_inplace_enabled(default=False) @@ -12832,94 +12842,94 @@ def set_data( return f - def domain_axis( - self, - *identity, - default=ValueError(), - key=False, - item=False, - **filter_kwargs, - ): - """Select a domain axis construct. - - {{unique construct}} - - .. versionadded:: 1.8.9.0 - - .. seealso:: `construct`, `domain_axes` - - :Parameters: - - identity: optional - Select domain axis constructs that have an identity, - defined by their `!identities` methods, that matches - any of the given values. - - Additionally, the values are matched against construct - identifiers, with or without the ``'key%'`` prefix. - - Additionally, if for a given value - ``f.coordinates(value, filter_by_naxes=(1,))`` returns - 1-d coordinate constructs that all span the same - domain axis construct then that domain axis construct - is selected. See `coordinates` for details. - - Additionally, if there is a `Field` data array and a - value matches the integer position of an array - dimension, then the corresponding domain axis - construct is selected. - - If no values are provided then all domain axis - constructs are selected. - - {{value match}} - - {{displayed identity}} - - {{key: `bool`, optional}} - - {{item: `bool`, optional}} - - default: optional - Return the value of the *default* parameter if there - is no unique construct. - - {{default Exception}} - - {{filter_kwargs: optional}} - - :Returns: - - {{Returns construct}} - - - **Examples:** - - """ - filter_kwargs["todict"] = True - - c = self.domain_axes(*identity, **filter_kwargs) - - # Return construct, or key, or both, or default - n = len(c) - if n == 1: - k, construct = c.popitem() - if key: - return k - - if item: - return k, construct - - return construct - - if default is None: - return default - - return self._default( - default, - f"{self.__class__.__name__}.domain_axis() can't return {n} " - "constructs", - ) + # def domain_axis( + # self, + # *identity, + # default=ValueError(), + # key=False, + # item=False, + # **filter_kwargs, + # ): + # """Select a domain axis construct. + # + # {{unique construct}} + # + # .. versionadded:: 1.8.9.0 + # + # .. 
seealso:: `construct`, `domain_axes` + # + # :Parameters: + # + # identity: optional + # Select domain axis constructs that have an identity, + # defined by their `!identities` methods, that matches + # any of the given values. + # + # Additionally, the values are matched against construct + # identifiers, with or without the ``'key%'`` prefix. + # + # Additionally, if for a given value + # ``f.coordinates(value, filter_by_naxes=(1,))`` returns + # 1-d coordinate constructs that all span the same + # domain axis construct then that domain axis construct + # is selected. See `coordinates` for details. + # + # Additionally, if there is a `Field` data array and a + # value matches the integer position of an array + # dimension, then the corresponding domain axis + # construct is selected. + # + # If no values are provided then all domain axis + # constructs are selected. + # + # {{value match}} + # + # {{displayed identity}} + # + # {{key: `bool`, optional}} + # + # {{item: `bool`, optional}} + # + # default: optional + # Return the value of the *default* parameter if there + # is no unique construct. + # + # {{default Exception}} + # + # {{filter_kwargs: optional}} + # + # :Returns: + # + # {{Returns construct}} + # + # + # **Examples:** + # + # """ + # filter_kwargs["todict"] = True + # + # c = self.domain_axes(*identity, **filter_kwargs) + # + # # Return construct, or key, or both, or default + # n = len(c) + # if n == 1: + # k, construct = c.popitem() + # if key: + # return k + # + # if item: + # return k, construct + # + # return construct + # + # if default is None: + # return default + # + # return self._default( + # default, + # f"{self.__class__.__name__}.domain_axis() can't return {n} " + # "constructs", + # ) def domain_mask(self, **kwargs): """Return a boolean field that is True where criteria are met. 
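Although the `Field` copy of `domain_axis` is commented out above, the selection semantics its docstring describes are unchanged in the mixin version; a minimal sketch, assuming `cf.example_field(0)`:

    import cf

    f = cf.example_field(0)

    # Select a unique domain axis construct by coordinate identity ...
    axis = f.domain_axis("longitude")

    # ... or get its construct key, or both
    key = f.domain_axis("longitude", key=True)
    key, axis = f.domain_axis("longitude", item=True)

    # An integer selects by position in the field's data array
    axis = f.domain_axis(0)

    # A non-matching identity returns the default rather than raising
    print(f.domain_axis("pressure", default=None))  # None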
@@ -13285,7 +13295,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): n = 0 - self_cell_methods = self.cell_methods() # TODO + self_cell_methods = self.cell_methods(todict=True) # TODO for identity in identities: cms = False @@ -13311,8 +13321,10 @@ def match_by_construct(self, *identities, OR=False, **conditions): if set(filtered.construct_types().values()) == { "cell_method" }: - key = tuple(self_cell_methods.ordered())[-1] - filtered = self_cell_methods(key)(identity) # TODO + key = tuple(self_cell_methods)[-1] + filtered = self.cell_method( + identity, filter_by_key=(key,) + ) if not filtered: if not OR: return False @@ -13323,9 +13335,7 @@ def match_by_construct(self, *identities, OR=False, **conditions): elif not OR: return False else: - cell_methods = tuple(self_cell_methods.ordered().values())[ - -len(cms) : - ] + cell_methods = tuple(self_cell_methods.values())[-len(cms) :] for cm0, cm1 in zip(cms, cell_methods): if cm0.has_axes() and set(cm0.get_axes()) != set( cm1.get_axes(()) @@ -15031,46 +15041,15 @@ def cell_method( **Examples:** """ - c = self._filter_interface( - ("cell_method",), + return self._construct( "cell_method", + "cell_methods", identity, - construct=True, key=key, item=item, - default=None, + default=default, **filter_kwargs, ) - if c is not None: - return c - - domain_axes = self.domain_axes(*identity, todict=True) - if domain_axes: - cell_methods = self.cell_methods(todict=True) - cm_keys = [ - k - for k, cm in cell_methods.items() - for da_key in domain_axes - if cm.get_axes(None) == (da_key,) - ] - if len(cm_keys) == 1: - k = cm_keys[0] - if key: - return k - - if item: - return k, cell_methods[k] - - return cell_methods[k] - - if default is None: - return default - - return self._default( - default, - f"{self.__class__.__name__}.cell_method() can only " - "return a unique construct", - ) def field_ancillary( self, @@ -15124,14 +15103,13 @@ def field_ancillary( **Examples:** """ - return self._filter_interface( - ("field_ancillary",), + return self._construct( "field_ancillary", + "field_ancillaries", identity, - construct=True, key=key, - default=default, item=item, + default=default, **filter_kwargs, ) @@ -17199,7 +17177,8 @@ def regrids( i=False, _compute_field_mass=None, ): - """Return the field regridded onto a new latitude-longitude grid. + """Return the field regridded onto a new latitude-longitude + grid. Regridding, also called remapping or interpolation, is the process of changing the grid underneath field data values @@ -17936,8 +17915,8 @@ def regridc( i=False, _compute_field_mass=None, ): - """Return the field with the specified Cartesian axes regridded onto a - new grid. + """Return the field with the specified Cartesian axes regridded + onto a new grid. Between 1 and 3 dimensions may be regridded. diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index dfcb68a86c..1a112d31c7 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -16,10 +16,7 @@ from ..functions import ( parse_indices, bounds_combination_mode, - _DEPRECATION_ERROR, _DEPRECATION_ERROR_KWARGS, - _DEPRECATION_ERROR_DICT, - _DEPRECATION_ERROR_SEQUENCE, ) from ..decorators import ( @@ -36,12 +33,88 @@ class FieldDomain: - """Mixin class for methods common to both field and domain constructs + """Mixin class for methods common to both field and domain + constructs. - .. versionadded:: 3.TODO.0 + .. 
versionadded:: 3.9.0 """ + def _construct( + self, + _method, + _constructs_method, + identities, + key=False, + item=False, + default=ValueError(), + **filter_kwargs, + ): + """An interface to `Constructs.filter`. + + {{unique construct}} + + .. versionadded:: 3.9.0 + + :Parameters: + + _method: `str` + The name of the calling method. + + _constructs_method: `str` + The name of the corresponding method that can return + any number of constructs. + + identities: sequence + As for the *identities* parmaeter of the calling + method. + + {{key: `bool`, optional}} + + {{item: `bool`, optional}} + + default: optional + Return the value of the *default* parameter if there + is no unique construct. + + {{default Exception}} + + {{filter_kwargs: optional}} + + :Returns: + + {{Returns construct}} + + """ + cached = filter_kwargs.get("cached") + if cached is not None: + return cached + + filter_kwargs["todict"] = True + + c = getattr(self, _constructs_method)(*identities, **filter_kwargs) + + # Return construct, or key, or both, or default + n = len(c) + if n == 1: + k, construct = c.popitem() + if key: + return k + + if item: + return k, construct + + return construct + + if default is None: + return default + + return self._default( + default, + f"{self.__class__.__name__}.{_method}() can't return {n} " + "constructs", + ) + def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): """Create indices that define a subspace of the field or domain construct. @@ -50,7 +123,7 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): See the `indices` method for more details. - .. versionadded:: 3.TODO.0 + .. versionadded:: 3.9.0 :Parameters: @@ -535,7 +608,7 @@ def _roll_constructs(self, axis, shift): If a roll axis is spanned by a dimension coordinate construct then it must be a periodic dimension coordinate construct. - .. versionadded:: 3.TODO.0 + .. versionadded:: 3.9.0 :Parameters: @@ -576,8 +649,7 @@ def _roll_constructs(self, axis, shift): ) for a in axis: - # dim = dims.filter_by_axis("exact", a).value(None) - dim = self.dimension_coordinate(filter_by_axis=(a,), todict=True) + dim = self.dimension_coordinate(filter_by_axis=(a,), default=None) if dim is not None and dim.period() is None: raise ValueError( f"Can't roll {self.__class__.__name__}. " @@ -625,7 +697,7 @@ def anchor( A unique axis is selected with the *axes* and *kwargs* parameters. - .. versionadded:: 3.TODO.0 + .. versionadded:: 3.9.0 .. seealso:: `axis`, `cyclic`, `iscyclic`, `roll` @@ -634,7 +706,7 @@ def anchor( axis: The cyclic axis to be anchored. - {{domain axis selection}} + domain axis selection TODO. value: Anchor the dimension coordinate values for the @@ -887,7 +959,7 @@ def del_construct(self, identity=None, default=ValueError()): referenced by coordinate reference construct. In this case the reference is replace with `None`. - .. versionadded:: 3.TODO.0 + .. versionadded:: 3.9.0 .. 
seealso:: `constructs`, `get_construct`, `has_construct`, `set_construct`, `del_domain_axis`, @@ -1297,14 +1369,11 @@ def auxiliary_coordinate( **Examples:** - TODO - """ - return self._filter_interface( - ("auxiliary_coordinate",), + return self._construct( "auxiliary_coordinate", + "auxiliary_coordinates", identity, - construct=True, key=key, item=item, default=default, @@ -1360,11 +1429,10 @@ def construct( **Examples:** """ - return self._filter_interface( - (), + return self._construct( "construct", + "constructs", identity, - construct=True, key=key, item=item, default=default, @@ -1423,14 +1491,13 @@ def cell_measure( **Examples:** """ - return self._filter_interface( - ("cell_measure",), - "cell_meausure", + return self._construct( + "cell_measure", + "cell_measures", identity, - construct=True, key=key, - default=default, item=item, + default=default, **filter_kwargs, ) @@ -1486,11 +1553,10 @@ def coordinate( **Examples:** """ - return self._filter_interface( - ("dimension_coordinate", "auxiliary_coordinate"), + return self._construct( "coordinate", + "coordinates", identity, - construct=True, key=key, item=item, default=default, @@ -1550,14 +1616,13 @@ def coordinate_reference( **Examples:** """ - return self._filter_interface( - ("coordinate_reference",), + return self._construct( "coordinate_reference", + "coordinate_references", identity, - construct=True, key=key, - default=default, item=item, + default=default, **filter_kwargs, ) @@ -1687,11 +1752,10 @@ def dimension_coordinate( **Examples:** """ - return self._filter_interface( - ("dimension_coordinate",), + return self._construct( "dimension_coordinate", + "dimension_coordinates", identity, - construct=True, key=key, item=item, default=default, @@ -1699,7 +1763,7 @@ def dimension_coordinate( ) @_deprecated_kwarg_check("axes") - def direction(self, identity=None, axes=None, **kwargs): + def direction(self, identity, axes=None, **kwargs): """Whether or not a domain axis is increasing. An domain axis is considered to be increasing if its dimension @@ -1758,19 +1822,20 @@ def direction(self, identity=None, axes=None, **kwargs): self, "direction", kwargs ) # pragma: no cover - axis = self.domain_axis(identity, key=True, default=None) - if axis is None: - return True + # axis = self.domain_axis(identity, key=True, default=None) + # if axis is None: + # return True for coord in self.dimension_coordinates( - filter_by_axis=(axis,), todict=True + filter_by_axis=(identity,), todict=True ).values(): return coord.direction() return True def directions(self): - """Return a dictionary mapping all domain axes to their directions. + """Return a dictionary mapping all domain axes to their + directions. .. seealso:: `direction` @@ -1848,14 +1913,13 @@ def domain_ancillary( **Examples:** """ - return self._filter_interface( - ("domain_ancillary",), + return self._construct( "domain_ancillary", + "domain_ancillaries", identity, - construct=True, key=key, - default=default, item=item, + default=default, **filter_kwargs, ) @@ -1885,7 +1949,7 @@ def domain_axis( Additionally, the values are matched against construct identifiers, with or without the ``'key%'`` prefix. 
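Each of the singular accessors above now funnels through the shared `_construct` helper, so it behaves as a thin wrapper around its plural counterpart; a minimal sketch of that equivalence, assuming `cf.example_field(0)`:

    import cf

    f = cf.example_field(0)

    # The singular form returns the unique matching construct ...
    t = f.dimension_coordinate("time")

    # ... which is what the plural form returns as a one-item dictionary
    matches = f.dimension_coordinates("time", todict=True)
    assert len(matches) == 1
    key, construct = matches.popitem()
    assert construct.equals(t)

    # Additional filters are passed straight through as filter_kwargs
    x_key = f.domain_axis("X", key=True)
    x = f.dimension_coordinate(filter_by_axis=(x_key,), default=None)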
- Additionally, if for a given value + Additionally, if for a given `value``, ``f.coordinates(value, filter_by_naxes=(1,))`` returns 1-d coordinate constructs that all span the same domain axis construct then that domain axis construct @@ -1923,35 +1987,20 @@ def domain_axis( **Examples:** """ - filter_kwargs["todict"] = True - - c = self.domain_axes(*identity, **filter_kwargs) - - # Return construct, or key, or both, or default - n = len(c) - if n == 1: - k, construct = c.popitem() - if key: - return k - - if item: - return k, construct - - return construct - - if default is None: - return default - - return self._default( - default, - f"{self.__class__.__name__}.domain_axis() can't return {n} " - "constructs", + return self._construct( + "domain_axis", + "domain_axes", + identity, + key=key, + item=item, + default=default, + **filter_kwargs, ) def get_coordinate_reference( self, identity=None, key=False, construct=None, default=ValueError() ): - """TODO + """TODO. .. versionadded:: 3.0.2 @@ -2255,7 +2304,7 @@ def match_by_rank(self, *ranks): def _parse_axes(self, axes): """Convert the given axes to their domain axis identifiers. - .. versionadded:: 3.TODO:0 + .. versionadded:: 3.9.0 :Parameters: @@ -2608,9 +2657,9 @@ def refs(self, *identities, **filter_kwargs): def _create_auxiliary_mask_component(mask_shape, ind, compress): - """Create an auxiliary mask component + """Create an auxiliary mask component. - .. versionadded:: 3.TODO.0 + .. versionadded:: 3.9.0 :Parameters: diff --git a/cf/test/test_Data.py b/cf/test/test_Data.py index 88a910d098..afa4876b5e 100644 --- a/cf/test/test_Data.py +++ b/cf/test/test_Data.py @@ -91,7 +91,7 @@ class DataTest(unittest.TestCase): mones = mones test_only = [] - # test_only = ['NOTHING!!!!!'] + test_only = ["NOTHING!!!!!"] # test_only = [ # 'test_Data_percentile', # 'test_Data_trigonometric_hyperbolic' diff --git a/cf/test/test_Regrid.py b/cf/test/test_Regrid.py index 68d468ddf6..1989524b50 100644 --- a/cf/test/test_Regrid.py +++ b/cf/test/test_Regrid.py @@ -96,7 +96,7 @@ def test_Field_regrids(self): r = f1.regrids(f5, method="linear") self.assertTrue( - f4.equals(r, verbose=2), + f4.equals(r, verbose=3), "destination=regional Field, CHUNKSIZE={}".format( chunksize ), From 8ca358848df6628f969e664bd9951fa2f27caefe Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 16 Apr 2021 09:15:44 +0100 Subject: [PATCH 31/53] new Constructs tests --- cf/test/test_Constructs.py | 48 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 cf/test/test_Constructs.py diff --git a/cf/test/test_Constructs.py b/cf/test/test_Constructs.py new file mode 100644 index 0000000000..5638f1bd6a --- /dev/null +++ b/cf/test/test_Constructs.py @@ -0,0 +1,48 @@ +import datetime +import unittest + +import faulthandler + +faulthandler.enable() # to debug seg faults and timeouts + +import cf + + +class ConstructsTest(unittest.TestCase): + """TODO DOCS.""" + + f = cf.example_field(1) + + def setUp(self): + """TODO DOCS.""" + # Disable log messages to silence expected warnings + cf.LOG_LEVEL("DISABLE") + # Note: to enable all messages for given methods, lines or + # calls (those without a 'verbose' option to do the same) + # e.g. to debug them, wrap them (for methods, start-to-end + # internally) as follows: + # + # cf.LOG_LEVEL('DEBUG') + # < ... test code ... 
> + # cf.log_level('DISABLE') + + def test_Constructs__repr__(self): + """TODO DOCS.""" + f = self.f + + repr(f.constructs) + + def test_Constructs_filter_by_naxes(self): + """TODO DOCS.""" + c = self.f.constructs + + self.assertEqual(len(c.filter_by_naxes()), 12) + self.assertEqual(len(c.filter_by_naxes(1)), 7) + self.assertEqual(len(c.filter_by_naxes(cf.ge(2))), 5) + self.assertEqual(len(c.filter_by_naxes(1, cf.ge(2))), 12) + +if __name__ == "__main__": + print("Run date:", datetime.datetime.now()) + cf.environment() + print("") + unittest.main(verbosity=2) From 1c47828f1ccc6102acf4df513406a769329c1221 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 16 Apr 2021 15:22:51 +0100 Subject: [PATCH 32/53] devs --- cf/field.py | 227 +++------------------------- cf/mixin/coordinate.py | 11 +- cf/mixin/fielddomain.py | 248 ++++++++++++++++++++++++++++--- cf/mixin/propertiesdatabounds.py | 14 +- cf/read_write/um/umread.py | 122 +++++++++++---- cf/test/test_Constructs.py | 3 +- 6 files changed, 353 insertions(+), 272 deletions(-) diff --git a/cf/field.py b/cf/field.py index b8b972324c..001554721b 100644 --- a/cf/field.py +++ b/cf/field.py @@ -6311,7 +6311,8 @@ def concatenate(cls, fields, axis=0, _preserve=True): return out - def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): + def cyclic(self, identity=None, iscyclic=True, period=None, + config={}, **kwargs): """Set the cyclicity of an axis. .. versionadded:: 1.0 @@ -6384,12 +6385,18 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): if identity is None: return old - axis = self.domain_axis(identity, key=True) + if "axis" in config: + axis = config.get('axis') + if axis is None: + raise ValueError(f"Can't find config-supplied axis {axis!r}") + else: + axis = self.domain_axis(identity, key=True) + data = self.get_data(None, _fill_value=False) if data is not None: try: - data_axes = self.get_data_axes() + data_axes = self.get_data_axes() data.cyclic(data_axes.index(axis), iscyclic) except ValueError: pass @@ -6398,9 +6405,13 @@ def cyclic(self, identity=None, iscyclic=True, period=None, **kwargs): if iscyclic: self._cyclic = old.union((axis,)) - dim = self.dimension_coordinate( - filter_by_axis=(axis,), default=None - ) + if "dim" in config: + dim = config["dim"] + else: + dim = self.dimension_coordinate( + filter_by_axis=(axis,), default=None + ) + if dim is not None: if period is not None: dim.period(period) @@ -14575,66 +14586,6 @@ def argmax(self, axis=None): # # return out - # @_manage_log_level_via_verbosity - # def autocyclic(self, key=None, coord=None, verbose=None): - # """Set dimensions to be cyclic. - # - # A dimension is set to be cyclic if it has a unique longitude (or - # grid longitude) dimension coordinate construct with bounds and the - # first and last bounds values differ by 360 degrees (or an - # equivalent amount in other units). - # - # .. versionadded:: 1.0 - # - # .. 
seealso:: `cyclic`, `iscyclic`, `period` - # - # :Parameters: - # - # {{verbose: `int` or `str` or `None`, optional}} - # - # :Returns: - # - # `bool` - # - # """ - # if coord is None: - # key, coord = self.dimension_coordinate( - # "X", item=True, default=(None, None) - # ) - # if coord is None: - # return False - # elif not coord.X: - # return False - # - # bounds = coord.get_bounds(None) - # if bounds is None: - # self.cyclic(key, iscyclic=False) - # return False - # - # data = bounds.get_data(None, _fill_value=False) - # if data is None: - # self.cyclic(key, iscyclic=False) - # return False - # - # units = bounds.Units - # if units.islongitude: - # period = Data(360.0, units="degrees_east") - # elif units == _units_degrees: - # period = Data(360.0, units="degrees") - # else: - # self.cyclic(key, iscyclic=False) - # return False - # - # period.Units = data.Units - # - # if abs(data.last_element() - data.first_element()) != period.array: - # self.cyclic(key, iscyclic=False) - # return False - # - # self.cyclic(key, iscyclic=True, period=period) - # - # return True - @_deprecated_kwarg_check("i") def squeeze(self, axes=None, inplace=False, i=False, **kwargs): """Remove size 1 axes from the data. @@ -15352,150 +15303,6 @@ def axis_size(self, *identity, default=ValueError(), axes=None, **kwargs): return domain_axes[key].get_size(default=default) - def set_construct( - self, construct, key=None, axes=None, set_axes=True, copy=True - ): - """Set a metadata construct. - - When inserting a construct with data, the domain axes constructs - spanned by the data are either inferred, or specified with the - *axes* parameter. - - For a dimension coordinate construct, an existing dimension - coordinate construct is discarded if it spans the same domain axis - construct (since only one dimension coordinate construct can be - associated with a given domain axis construct). - - .. versionadded:: 3.0.0 - - .. seealso:: `constructs`, `creation_commands`, `del_construct`, - `get_construct`, `set_coordinate_reference`, - `set_data_axes` - - :Parameters: - - construct: - The metadata construct to be inserted. - - key: `str`, optional - The construct identifier to be used for the construct. If - not set then a new, unique identifier is created - automatically. If the identifier already exists then the - existing construct will be replaced. - - *Parameter example:* - ``key='cellmeasure0'`` - - axes: (sequence of) `str` or `int`, optional - Set the domain axes constructs that are spanned by the - construct's data. If unset, and the *set_axes* parameter - is True, then an attempt will be made to assign existing - domain axis constructs to the data. - - The contents of the *axes* parameter is mapped to domain - axis constructs by translating each element into a domain - axis construct key via the `domain_axis` method. - - *Parameter example:* - ``axes='domainaxis1'`` - - *Parameter example:* - ``axes='X'`` - - *Parameter example:* - ``axes=['latitude']`` - - *Parameter example:* - ``axes=['X', 'longitude']`` - - *Parameter example:* - ``axes=[1, 0]`` - - set_axes: `bool`, optional - If False then do not set the domain axes constructs that - are spanned by the data, even if the *axes* parameter has - been set. By default the axes are set either according to - the *axes* parameter, or an attempt will be made to assign - existing domain axis constructs to the data. - - copy: `bool`, optional - If True then set a copy of the construct. By default the - construct is copied. 
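The parameters described above work the same way after the method's move to the `FieldDomain` mixin; a minimal sketch of setting a construct with explicit axes, assuming `cf.example_field(0)` and an auxiliary coordinate invented on the spot purely for illustration:

    import numpy
    import cf

    f = cf.example_field(0)

    # A 1-d auxiliary coordinate intended for the size-8 longitude axis;
    # the long_name is illustrative only
    aux = cf.AuxiliaryCoordinate(
        properties={"long_name": "longitude index squared"},
        data=cf.Data(numpy.arange(8.0) ** 2, units="1"),
    )

    # The axes may be given by identity and are translated to domain
    # axis keys via domain_axis()
    key = f.set_construct(aux, axes="X")

    # Equivalently, with no axes given the size-8 data is matched
    # against the existing domain axes:
    # key = f.set_construct(aux)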
- - :Returns: - - `str` - The construct identifier for the construct. - - **Examples:** - - >>> key = f.set_construct(c) - >>> key = f.set_construct(c, copy=False) - >>> key = f.set_construct(c, axes='domainaxis2') - >>> key = f.set_construct(c, key='cellmeasure0') - - """ - construct_type = construct.construct_type - - if not set_axes: - axes = None - - if construct_type in ( - "dimension_coordinate", - "auxiliary_coordinate", - "cell_measure", - ): - if construct.isscalar: - # Turn a scalar object into 1-d - if copy: - construct = construct.insert_dimension(0) - copy = False - else: - construct.insert_dimension(0, inplace=True) - - if set_axes: - axes = self._set_construct_parse_axes( - construct, axes, allow_scalar=False - ) - - elif construct_type in ("domain_ancillary", "field_ancillary"): - if set_axes: - axes = self._set_construct_parse_axes( - construct, axes, allow_scalar=True - ) - - if construct_type == "dimension_coordinate": - data_axes = self.constructs.data_axes() - for dim in self.dimension_coordinates(todict=True): - if dim == key: - continue - - if data_axes.get(dim) == tuple(axes): - self.del_construct(dim, default=None) - - out = super().set_construct(construct, key=key, axes=axes, copy=copy) - - if construct_type == "dimension_coordinate": - construct.autoperiod(inplace=True) - self._conform_coordinate_references(out) - self.autocyclic(key=out, coord=construct) - self._conform_cell_methods() - - elif construct_type == "auxiliary_coordinate": - construct.autoperiod(inplace=True) - self._conform_coordinate_references(out) - self._conform_cell_methods() - - elif construct_type == "cell_method": - self._conform_cell_methods() - - elif construct_type == "coordinate_reference": - for ckey in self.coordinates(todict=True): - self._conform_coordinate_references(ckey, coordref=construct) - - # Return the construct key - return out - def get_data_axes(self, identity=None, default=ValueError()): """Return the keys of the domain axis constructs spanned by the data of a metadata construct. diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index f4b1e61e0c..3b35ec6660 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -375,7 +375,7 @@ def positive(self): # Methods # ---------------------------------------------------------------- @_inplace_enabled(default=False) - def autoperiod(self, inplace=False): + def autoperiod(self, inplace=False, config={}): """TODO Set dimensions to be cyclic. TODO A dimension is set to be cyclic if it has a unique @@ -391,6 +391,12 @@ def autoperiod(self, inplace=False): TODO + config: `dict` + Additional parameters for optimizing the + operation. See the code for details. + + .. versionadded:: 3.9.0 + :Returns: TODO @@ -402,6 +408,9 @@ def autoperiod(self, inplace=False): """ c = _inplace_enabled_define_and_cleanup(self) + if "cyclic" in config and not config["cyclic"]: + return c + if c.period() is not None: return c diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 1a112d31c7..6a87c9a137 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -885,7 +885,7 @@ def anchor( return f @_manage_log_level_via_verbosity - def autocyclic(self, key=None, coord=None, verbose=None): + def autocyclic(self, key=None, coord=None, verbose=None, config={}): """Set dimensions to be cyclic. 
A dimension is set to be cyclic if it has a unique longitude @@ -901,50 +901,84 @@ def autocyclic(self, key=None, coord=None, verbose=None): {{verbose: `int` or `str` or `None`, optional}} - :Returns: + config: `dict` + Additional parameters for optimizing the + operation. See the code for details. - `bool` + .. versionadded:: 3.9.0 - **Examples:** + :Returns: - >>> f.autocyclic() + `bool` """ + if "cyclic" in config: + cyclic = config["cyclic"] + if not cyclic: + return False + else: + cyclic = None + if coord is None: key, coord = self.dimension_coordinate( "X", item=True, default=(None, None) ) if coord is None: return False + elif "X" in config: + if not config["X"]: + return False elif not coord.X: return False - bounds = coord.get_bounds(None) - if bounds is None: - self.cyclic(key, iscyclic=False) - return False - - data = bounds.get_data(None, _fill_value=False) - if data is None: - self.cyclic(key, iscyclic=False) - return False + if cyclic: + self.cyclic(key, iscyclic=True, period=config["period"]) + return True - units = bounds.Units - if units.islongitude: - period = Data(360.0, units="degrees_east") - elif units == _units_degrees: - period = Data(360.0, units="degrees") + bounds_range = config.get("bounds_range") + if bounds_range is not None: + bounds_units = config["bounds_units"] else: - self.cyclic(key, iscyclic=False) - return False + bounds = coord.get_bounds(None) + if bounds is None: + self.cyclic(key, iscyclic=False) + return False + + data = bounds.get_data(None, _fill_value=False) + if data is None: + self.cyclic(key, iscyclic=False) + return False + + bounds_units = bounds.Units + + period = coord.period() + has_period = period is not None + if not has_period: + if bounds_units.islongitude: + period = Data(360.0, units="degrees_east") + elif bounds_units == _units_degrees: + period = Data(360.0, units="degrees") + else: + self.cyclic(key, iscyclic=False) + return False + + period.Units = bounds_units - period.Units = data.Units + if bounds_range is None: + bounds_range = abs(data.last_element() - data.first_element()) - if abs(data.last_element() - data.first_element()) != period.array: + if bounds_range != period: self.cyclic(key, iscyclic=False) return False - self.cyclic(key, iscyclic=True, period=period) + if has_period: + period = None + + axis = self.get_data_axes(key, default=(None,))[0] + + self.cyclic(key, iscyclic=True, + period=period, + config={"axis": axis, "dim": coord}) return True @@ -2438,6 +2472,172 @@ def replace_construct( return c + def set_construct( + self, + construct, + key=None, + axes=None, + set_axes=True, + copy=True, + autocyclic={}, + ): + """Set a metadata construct. + + When inserting a construct with data, the domain axes constructs + spanned by the data are either inferred, or specified with the + *axes* parameter. + + For a dimension coordinate construct, an existing dimension + coordinate construct is discarded if it spans the same domain axis + construct (since only one dimension coordinate construct can be + associated with a given domain axis construct). + + .. versionadded:: 3.0.0 + + .. seealso:: `constructs`, `creation_commands`, `del_construct`, + `get_construct`, `set_coordinate_reference`, + `set_data_axes` + + :Parameters: + + construct: + The metadata construct to be inserted. + + key: `str`, optional + The construct identifier to be used for the construct. If + not set then a new, unique identifier is created + automatically. If the identifier already exists then the + existing construct will be replaced. 
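The bounds test implemented in `autocyclic` above is what makes a newly set longitude coordinate come out cyclic without any user action; a minimal sketch with a small hand-built field (the 45-degree grid is arbitrary):

    import numpy
    import cf

    f = cf.Field()
    axis = f.set_construct(cf.DomainAxis(size=8))

    lon = cf.DimensionCoordinate(
        properties={"standard_name": "longitude"},
        data=cf.Data(numpy.arange(22.5, 360.0, 45.0), units="degrees_east"),
    )
    bounds = numpy.column_stack(
        (numpy.arange(0.0, 360.0, 45.0), numpy.arange(45.0, 361.0, 45.0))
    )
    lon.set_bounds(cf.Bounds(data=cf.Data(bounds, units="degrees_east")))

    # The first and last bounds differ by exactly 360 degrees, so the
    # axis is marked as cyclic when the coordinate is set
    f.set_construct(lon, axes=axis)
    print(f.iscyclic("X"))  # True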
+ + *Parameter example:* + ``key='cellmeasure0'`` + + axes: (sequence of) `str` or `int`, optional + Set the domain axes constructs that are spanned by the + construct's data. If unset, and the *set_axes* parameter + is True, then an attempt will be made to assign existing + domain axis constructs to the data. + + The contents of the *axes* parameter is mapped to domain + axis constructs by translating each element into a domain + axis construct key via the `domain_axis` method. + + *Parameter example:* + ``axes='domainaxis1'`` + + *Parameter example:* + ``axes='X'`` + + *Parameter example:* + ``axes=['latitude']`` + + *Parameter example:* + ``axes=['X', 'longitude']`` + + *Parameter example:* + ``axes=[1, 0]`` + + set_axes: `bool`, optional + If False then do not set the domain axes constructs that + are spanned by the data, even if the *axes* parameter has + been set. By default the axes are set either according to + the *axes* parameter, or an attempt will be made to assign + existing domain axis constructs to the data. + + copy: `bool`, optional + If True then set a copy of the construct. By default the + construct is copied. + + autocyclic: `dict`, optional + Additional parameters for optimizing the operation, + raelating to coordinate periodicity and cyclicity. See + the code for details. + + .. versionadded:: 3.9.0 + + :Returns: + + `str` + The construct identifier for the construct. + + **Examples:** + + >>> key = f.set_construct(c) + >>> key = f.set_construct(c, copy=False) + >>> key = f.set_construct(c, axes='domainaxis2') + >>> key = f.set_construct(c, key='cellmeasure0') + + """ + construct_type = construct.construct_type + + if not set_axes: + axes = None + + if construct_type in ( + "dimension_coordinate", + "auxiliary_coordinate", + "cell_measure", + ): + if construct.isscalar: + # Turn a scalar object into 1-d + if copy: + construct = construct.insert_dimension(0) + copy = False + else: + construct.insert_dimension(0, inplace=True) + + if set_axes: + axes = self._set_construct_parse_axes( + construct, axes, allow_scalar=False + ) + + if construct_type == "dimension_coordinate": + data_axes = self.constructs.data_axes() + for dim in self.dimension_coordinates(todict=True): + if dim == key: + continue + + if data_axes.get(dim) == tuple(axes): + self.del_construct(dim, default=None) + + elif construct_type in ("domain_ancillary", "field_ancillary"): + if set_axes: + axes = self._set_construct_parse_axes( + construct, axes, allow_scalar=True + ) + + out = super().set_construct(construct, key=key, axes=axes, copy=copy) + + if construct_type == "dimension_coordinate": + construct.autoperiod( + inplace=True, + config={"cyclic": autocyclic.get("cyclic", True)}) + self._conform_coordinate_references(out) + self.autocyclic(key=out, coord=construct, config=autocyclic) + try: + self._conform_cell_methods() + except AttributeError: + pass + + elif construct_type == "auxiliary_coordinate": + construct.autoperiod(inplace=True, + config={"cyclic": autocyclic.get("cyclic", True)}) + self._conform_coordinate_references(out) + try: + self._conform_cell_methods() + except AttributeError: + pass + + elif construct_type == "cell_method": + self._conform_cell_methods() + + elif construct_type == "coordinate_reference": + for ckey in self.coordinates(todict=True): + self._conform_coordinate_references(ckey, coordref=construct) + + # Return the construct key + return out + def set_coordinate_reference( self, coordinate_reference, key=None, parent=None, strict=True ): diff --git 
a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index 97080416b6..2dbb6fa68e 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -3680,16 +3680,14 @@ def period(self, *value): """ old = super().period(*value) - old2 = None - + if old is not None: + return old + bounds = self.get_bounds(None) - if bounds is not None: - old2 = bounds.period(*value) - - if old is None and old2 is not None: - return old2 + if bounds is None: + return - return old + return bounds.period(*value) @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index 5ce50aeb1c..cb8f11170c 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -1190,7 +1190,11 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): dc = self.coord_axis(dc, axiscode) dc = self.coord_positive(dc, axiscode, _axis["z"]) self.implementation.set_dimension_coordinate( - field, dc, axes=[_axis["z"]], copy=False + field, + dc, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) # "b" domain ancillary @@ -1271,9 +1275,11 @@ def depth_coordinate(self, axiscode): ac.long_name = "atmosphere_hybrid_height_coordinate_ak" # field.insert_aux(ac, axes=[zdim], copy=False) self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False + field, ac, + axes=[_axis["z"]], copy=False, + autocyclic={"cyclic": False} ) - + array = numpy_array( [rec.real_hdr[bhlev] for rec in self.z_recs], dtype=float ) @@ -1291,7 +1297,8 @@ def depth_coordinate(self, axiscode): ac.id = "UM_atmosphere_hybrid_height_coordinate_bk" ac.long_name = "atmosphere_hybrid_height_coordinate_bk" self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False + field, ac, axes=[_axis["z"]], copy=False, + autocyclic={"cyclic": False} ) return dc @@ -1370,7 +1377,11 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): dc = self.coord_names(dc, axiscode) self.implementation.set_dimension_coordinate( - field, dc, axes=[_axis["z"]], copy=False + field, + dc, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) ac = self.implementation.initialise_AuxiliaryCoordinate() @@ -1379,14 +1390,16 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): ac.long_name = "atmosphere_hybrid_sigma_pressure_coordinate_ak" self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False + field, ac, axes=[_axis["z"]], copy=False, + autocyclic={"cyclic": False} ) ac = self.implementation.initialise_AuxiliaryCoordinate() ac = self.coord_data(ac, bk_array, bk_bounds, units=_Units["1"]) self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False + field, ac, axes=[_axis["z"]], copy=False, + autocyclic={"cyclic": False} ) ac.id = "UM_atmosphere_hybrid_sigma_pressure_coordinate_bk" @@ -2200,11 +2213,16 @@ def model_level_number_coordinate(self, aux=False): if aux: self.field.insert_aux(c, axes=[_axis["z"]], copy=True) self.implementation.set_auxiliary_coordinate( - self.field, c, axes=[_axis["z"]], copy=True + self.field, c, axes=[_axis["z"]], copy=True, + autocyclic={"cyclic": False} ) else: self.implementation.set_dimension_coordinate( - self.field, c, axes=[_axis["z"]], copy=True + self.field, + c, + axes=[_axis["z"]], + copy=True, + autocyclic={"cyclic": False}, ) else: array = numpy_array(array, dtype=self.int_hdr_dtype) @@ -2221,7 +2239,8 @@ def model_level_number_coordinate(self, aux=False): ac = 
self.coord_data(ac, array, units=Units("1")) ac = self.coord_names(ac, axiscode) self.implementation.set_auxiliary_coordinate( - self.field, ac, axes=[_axis["z"]], copy=False + self.field, ac, axes=[_axis["z"]], copy=False, + autocyclic={"cyclic": False} ) else: @@ -2230,7 +2249,11 @@ def model_level_number_coordinate(self, aux=False): dc = self.coord_names(dc, axiscode) dc = self.coord_axis(dc, axiscode) self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["z"]], copy=False + self.field, + dc, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) _cached_model_level_number_coordinate[key] = c @@ -2325,7 +2348,11 @@ def pseudolevel_coordinate(self, LBUSER5): _axis["p"] = axisP self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["p"]], copy=False + self.field, + dc, + axes=[_axis["p"]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -2350,7 +2377,11 @@ def radiation_wavelength_coordinate(self, rwl, rwl_units): _axis["r"] = axisR self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["r"]], copy=False + self.field, + dc, + axes=[_axis["r"]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -2404,7 +2435,11 @@ def size_1_height_coordinate(self, axiscode, height, units): copy = False self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["z"]], copy=copy + self.field, + dc, + axes=[_axis["z"]], + copy=copy, + autocyclic={"cyclic": False}, ) return dc @@ -2561,7 +2596,11 @@ def time_coordinate(self, axiscode): dc = self.coord_names(dc, axiscode) self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["t"]], copy=False + self.field, + dc, + axes=[_axis["t"]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -2586,7 +2625,11 @@ def time_coordinate_from_extra_data(self, axiscode, axis): dc = self.coord_axis(dc, axiscode) dc = self.coord_names(dc, axiscode) self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis[axis]], copy=False + self.field, + dc, + axes=[_axis[axis]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -2620,7 +2663,11 @@ def time_coordinate_from_um_timeseries(self, axiscode, axis): dc = self.coord_axis(dc, axiscode) dc = self.coord_names(dc, axiscode) self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis[axis]], copy=False + self.field, + dc, + axes=[_axis[axis]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -2826,7 +2873,18 @@ def xy_coordinate(self, axiscode, axis): `str, `DimensionCoordinate` """ - if axis == "y": + X = axis == "x" + if X: + delta = self.bdx + origin = self.real_hdr[bzx] + size = self.lbnpt + + da = self.implementation.initialise_DomainAxis(size=size) + axis_key = self.implementation.set_domain_axis(self.field, da) + _axis["x"] = axis_key + + autocyclic = {"X": True} + else: delta = self.bdy origin = self.real_hdr[bzy] size = self.lbrow @@ -2834,14 +2892,8 @@ def xy_coordinate(self, axiscode, axis): da = self.implementation.initialise_DomainAxis(size=size) axis_key = self.implementation.set_domain_axis(self.field, da) _axis["y"] = axis_key - else: - delta = self.bdx - origin = self.real_hdr[bzx] - size = self.lbnpt - da = self.implementation.initialise_DomainAxis(size=size) - axis_key = self.implementation.set_domain_axis(self.field, da) - _axis["x"] = axis_key + autocyclic = {"cyclic": False} if abs(delta) > self.atol: # Create regular coordinates from header items @@ -2873,6 +2925,7 @@ def xy_coordinate(self, axiscode, axis): 
bounds = numpy_empty((size, 2), dtype=float) bounds[:, 0] = array - delta_by_2 bounds[:, 1] = array + delta_by_2 + else: # Create coordinate from extra data array = self.extra.get(axis, None) @@ -2889,8 +2942,13 @@ def xy_coordinate(self, axiscode, axis): dc = self.coord_axis(dc, axiscode) dc = self.coord_names(dc, axiscode) + if X and bounds is not None: + autocyclic["bounds_range"] = abs(bounds[0, 0] - bounds[-1, -1]) + autocyclic["bounds_units"] = dc.Units + + key = self.implementation.set_dimension_coordinate( - self.field, dc, axes=[axis_key], copy=False + self.field, dc, axes=[axis_key], copy=False, autocyclic=autocyclic ) return key, dc @@ -2961,7 +3019,11 @@ def z_coordinate(self, axiscode): dc = self.coord_names(dc, axiscode) self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["z"]], copy=copy + self.field, + dc, + axes=[_axis["z"]], + copy=copy, + autocyclic={"cyclic": False}, ) logger.info(" " + dc.dump(display=False)) # pragma: no cover @@ -3032,7 +3094,11 @@ def z_reference_coordinate(self, axiscode): copy = False self.implementation.set_dimension_coordinate( - self.field, dc, axes=[_axis["z"]], copy=copy + self.field, + dc, + axes=[_axis["z"]], + copy=copy, + autocyclic={"cyclic": False}, ) return dc diff --git a/cf/test/test_Constructs.py b/cf/test/test_Constructs.py index 5638f1bd6a..afef70ca08 100644 --- a/cf/test/test_Constructs.py +++ b/cf/test/test_Constructs.py @@ -40,7 +40,8 @@ def test_Constructs_filter_by_naxes(self): self.assertEqual(len(c.filter_by_naxes(1)), 7) self.assertEqual(len(c.filter_by_naxes(cf.ge(2))), 5) self.assertEqual(len(c.filter_by_naxes(1, cf.ge(2))), 12) - + + if __name__ == "__main__": print("Run date:", datetime.datetime.now()) cf.environment() From e605c6f991e85f371dd0fb9544fa332fb056da4c Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 16 Apr 2021 18:12:02 +0100 Subject: [PATCH 33/53] devs --- cf/field.py | 15 +++++------ cf/mixin/coordinate.py | 2 +- cf/mixin/fielddomain.py | 25 +++++++++-------- cf/mixin/propertiesdatabounds.py | 4 +-- cf/read_write/um/umread.py | 46 +++++++++++++++++++++----------- 5 files changed, 53 insertions(+), 39 deletions(-) diff --git a/cf/field.py b/cf/field.py index 001554721b..ee57e42a74 100644 --- a/cf/field.py +++ b/cf/field.py @@ -11,7 +11,6 @@ pass from numpy import arange as numpy_arange -from numpy import argmax as numpy_argmax from numpy import array as numpy_array from numpy import array_equal as numpy_array_equal @@ -6311,8 +6310,9 @@ def concatenate(cls, fields, axis=0, _preserve=True): return out - def cyclic(self, identity=None, iscyclic=True, period=None, - config={}, **kwargs): + def cyclic( + self, identity=None, iscyclic=True, period=None, config={}, **kwargs + ): """Set the cyclicity of an axis. .. 
versionadded:: 1.0 @@ -6385,18 +6385,17 @@ def cyclic(self, identity=None, iscyclic=True, period=None, if identity is None: return old - if "axis" in config: - axis = config.get('axis') + axis = config.get("axis") if axis is None: raise ValueError(f"Can't find config-supplied axis {axis!r}") else: axis = self.domain_axis(identity, key=True) - + data = self.get_data(None, _fill_value=False) if data is not None: try: - data_axes = self.get_data_axes() + data_axes = self.get_data_axes() data.cyclic(data_axes.index(axis), iscyclic) except ValueError: pass @@ -6411,7 +6410,7 @@ def cyclic(self, identity=None, iscyclic=True, period=None, dim = self.dimension_coordinate( filter_by_axis=(axis,), default=None ) - + if dim is not None: if period is not None: dim.period(period) diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 3b35ec6660..838f19dfa6 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -410,7 +410,7 @@ def autoperiod(self, inplace=False, config={}): if "cyclic" in config and not config["cyclic"]: return c - + if c.period() is not None: return c diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 6a87c9a137..2f0b52cbaf 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -178,8 +178,6 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): # Initialize indices indices = {axis: slice(None) for axis in domain_axes} - construct_data_axes = self.constructs.data_axes() - parsed = {} unique_axes = set() n_axes = 0 @@ -807,9 +805,6 @@ def anchor( self, "anchor", kwargs ) # pragma: no cover - axis_in = axis - # axis = self._parse_axes(axis_in) - da_key, axis = self.domain_axis(axis, item=True) if dry_run: @@ -961,7 +956,7 @@ def autocyclic(self, key=None, coord=None, verbose=None, config={}): else: self.cyclic(key, iscyclic=False) return False - + period.Units = bounds_units if bounds_range is None: @@ -976,9 +971,12 @@ def autocyclic(self, key=None, coord=None, verbose=None, config={}): axis = self.get_data_axes(key, default=(None,))[0] - self.cyclic(key, iscyclic=True, - period=period, - config={"axis": axis, "dim": coord}) + self.cyclic( + key, + iscyclic=True, + period=period, + config={"axis": axis, "dim": coord}, + ) return True @@ -2610,8 +2608,8 @@ def set_construct( if construct_type == "dimension_coordinate": construct.autoperiod( - inplace=True, - config={"cyclic": autocyclic.get("cyclic", True)}) + inplace=True, config={"cyclic": autocyclic.get("cyclic", True)} + ) self._conform_coordinate_references(out) self.autocyclic(key=out, coord=construct, config=autocyclic) try: @@ -2620,8 +2618,9 @@ def set_construct( pass elif construct_type == "auxiliary_coordinate": - construct.autoperiod(inplace=True, - config={"cyclic": autocyclic.get("cyclic", True)}) + construct.autoperiod( + inplace=True, config={"cyclic": autocyclic.get("cyclic", True)} + ) self._conform_coordinate_references(out) try: self._conform_cell_methods() diff --git a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index 2dbb6fa68e..2b3d495e15 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -3682,10 +3682,10 @@ def period(self, *value): if old is not None: return old - + bounds = self.get_bounds(None) if bounds is None: - return + return return bounds.period(*value) diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index cb8f11170c..6412a321ca 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -1275,11 +1275,13 @@ def depth_coordinate(self, 
axiscode): ac.long_name = "atmosphere_hybrid_height_coordinate_ak" # field.insert_aux(ac, axes=[zdim], copy=False) self.implementation.set_auxiliary_coordinate( - field, ac, - axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False} + field, + ac, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) - + array = numpy_array( [rec.real_hdr[bhlev] for rec in self.z_recs], dtype=float ) @@ -1297,8 +1299,11 @@ def depth_coordinate(self, axiscode): ac.id = "UM_atmosphere_hybrid_height_coordinate_bk" ac.long_name = "atmosphere_hybrid_height_coordinate_bk" self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False} + field, + ac, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) return dc @@ -1390,16 +1395,22 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): ac.long_name = "atmosphere_hybrid_sigma_pressure_coordinate_ak" self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False} + field, + ac, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) ac = self.implementation.initialise_AuxiliaryCoordinate() ac = self.coord_data(ac, bk_array, bk_bounds, units=_Units["1"]) self.implementation.set_auxiliary_coordinate( - field, ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False} + field, + ac, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) ac.id = "UM_atmosphere_hybrid_sigma_pressure_coordinate_bk" @@ -2213,8 +2224,11 @@ def model_level_number_coordinate(self, aux=False): if aux: self.field.insert_aux(c, axes=[_axis["z"]], copy=True) self.implementation.set_auxiliary_coordinate( - self.field, c, axes=[_axis["z"]], copy=True, - autocyclic={"cyclic": False} + self.field, + c, + axes=[_axis["z"]], + copy=True, + autocyclic={"cyclic": False}, ) else: self.implementation.set_dimension_coordinate( @@ -2239,8 +2253,11 @@ def model_level_number_coordinate(self, aux=False): ac = self.coord_data(ac, array, units=Units("1")) ac = self.coord_names(ac, axiscode) self.implementation.set_auxiliary_coordinate( - self.field, ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False} + self.field, + ac, + axes=[_axis["z"]], + copy=False, + autocyclic={"cyclic": False}, ) else: @@ -2945,7 +2962,6 @@ def xy_coordinate(self, axiscode, axis): if X and bounds is not None: autocyclic["bounds_range"] = abs(bounds[0, 0] - bounds[-1, -1]) autocyclic["bounds_units"] = dc.Units - key = self.implementation.set_dimension_coordinate( self.field, dc, axes=[axis_key], copy=False, autocyclic=autocyclic From c252afb2a71f47010f012cfe715bd0c56a7ea770 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 19 Apr 2021 22:53:09 +0100 Subject: [PATCH 34/53] devs --- cf/field.py | 61 ++++--- cf/mixin/coordinate.py | 25 ++- cf/mixin/fielddomain.py | 286 +++++-------------------------- cf/mixin/properties.py | 14 +- cf/mixin/propertiesdata.py | 21 ++- cf/mixin/propertiesdatabounds.py | 6 +- cf/read_write/um/umread.py | 82 +++++---- 7 files changed, 182 insertions(+), 313 deletions(-) diff --git a/cf/field.py b/cf/field.py index ee57e42a74..993814478e 100644 --- a/cf/field.py +++ b/cf/field.py @@ -2409,7 +2409,7 @@ def _binary_operation(self, other, method): return field0 def _conform_coordinate_references(self, key, coordref=None): - """Where possible replace the content of coordiante refence + """Where possible replace the content of coordiante reference construct coordinates with coordinate construct keys. .. 
versionadded:: 3.0.0 @@ -2442,8 +2442,7 @@ def _conform_coordinate_references(self, key, coordref=None): refs = [coordref] for ref in refs: - coordinates = ref.coordinates() - if identity in coordinates: + if identity in ref.coordinates(): ref.del_coordinate(identity, None) ref.set_coordinate(key) @@ -6381,15 +6380,17 @@ def cyclic( self, "cyclic", kwargs ) # pragma: no cover - old = self._cyclic.copy() + if not iscyclic and config.get("no-op"): + return self._cyclic.copy() + + old = None + cyclic = self._cyclic + if identity is None: - return old + return cyclic.copy() - if "axis" in config: - axis = config.get("axis") - if axis is None: - raise ValueError(f"Can't find config-supplied axis {axis!r}") - else: + axis = config.get("axis") + if axis is None: axis = self.domain_axis(identity, key=True) data = self.get_data(None, _fill_value=False) @@ -6400,29 +6401,40 @@ def cyclic( except ValueError: pass - # Never change _cyclic in-place if iscyclic: - self._cyclic = old.union((axis,)) - - if "dim" in config: - dim = config["dim"] - else: + dim = config.get("coord") + if dim is None: dim = self.dimension_coordinate( filter_by_axis=(axis,), default=None ) if dim is not None: - if period is not None: - dim.period(period) + if config.get("period") is not None: + dim.period(**config) + elif period is not None: + dim.period(period, **config) elif dim.period() is None: raise ValueError( "A cyclic dimension coordinate must have a period" ) - else: - cyclic = old.copy() + + if axis not in cyclic: + # Never change _cyclic in-place + old = cyclic.copy() + cyclic = cyclic.copy() + cyclic.add(axis) + self._cyclic = cyclic + + elif axis in cyclic: + # Never change _cyclic in-place + old = cyclic.copy() + cyclic = cyclic.copy() cyclic.discard(axis) self._cyclic = cyclic + if old is None: + old = cyclic.copy() + return old def weights( @@ -11624,7 +11636,6 @@ def _group_weights(weights, iaxis, index): if not coord.increasing: lb, ub = ub, lb - if group_span + lb != ub: # The span of this group is not the # same as group_span, so don't @@ -17684,9 +17695,13 @@ def regrids( f.set_data(new_data, axes=self.get_data_axes(), copy=False) # Set the cyclicity of the destination longitude - x = f.dimension_coordinate("X", default=None) + key, x = f.dimension_coordinate("X", default=(None, None), item=True) if x is not None and x.Units.equivalent(Units("degrees")): - f.cyclic("X", iscyclic=dst_cyclic, period=Data(360, "degrees")) + f.cyclic( + key, + iscyclic=dst_cyclic, + config={"coord": x, "period": Data(360.0, "degrees")}, + ) # Release old memory from ESMF (this ought to happen garbage # collection, but it doesn't seem to work there!) diff --git a/cf/mixin/coordinate.py b/cf/mixin/coordinate.py index 838f19dfa6..19d74d754c 100644 --- a/cf/mixin/coordinate.py +++ b/cf/mixin/coordinate.py @@ -6,9 +6,13 @@ _deprecated_kwarg_check, ) +from ..units import Units from ..data.data import Data +_units_degrees = Units("degrees") + + class Coordinate: """Mixin class for dimension or auxiliary coordinate constructs. 
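A rough usage sketch of the autoperiod() behaviour that the next hunk implements, assuming the public cf API (cf.DimensionCoordinate, cf.Data) and an illustrative longitude coordinate with no period yet set:

    import cf

    lon = cf.DimensionCoordinate(
        properties={"standard_name": "longitude", "units": "degrees_east"},
        data=cf.Data([22.5, 67.5, 112.5], "degrees_east"),
    )
    print(lon.period())      # None, no period has been set yet

    # autoperiod() recognises the longitude units and applies a
    # 360-degree period in the coordinate's own units
    lon.autoperiod(inplace=True)
    print(lon.period())      # expected: 360.0 degrees_east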
@@ -411,16 +415,33 @@ def autoperiod(self, inplace=False, config={}): if "cyclic" in config and not config["cyclic"]: return c + if "X" in config: + X = config["X"] + if not X: + return c + else: + X = None + if c.period() is not None: return c - if not ( + if X is None and not ( c.Units.islongitude or c.get_property("standard_name", None) == "grid_longitude" ): return c - c.period(Data(360.0, units=c.Units)) + period = config.get("period") + if period is None: + units = c.Units + if units.islongitude: + period = Data(360.0, units="degrees_east") + else: + period = Data(360.0, units="degrees") + + period.Units = units + + c.period(period=period) return c diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 2f0b52cbaf..1484f812d2 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -907,12 +907,22 @@ def autocyclic(self, key=None, coord=None, verbose=None, config={}): `bool` """ + noop = config.get("no-op") + if "cyclic" in config: - cyclic = config["cyclic"] - if not cyclic: + if not config["cyclic"]: + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False - else: - cyclic = None + else: + period = coord.period() + if period is not None: + period = None + else: + period = config.get("period") + + self.cyclic(key, iscyclic=True, period=period, config=config) + return True if coord is None: key, coord = self.dimension_coordinate( @@ -922,39 +932,46 @@ def autocyclic(self, key=None, coord=None, verbose=None, config={}): return False elif "X" in config: if not config["X"]: + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False elif not coord.X: + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False - if cyclic: - self.cyclic(key, iscyclic=True, period=config["period"]) - return True - bounds_range = config.get("bounds_range") if bounds_range is not None: bounds_units = config["bounds_units"] else: bounds = coord.get_bounds(None) if bounds is None: - self.cyclic(key, iscyclic=False) + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False data = bounds.get_data(None, _fill_value=False) if data is None: - self.cyclic(key, iscyclic=False) + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False bounds_units = bounds.Units period = coord.period() - has_period = period is not None - if not has_period: + if period is not None: + has_period = True + else: + period = config.get("period") + has_period = False + + if period is None: if bounds_units.islongitude: period = Data(360.0, units="degrees_east") - elif bounds_units == _units_degrees: + elif bounds_units.equivalent(_units_degrees): period = Data(360.0, units="degrees") else: - self.cyclic(key, iscyclic=False) + self.cyclic(key, iscyclic=False, config=config) return False period.Units = bounds_units @@ -963,146 +980,20 @@ def autocyclic(self, key=None, coord=None, verbose=None, config={}): bounds_range = abs(data.last_element() - data.first_element()) if bounds_range != period: - self.cyclic(key, iscyclic=False) + if not noop: + self.cyclic(key, iscyclic=False, config=config) return False if has_period: period = None - axis = self.get_data_axes(key, default=(None,))[0] + config = config.copy() + config["axis"] = self.get_data_axes(key, default=(None,))[0] - self.cyclic( - key, - iscyclic=True, - period=period, - config={"axis": axis, "dim": coord}, - ) + self.cyclic(key, iscyclic=True, period=period, config=config) return True - def del_construct(self, identity=None, default=ValueError()): - """Remove a 
metadata construct. - - If a domain axis construct is selected for removal then it - can't be spanned by any metadata construct's data. See - `del_domain_axis` for more options in this case. - - A domain ancillary construct may be removed even if it is - referenced by coordinate reference construct. In this case the - reference is replace with `None`. - - .. versionadded:: 3.9.0 - - .. seealso:: `constructs`, `get_construct`, `has_construct`, - `set_construct`, `del_domain_axis`, - `del_coordinate_reference` - - :Parameters: - - identity: optional - Select the construct by one of - - * A metadata construct identity. - - {{construct selection identity}} - - * The key of a metadata construct - - * `None`. This is the default, which selects the - metadata construct when there is only one of them. - - *Parameter example:* - ``identity='latitude'`` - - *Parameter example:* - ``identity='T' - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` - - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity=cf.eq('time')'`` - - *Parameter example:* - ``identity=re.compile('^lat')`` - - Select the construct to removed. Must be - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='cell_area'`` - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` - - default: optional - Return the value of the *default* parameter if the - construct can not be removed, or does not exist. If set to - an `Exception` instance then it will be raised instead. - - :Returns: - - The removed metadata construct. 
- - **Examples:** - - >>> f.del_construct('X') - - - """ - key = self.construct_key(identity, default=None) - if key is None: - return self._default( - default, - f"Can't identify construct to delete from {identity!r}", - ) - - return super().del_construct(key, default=default) - def del_coordinate_reference( self, identity=None, construct=None, default=ValueError() ): @@ -1177,6 +1068,9 @@ def del_coordinate_reference( key = self.coordinate_reference(identity, key=True, default=None) if key is None: + if default is None: + return + return self._default( default, f"Can't identify construct from {identity!r}", @@ -1197,6 +1091,9 @@ def del_coordinate_reference( c_key = self.construct(construct, key=True, default=None) if c_key is None: + if default is None: + return + return self._default( default, f"Can't identify construct from {construct!r}" ) @@ -2108,6 +2005,9 @@ def get_coordinate_reference( c_key = self.construct(construct, key=True, default=None) if c_key is None: + if default is None: + return + return self._default( default, f"Can't identify construct from {construct!r}" ) @@ -2129,94 +2029,6 @@ def get_coordinate_reference( return out - def has_construct(self, identity=None): - """Whether a metadata construct exists. - - .. versionadded:: 3.4.0 - - .. seealso:: `construct`, `del_construct`, `get_construct`, - `set_construct` - - :Parameters: - - identity: optional - Select the construct. Must be - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='T' - - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='cell_area'`` - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` - - :Returns: - - `bool` - `True` if the construct exists, otherwise `False`. - - **Examples:** - - >>> f = cf.example_field(0) - >>> print(f) - Field: specific_humidity (ncvar%q) - ---------------------------------- - Data : specific_humidity(latitude(5), longitude(8)) 1 - Cell methods : area: mean - Dimension coords: latitude(5) = [-75.0, ..., 75.0] degrees_north - : longitude(8) = [22.5, ..., 337.5] degrees_east - : time(1) = [2019-01-01 00:00:00] - - >>> f.has_construct('T') - True - >>> f.has_construct('longitude') - True - >>> f.has_construct('Z') - False - - """ - return self.construct(identity, default=None) is not None - def iscyclic(self, identity, **kwargs): """Returns True if the given axis is cyclic. 
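For orientation, a small sketch of the cyclicity queries that these fielddomain hunks feed, assuming cf.example_field(0), whose longitude cell bounds span the full 360 degrees:

    import cf

    f = cf.example_field(0)
    print(f.iscyclic("X"))   # True, detected automatically from the bounds
    print(f.iscyclic("Y"))   # False
    print(f.cyclic())        # e.g. {'domainaxis1'}, the set of cyclic axes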
@@ -2607,9 +2419,7 @@ def set_construct( out = super().set_construct(construct, key=key, axes=axes, copy=copy) if construct_type == "dimension_coordinate": - construct.autoperiod( - inplace=True, config={"cyclic": autocyclic.get("cyclic", True)} - ) + construct.autoperiod(inplace=True, config=autocyclic) self._conform_coordinate_references(out) self.autocyclic(key=out, coord=construct, config=autocyclic) try: @@ -2618,9 +2428,7 @@ def set_construct( pass elif construct_type == "auxiliary_coordinate": - construct.autoperiod( - inplace=True, config={"cyclic": autocyclic.get("cyclic", True)} - ) + construct.autoperiod(inplace=True, config=autocyclic) self._conform_coordinate_references(out) try: self._conform_cell_methods() diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index 09515555b8..9a8e0f020f 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -614,8 +614,11 @@ def get_property(self, prop, default=ValueError()): if prop in self._special_properties: try: return getattr(self, prop) - except AttributeError as err: - return self._default(default, err) + except AttributeError as error: + if default is None: + return + + return self._default(default, error) # Still here? Then get a non-special property return super().get_property(prop, default=default) @@ -714,8 +717,11 @@ def del_property(self, prop, default=ValueError()): if prop in self._special_properties: try: out = getattr(self, prop) - except AttributeError as err: - return self._default(default, err) + except AttributeError as error: + if default is None: + return + + return self._default(default, error) else: delattr(self, prop) return out diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 76ef47b5ca..22c6675af0 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -1817,7 +1817,7 @@ def minimum(self): "ERROR: Can't get the minimum when there is no data array" ) - def period(self, *value): + def period(self, *value, **config): """Return or set the period of the data. This is distinct from the cyclicity of individual axes. @@ -1867,10 +1867,16 @@ def period(self, *value): """ - old = self._custom.get("period") + custom = self._custom + old = custom.get("period") if old is not None: old = old.copy() + period = config.get("period") + if period is not None: + custom["period"] = period.copy() + return old + if not value: return old @@ -1894,7 +1900,7 @@ def period(self, *value): value = abs(value) value.dtype = float - self._custom["period"] = value + custom["period"] = value return old @@ -3923,7 +3929,8 @@ def sin(self, inplace=False, i=False): @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def arctan(self, inplace=False): - """Take the trigonometric inverse tangent of the data element-wise. + """Take the trigonometric inverse tangent of the data element- + wise. Units are ignored in the calculation. The result has units of radians. @@ -4139,7 +4146,8 @@ def arcsinh(self, inplace=False): @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def arccos(self, inplace=False): - """Take the trigonometric inverse cosine of the data element- wise. + """Take the trigonometric inverse cosine of the data element- + wise. Units are ignored in the calculation. The result has units of radians. 
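A brief sketch of the keyword form of period() added above, which the override_units() hunk below relies on; the coordinate construct here is illustrative only:

    import cf

    lon = cf.DimensionCoordinate(
        properties={"standard_name": "longitude", "units": "degrees_east"},
        data=cf.Data([0.0, 90.0, 180.0, 270.0], "degrees_east"),
    )

    # Keyword form: set the period directly, bypassing positional parsing
    lon.period(period=cf.Data(360.0, "degrees_east"))
    print(lon.period())      # expected: 360.0 degrees_east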
@@ -5164,8 +5172,7 @@ def override_units(self, units, inplace=False, i=False): # Override the Units on the period period = v.period() if period is not None: - # v._custom['period'] = period.override_units(units) - v.period(period.override_units(units)) + v.period(period=period.override_units(units)) return v diff --git a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index 2b3d495e15..ab9d1f19bd 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -3630,7 +3630,7 @@ def inspect(self): """ print(cf_inspect(self)) # pragma: no cover - def period(self, *value): + def period(self, *value, **config): """Return or set the period for cyclic values. .. seealso:: `cyclic` @@ -3678,7 +3678,7 @@ def period(self, *value): """ - old = super().period(*value) + old = super().period(*value, **config) if old is not None: return old @@ -3687,7 +3687,7 @@ def period(self, *value): if bounds is None: return - return bounds.period(*value) + return bounds.period(*value, **config) @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index 6412a321ca..c02d851d98 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -490,6 +490,8 @@ _axis = {"area": None} +_autocyclic_false = {"no-op": True, "X": False, "cyclic": False} + class UMField: """TODO.""" @@ -911,7 +913,7 @@ def __init__( axiscode, "y" ) else: - ykey, yc = self.xy_coordinate(axiscode, "y") + ykey, yc, yaxis = self.xy_coordinate(axiscode, "y") # -------------------------------------------------------- # Create the 'X' dimension coordinate @@ -932,7 +934,7 @@ def __init__( axiscode, "x" ) else: - xkey, xc = self.xy_coordinate(axiscode, "x") + xkey, xc, xaxis = self.xy_coordinate(axiscode, "x") # -10: rotated latitude (not an official axis code) # -11: rotated longitude (not an official axis code) @@ -1051,8 +1053,15 @@ def __init__( # Force cyclic X axis for particular values of LBHEM if xkey is not None and int_hdr[lbhem] in (0, 1, 2, 4): - # field.cyclic("X", period=360) - field.cyclic(xkey, period=360) + field.cyclic( + xkey, + iscyclic=True, + config={ + "axis": xaxis, + "coord": xc, + "period": Data(360.0, units=xc.Units), + }, + ) self.fields.append(field) @@ -1194,7 +1203,7 @@ def atmosphere_hybrid_height_coordinate(self, axiscode): dc, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) # "b" domain ancillary @@ -1279,7 +1288,7 @@ def depth_coordinate(self, axiscode): ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) array = numpy_array( @@ -1303,7 +1312,7 @@ def depth_coordinate(self, axiscode): ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -1386,7 +1395,7 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): dc, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) ac = self.implementation.initialise_AuxiliaryCoordinate() @@ -1399,7 +1408,7 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) ac = self.implementation.initialise_AuxiliaryCoordinate() @@ -1410,7 +1419,7 @@ def atmosphere_hybrid_sigma_pressure_coordinate(self, axiscode): ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) ac.id = 
"UM_atmosphere_hybrid_sigma_pressure_coordinate_bk" @@ -2228,7 +2237,7 @@ def model_level_number_coordinate(self, aux=False): c, axes=[_axis["z"]], copy=True, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) else: self.implementation.set_dimension_coordinate( @@ -2236,7 +2245,7 @@ def model_level_number_coordinate(self, aux=False): c, axes=[_axis["z"]], copy=True, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) else: array = numpy_array(array, dtype=self.int_hdr_dtype) @@ -2257,7 +2266,7 @@ def model_level_number_coordinate(self, aux=False): ac, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) else: @@ -2270,7 +2279,7 @@ def model_level_number_coordinate(self, aux=False): dc, axes=[_axis["z"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) _cached_model_level_number_coordinate[key] = c @@ -2369,7 +2378,7 @@ def pseudolevel_coordinate(self, LBUSER5): dc, axes=[_axis["p"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2398,7 +2407,7 @@ def radiation_wavelength_coordinate(self, rwl, rwl_units): dc, axes=[_axis["r"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2456,7 +2465,7 @@ def size_1_height_coordinate(self, axiscode, height, units): dc, axes=[_axis["z"]], copy=copy, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2617,7 +2626,7 @@ def time_coordinate(self, axiscode): dc, axes=[_axis["t"]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2646,7 +2655,7 @@ def time_coordinate_from_extra_data(self, axiscode, axis): dc, axes=[_axis[axis]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2684,7 +2693,7 @@ def time_coordinate_from_um_timeseries(self, axiscode, axis): dc, axes=[_axis[axis]], copy=False, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc @@ -2890,8 +2899,14 @@ def xy_coordinate(self, axiscode, axis): `str, `DimensionCoordinate` """ - X = axis == "x" + X = axiscode in (11, -11) + if X: + autocyclic = {"X": True} + else: + autocyclic = _autocyclic_false + + if axis == "x": delta = self.bdx origin = self.real_hdr[bzx] size = self.lbnpt @@ -2899,8 +2914,6 @@ def xy_coordinate(self, axiscode, axis): da = self.implementation.initialise_DomainAxis(size=size) axis_key = self.implementation.set_domain_axis(self.field, da) _axis["x"] = axis_key - - autocyclic = {"X": True} else: delta = self.bdy origin = self.real_hdr[bzy] @@ -2910,7 +2923,7 @@ def xy_coordinate(self, axiscode, axis): axis_key = self.implementation.set_domain_axis(self.field, da) _axis["y"] = axis_key - autocyclic = {"cyclic": False} + autocyclic = _autocyclic_false if abs(delta) > self.atol: # Create regular coordinates from header items @@ -2948,26 +2961,25 @@ def xy_coordinate(self, axiscode, axis): array = self.extra.get(axis, None) bounds = self.extra.get(axis + "_bounds", None) + units = _axiscode_to_Units.setdefault(axiscode, None) + dc = self.implementation.initialise_DimensionCoordinate() - dc = self.coord_data( - dc, - array, - bounds, - units=_axiscode_to_Units.setdefault(axiscode, None), - ) + dc = self.coord_data(dc, array, bounds, units=units) dc = self.coord_positive(dc, axiscode, axis_key) # _axis[axis]) dc = self.coord_axis(dc, axiscode) dc = self.coord_names(dc, axiscode) if X and bounds is not None: - autocyclic["bounds_range"] = abs(bounds[0, 0] - 
bounds[-1, -1]) - autocyclic["bounds_units"] = dc.Units + autocyclic["cyclic"] = abs(bounds[0, 0] - bounds[-1, -1]) == 360.0 + autocyclic["period"] = Data(360.0, units=units) + autocyclic["axis"] = axis_key + autocyclic["coord"] = dc key = self.implementation.set_dimension_coordinate( self.field, dc, axes=[axis_key], copy=False, autocyclic=autocyclic ) - return key, dc + return key, dc, axis_key @_manage_log_level_via_verbose_attr def z_coordinate(self, axiscode): @@ -3039,7 +3051,7 @@ def z_coordinate(self, axiscode): dc, axes=[_axis["z"]], copy=copy, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) logger.info(" " + dc.dump(display=False)) # pragma: no cover @@ -3114,7 +3126,7 @@ def z_reference_coordinate(self, axiscode): dc, axes=[_axis["z"]], copy=copy, - autocyclic={"cyclic": False}, + autocyclic=_autocyclic_false, ) return dc From 5701364468d4c47cda40a03e4dd7e2ff2bf1a6a8 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 21 Apr 2021 23:42:29 +0100 Subject: [PATCH 35/53] devs --- cf/__init__.py | 3 +- cf/aggregate.py | 6 +- cf/constructlist.py | 745 +++++++++++ cf/constructs.py | 10 +- cf/datum.py | 3 - cf/dimensioncoordinate.py | 9 +- cf/docstring/docstring.py | 16 +- cf/examplefield.py | 14 + cf/field.py | 259 +--- cf/fieldlist.py | 1194 +----------------- cf/mixin/__init__.py | 1 + cf/mixin/fielddomain.py | 142 +-- cf/mixin/fielddomainlist.py | 339 +++++ cf/mixin/properties.py | 32 +- cf/mixin/propertiesdata.py | 19 +- cf/mixin/propertiesdatabounds.py | 14 +- cf/query.py | 4 +- cf/read_write/netcdf/netcdfread.py | 7 - cf/read_write/read.py | 10 - cf/read_write/um/filearray.py | 4 - cf/read_write/um/functions.py | 1 - cf/test/test_Field.py | 11 +- cf/test/test_FieldList.py | 2 +- cf/umread_lib/cInterface.py | 9 - cf/umread_lib/umfile.py | 10 - docs/source/class/cf.AuxiliaryCoordinate.rst | 15 + docs/source/class/cf.Constructs.rst | 3 + docs/source/class/cf.DimensionCoordinate.rst | 15 + docs/source/class/cf.Domain.rst | 27 +- docs/source/class/cf.Field.rst | 26 +- docs/source/field_analysis.rst | 4 +- docs/source/function.rst | 2 + docs/source/tutorial.py | 40 +- docs/source/tutorial.rst | 86 +- 34 files changed, 1467 insertions(+), 1615 deletions(-) create mode 100644 cf/constructlist.py create mode 100644 cf/mixin/fielddomainlist.py diff --git a/cf/__init__.py b/cf/__init__.py index f48079656d..ef5ecd4a43 100644 --- a/cf/__init__.py +++ b/cf/__init__.py @@ -236,6 +236,7 @@ from .timeduration import TimeDuration, Y, M, D, h, m, s from .units import Units +from .constructlist import ConstructList from .fieldlist import FieldList from .dimensioncoordinate import DimensionCoordinate @@ -299,7 +300,7 @@ from .constants import * # noqa: F403 from .functions import * # noqa: F403 from .maths import relative_vorticity, histogram -from .examplefield import example_field +from .examplefield import example_field, example_fields, example_domain from .cfimplementation import CFImplementation, implementation diff --git a/cf/aggregate.py b/cf/aggregate.py index d7b75b0d9f..e26fa8696c 100644 --- a/cf/aggregate.py +++ b/cf/aggregate.py @@ -2706,8 +2706,10 @@ def _hash_values(m): # Zero axes have different 1-d coordinate values, so don't # aggregate anything in this entire group. # -------------------------------------------------------- - meta[0].message = ( - "indistinguishable coordinates or other " "domain information" + meta[ + 0 + ].message = ( + "No corresponding axes have different 1-d coordinate values." 
) return () diff --git a/cf/constructlist.py b/cf/constructlist.py new file mode 100644 index 0000000000..227516c1f2 --- /dev/null +++ b/cf/constructlist.py @@ -0,0 +1,745 @@ +import logging + +import cfdm + +from .mixin_container import Container + +from .functions import ( + _DEPRECATION_ERROR, + _DEPRECATION_ERROR_KWARGS, + _DEPRECATION_ERROR_DICT, +) + +from .decorators import ( + _deprecated_kwarg_check, + _manage_log_level_via_verbosity, +) + + +logger = logging.getLogger(__name__) + + +class ConstructList(list, Container, cfdm.Container): + """An ordered sequence of constructs. + + The elements of the list are construct of the same type. + + The list supports the python list-like operations (such as + indexing and methods like `!append`). + + >>> fl = cf.{{class}}() + >>> len(fl) + 0 + >>> fl = cf.FieldList(f) + >>> len(fl) + 1 + >>> fl = cf.FieldList([f, g]) + >>> len(fl) + 2 + >>> fl = cf.FieldList(cf.FieldList([f] * 3)) + >>> len(fl) + 3 + >>> len(fl + fl) + 6 + + Such methods provide functionality similar to that of a + :ref:`built-in list `. The main difference + is that when an element needs to be assesed for equality its + `!equals` method is used, rather than the ``==`` operator. + + """ + + def __init__(self, constructs=None): + """**Initialization** + + :Parameters: + + constructs: (sequence of) constructs + Create a new list with these constructs. + + """ + super(cfdm.Container, self).__init__() + + if constructs is not None: + if getattr(constructs, "construct_type", None) is not None: + self.append(constructs) + else: + self.extend(constructs) + + def __call__(self, *identities): + """Alias for `cf.{{class}}.select_by_identity`.""" + return self.select_by_identity(*identities) + + def __deepcopy__(self, memo): + """Called by the `copy.deepcopy` standard library function.""" + return self.copy() + + def __repr__(self): + """Called by the `repr` built-in function. + + x.__repr__() <==> repr(x) + + """ + out = [repr(f) for f in self] + out = ",\n ".join(out) + return "[" + out + "]" + + def __str__(self): + """Called by the `str` built-in function. + + x.__str__() <==> str(x) + + """ + return repr(self) + + def __docstring_method_exclusions__(self): + """Return the names of methods to exclude from docstring + substitutions. + + See `_docstring_method_exclusions` for details. + + """ + return ( + "append", + "extend", + "insert", + "pop", + "reverse", + "clear", + ) + + # ---------------------------------------------------------------- + # Overloaded list methods + # ---------------------------------------------------------------- + def __add__(self, x): + """The binary arithmetic operation ``+`` + + f.__add__(x) <==> f + x + + :Returns: + + `{{class}}` + The concatenation of the list and another sequence. + + **Examples:** + + >>> h = f + g + >>> f += g + + """ + return type(self)(list.__add__(self, x)) + + def __contains__(self, y): + """Called to implement membership test operators. + + x.__contains__(y) <==> y in x + + {{List comparison}} + + Note that ``x in fl`` is equivalent to + ``any(f.equals(x) for f in fl)``. + + """ + for f in self: + if f.equals(y): + return True + + return False + + def __mul__(self, x): + """The binary arithmetic operation ``*`` + + f.__mul__(x) <==> f * x + + :Returns: + + `{{class}}` + The list added to itself *n* times. 
+ + **Examples:** + + >>> h = f * 2 + >>> f *= 2 + + """ + return type(self)(list.__mul__(self, x)) + + def __eq__(self, other): + """The rich comparison operator ``==`` + + f.__eq__(x) <==> f == x + + {{List comparison}} + + Note that ``f == x`` is equivalent to ``f.equals(x)``. + + :Returns: + + `bool` + + """ + return self.equals(other) + + def __getslice__(self, i, j): + """Called to implement evaluation of f[i:j] + + f.__getslice__(i, j) <==> f[i:j] + + :Returns: + + `{{class}}` + Slice of the list from *i* to *j*. + + **Examples:** + + >>> g = f[0:1] + >>> g = f[1:-4] + >>> g = f[:1] + >>> g = f[1:] + + """ + return type(self)(list.__getslice__(self, i, j)) + + def __getitem__(self, index): + """Called to implement evaluation of f[index] + + f.__getitem_(index) <==> f[index] + + :Returns: + + If *index* is an integer then the corresponding + construct element is returned. If *index* is a slice + then a new {{class}} is returned, which may be empty. + + **Examples:** + + >>> g = f[0] + >>> g = f[-1:-4:-1] + >>> g = f[2:2:2] + + """ + out = list.__getitem__(self, index) + + if isinstance(out, list): + return type(self)(out) + + return out + + def __ne__(self, other): + """The rich comparison operator ``!=`` + + f.__ne__(x) <==> f != x + + {{List comparison}} + + Note that ``f != x`` is equivalent to ``not f.equals(x)``. + + :Returns: + + `bool` + + """ + return not self.equals(other) + + # ??? + __len__ = list.__len__ + __setitem__ = list.__setitem__ + append = list.append + extend = list.extend + insert = list.insert + pop = list.pop + reverse = list.reverse + sort = list.sort + + # ---------------------------------------------------------------- + # Methods + # ---------------------------------------------------------------- + def close(self): + """Close all files referenced by each construct in the list. + + Note that a closed file will be automatically reopened if its + contents are subsequently required. + + :Returns: + + `None` + + **Examples:** + + >>> f.close() + + """ + for f in self: + f.close() + + def count(self, value): + """Return number of occurrences of value. + + {{List comparison}} + + Note that ``fl.count(value)`` is equivalent to + ``sum(f.equals(value) for f in fl)``. + + .. seealso:: `list.count` + + **Examples:** + + >>> f = cf.{{class}}([a, b, c, a]) + >>> f.count(a) + 2 + >>> f.count(b) + 1 + >>> f.count('a string') + 0 + + """ + return len([None for f in self if f.equals(value)]) + + def index(self, value, start=0, stop=None): + """Return first index of value. + + {{List comparison}} + + An exception is raised if there is no such construct. + + .. seealso:: `list.index` + + """ + if start < 0: + start = len(self) + start + + if stop is None: + stop = len(self) + elif stop < 0: + stop = len(self) + stop + + for i, f in enumerate(self[start:stop]): + if f.equals(value): + return i + start + + raise ValueError( + "{0!r} is not in {1}".format(value, self.__class__.__name__) + ) + + def remove(self, value): + """Remove first occurrence of value. + + {{List comparison}} + + .. seealso:: `list.remove` + + """ + for i, f in enumerate(self): + if f.equals(value): + del self[i] + return + + raise ValueError( + "{0}.remove(x): x not in {0}".format(self.__class__.__name__) + ) + + def sort(self, key=None, reverse=False): + """Sort of the list in place. + + By default the list is sorted by the identities of its + constructs, but any sort criteria cna be specified with the + *key* parameter. + + The sort is stable. + + .. versionadded:: 1.0.4 + + .. 
seealso:: `reverse` + + :Parameters: + + key: function, optional + Specify a function of one argument that is used to + extract a comparison key from each construct. By + default the list is sorted by construct identity, + i.e. the default value of *key* is ``lambda x: + x.identity()``. + + reverse: `bool`, optional + If set to `True`, then the list elements are sorted as + if each comparison were reversed. + + :Returns: + + `None` + + """ + if key is None: + key = lambda f: f.identity() + + return super().sort(key=key, reverse=reverse) + + def copy(self, data=True): + """Return a deep copy. + + ``f.copy()`` is equivalent to ``copy.deepcopy(f)``. + + :Returns: + + `{{class}}` + The deep copy. + + **Examples:** + + >>> g = f.copy() + >>> g is f + False + >>> f.equals(g) + True + >>> import copy + >>> h = copy.deepcopy(f) + >>> h is f + False + >>> f.equals(g) + True + + """ + return type(self)([f.copy(data=data) for f in self]) + + @_deprecated_kwarg_check("traceback") + @_manage_log_level_via_verbosity + def equals( + self, + other, + rtol=None, + atol=None, + verbose=None, + ignore_data_type=False, + ignore_fill_value=False, + ignore_properties=(), + ignore_compression=False, + ignore_type=False, + ignore=(), + traceback=False, + unordered=False, + ): + """Whether two lists are the same. + + Equality requires the two lists to have the same length and + for the construct elements to be equal pair-wise, using their + `!equals` methods. + + Any type of object may be tested but, in general, equality is + only possible with another {{class}}, or a subclass of + one. See the *ignore_type* parameter. + + Equality is between the constructs is strict by default. This + means that for two constructs to be considered equal they must + have corresponding metadata constructs and for each pair of + constructs: + + * the same descriptive properties must be present, with the + same values and data types, and vector-valued properties + must also have same the size and be element-wise equal (see + the *ignore_properties* and *ignore_data_type* parameters), + and + + .. + + * if there are data arrays then they must have same shape and + data type, the same missing data mask, and be element-wise + equal (see the *ignore_data_type* parameter). + + {{equals tolerance}} + + If data arrays are compressed then the compression type and + the underlying compressed arrays must be the same, as well as + the arrays in their uncompressed forms. See the + *ignore_compression* parameter. + + NetCDF elements, such as netCDF variable and dimension names, + do not constitute part of the CF data model and so are not + checked on any construct. + + :Parameters: + other: + The object to compare for equality. + + {{atol: number, optional}} + + {{rtol: number, optional}} + + {{ignore_fill_value: `bool`, optional}} + + {{verbose: `int` or `str` or `None`, optional}} + + ignore_properties: sequence of `str`, optional + The names of properties of the field construct (not + the TODO metadata constructs) to omit from the + comparison. Note that the "Conventions" property is + always omitted by default. + + {{ignore_data_type: `bool`, optional}} + + {{ignore_compression: `bool`, optional}} + + unordered: `bool`, optional + TODO + + :Returns: + + `bool` + Whether the two lists are equal. 
+ + **Examples:** + + >>> fl.equals(fl) + True + >>> fl.equals(fl.copy()) + True + >>> fl.equals(fl[:]) + True + >>> fl.equals('a string') + False + + """ + if ignore: + _DEPRECATION_ERROR_KWARGS( + self, + "equals", + {"ignore": ignore}, + "Use keyword 'ignore_properties' instead.", + ) # pragma: no cover + + # Check for object identity + if self is other: + return True + + # Check that each object is of compatible type + if ignore_type: + if not isinstance(other, self.__class__): + other = type(self)(source=other, copy=False) + elif not isinstance(other, self.__class__): + logger.info( + "{0}: Incompatible type: {1}".format( + self.__class__.__name__, other.__class__.__name__ + ) + ) # pragma: no cover + return False + + # Check that there are equal numbers of constructs + len_self = len(self) + if len_self != len(other): + logger.info( + "{0}: Different numbers of constructs: " + "{1}, {2}".format( + self.__class__.__name__, len_self, len(other) + ) + ) # pragma: no cover + return False + + if not unordered or len_self == 1: + # ---------------------------------------------------- + # Check the lists pair-wise + # ---------------------------------------------------- + for i, (f, g) in enumerate(zip(self, other)): + if not f.equals( + g, + rtol=rtol, + atol=atol, + ignore_fill_value=ignore_fill_value, + ignore_properties=ignore_properties, + ignore_compression=ignore_compression, + ignore_data_type=ignore_data_type, + ignore_type=ignore_type, + verbose=verbose, + ): + logger.info( + "{0}: Different constructs at element {1}: " + "{2!r}, {3!r}".format(self.__class__.__name__, i, f, g) + ) # pragma: no cover + return False + else: + # ---------------------------------------------------- + # Check the lists set-wise + # ---------------------------------------------------- + # Group the variables by identity + self_identity = {} + for f in self: + self_identity.setdefault(f.identity(), []).append(f) + + other_identity = {} + for f in other: + other_identity.setdefault(f.identity(), []).append(f) + + # Check that there are the same identities + if set(self_identity) != set(other_identity): + logger.info( + "{}: Different sets of identities: {}, {}".format( + self.__class__.__name__, + set(self_identity), + set(other_identity), + ) + ) # pragma: no cover + return False + + # Check that there are the same number of variables + # for each identity + for identity, fl in self_identity.items(): + gl = other_identity[identity] + if len(fl) != len(gl): + logger.info( + "{0}: Different numbers of {1!r} {2}s: " + "{3}, {4}".format( + self.__class__.__name__, + identity, + fl[0].__class__.__name__, + len(fl), + len(gl), + ) + ) # pragma: no cover + return False + + # For each identity, check that there are matching pairs + # of equal constructs. + for identity, fl in self_identity.items(): + gl = other_identity[identity] + + for f in fl: + found_match = False + for i, g in enumerate(gl): + if f.equals( + g, + rtol=rtol, + atol=atol, + ignore_fill_value=ignore_fill_value, + ignore_properties=ignore_properties, + ignore_compression=ignore_compression, + ignore_data_type=ignore_data_type, + ignore_type=ignore_type, + verbose=verbose, + ): + found_match = True + del gl[i] + break + + if not found_match: + logger.info( + "{0}: No {1} equal to: {2!r}".format( + self.__class__.__name__, g.__class__.__name__, f + ) + ) # pragma: no cover + return False + + # ------------------------------------------------------------ + # Still here? 
Then the lists are equal + # ------------------------------------------------------------ + return True + + def select_by_identity(self, *identities): + """Select list elements constructs by identity. + + To find the inverse of the selection, use a list comprehension + with the `!match_by_identity` method of the constucts. For + example, to select all constructs whose identity is *not* + ``'air_temperature'``: + + >>> gl = cf.{{class}}( + ... x for x in fl if not f.match_by_identity('air_temperature') + ... ) + + .. versionadded:: 3.0.0 + + .. seealso:: `select`, `__call__`, `select_by_ncvar`, + `select_by_property`, + `{{package}}.{{class}}.match_by_identity` + + :Parameters: + + identities: optional + Select constructs from the list. By default all + constructs are selected. May be one or more of: + + * A construct identity. + + {{construct selection identity}} + + If no identities are provided then all list elements are + selected. + + *Parameter example:* + ``'latitude'`` + + *Parameter example:* + ``'long_name=Air Temperature'`` + + *Parameter example:* + ``'air_pressure', 'longitude'`` + + :Returns: + + `{{class}}` + The matching constructs. + + **Examples:** + + See `{{package}}.{{class}}.match_by_identity` + + """ + return type(self)(f for f in self if f.match_by_identity(*identities)) + + # ---------------------------------------------------------------- + # Aliases + # ---------------------------------------------------------------- + def select(self, *identities, **kwargs): + """Alias of `cf.{{class}}.select_by_identity`. + + To find the inverse of the selection, use a list comprehension + with the `!match_by_identity` method of the constucts. For + example, to select all constructs whose identity is *not* + ``'air_temperature'``: + + >>> gl = cf.{{class}}( + ... f for f in fl if not f.match_by_identity('air_temperature') + ... ) + + .. seealso:: `select_by_identity`, `__call__` + + """ + if kwargs: + _DEPRECATION_ERROR_KWARGS( + self, + "select", + kwargs, + "Use methods 'select_by_units', 'select_by_construct', " + "'select_by_properties', 'select_by_naxes', 'select_by_rank' " + "instead.", + ) # pragma: no cover + + if identities and isinstance(identities[0], (list, tuple, set)): + _DEPRECATION_ERROR( + "Use of a {!r} for identities has been deprecated. Use the " + "* operator to unpack the arguments instead.".format( + identities[0].__class__.__name__ + ) + ) # pragma: no cover + + for i in identities: + if isinstance(i, dict): + _DEPRECATION_ERROR_DICT( + "Use methods 'select_by_units', 'select_by_construct', " + "'select_by_properties', 'select_by_naxes', " + "'select_by_rank' instead." + ) # pragma: no cover + + if isinstance(i, str) and ":" in i: + error = True + if "=" in i: + index0 = i.index("=") + index1 = i.index(":") + error = index0 > index1 + + if error: + _DEPRECATION_ERROR( + "The identity format {!r} has been deprecated at " + "version 3.0.0. Try {!r} instead.".format( + i, i.replace(":", "=", 1) + ) + ) # pragma: no cover + + return self.select_by_identity(*identities) diff --git a/cf/constructs.py b/cf/constructs.py index bdf9bb5812..f9fb9f910d 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -155,7 +155,7 @@ def _matching_values(cls, value0, construct, value1, basic=False): # # return self.constructs[da_key] - def _filter_by_identity(self, arg, todict, _config, identities): + def _filter_by_identity(self, arg, identities, todict, _config): """Worker function for `filter_by_identity` and `filter`. See `filter_by_identity` for details. 
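For reference, a quick sketch of the public filtering that the private _filter_by_identity() below supports, assuming cf.example_field(0):

    import cf

    f = cf.example_field(0)

    # Filter by a standard name and by the 'X' coordinate type
    print(f.constructs.filter_by_identity("latitude"))
    print(f.constructs.filter_by_identity("X"))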
@@ -165,17 +165,13 @@ def _filter_by_identity(self, arg, todict, _config, identities): """ ctypes = [i for i in "XTYZ" if i in identities] - # if len(ctypes) == len(identities): - # # All identities are coordinate types (X, T, Y or Z) - # return self._filter_by_coordinate_type(arg, todict, ctypes) - config = {"identities_kwargs": {"ctypes": ctypes}} if _config: config.update(_config) - return super()._filter_by_identity(arg, todict, config, identities) + return super()._filter_by_identity(arg, identities, todict, config) - # def _filter_by_coordinate_type(self, arg, todict, ctypes): + # def _filter_by_coordinate_type(self, arg, ctypes, todict): # """Worker function for `filter_by_identity` and `filter`. # # See `filter_by_identity` for details. diff --git a/cf/datum.py b/cf/datum.py index 371d94354e..92ab35f1fc 100644 --- a/cf/datum.py +++ b/cf/datum.py @@ -30,6 +30,3 @@ def __repr__(self): """ return super().__repr__().replace("<", "] + # be added to field0 refs_to_add_from_field1 = [] # Check that the two fields are combinable @@ -2230,7 +2219,7 @@ def _binary_operation(self, other, method): a = out0[identity] if y.size > 1 and a.size == 1: for key0, c in field0.constructs.filter_by_axis( - "or", a.axis, todict=True + a.axis, axis_mode="or", todict=True ).items(): removed_refs0 = field0.del_coordinate_reference( construct=key0, default=None @@ -7158,7 +7147,7 @@ def weights( field.del_coordinate_reference(key) for key in field.constructs.filter_by_axis( - "or", *not_needed_axes, todict=True + *not_needed_axes, axis_mode="or", todict=True ): field.del_construct(key) @@ -10311,9 +10300,6 @@ def collapse( f"Changing axis size to 1: {axis}" ) # pragma: no cover - # dim = dimension_coordinates.filter_by_axis( - # axis, mode="exact", view=True - # ).value(None) dim = f.dimension_coordinate( filter_by_axis=(axis,), default=None ) @@ -12619,7 +12605,6 @@ def indices(self, *mode, **kwargs): logger.debug( " mask.shape = {}".format(mask.shape) ) # pragma: no cover - # --- End: for indices = tuple(parse_indices(self.shape, tuple(indices))) @@ -12863,95 +12848,6 @@ def set_data( return f - # def domain_axis( - # self, - # *identity, - # default=ValueError(), - # key=False, - # item=False, - # **filter_kwargs, - # ): - # """Select a domain axis construct. - # - # {{unique construct}} - # - # .. versionadded:: 1.8.9.0 - # - # .. seealso:: `construct`, `domain_axes` - # - # :Parameters: - # - # identity: optional - # Select domain axis constructs that have an identity, - # defined by their `!identities` methods, that matches - # any of the given values. - # - # Additionally, the values are matched against construct - # identifiers, with or without the ``'key%'`` prefix. - # - # Additionally, if for a given value - # ``f.coordinates(value, filter_by_naxes=(1,))`` returns - # 1-d coordinate constructs that all span the same - # domain axis construct then that domain axis construct - # is selected. See `coordinates` for details. - # - # Additionally, if there is a `Field` data array and a - # value matches the integer position of an array - # dimension, then the corresponding domain axis - # construct is selected. - # - # If no values are provided then all domain axis - # constructs are selected. - # - # {{value match}} - # - # {{displayed identity}} - # - # {{key: `bool`, optional}} - # - # {{item: `bool`, optional}} - # - # default: optional - # Return the value of the *default* parameter if there - # is no unique construct. 
- # - # {{default Exception}} - # - # {{filter_kwargs: optional}} - # - # :Returns: - # - # {{Returns construct}} - # - # - # **Examples:** - # - # """ - # filter_kwargs["todict"] = True - # - # c = self.domain_axes(*identity, **filter_kwargs) - # - # # Return construct, or key, or both, or default - # n = len(c) - # if n == 1: - # k, construct = c.popitem() - # if key: - # return k - # - # if item: - # return k, construct - # - # return construct - # - # if default is None: - # return default - # - # return self._default( - # default, - # f"{self.__class__.__name__}.domain_axis() can't return {n} " - # "constructs", - # ) - def domain_mask(self, **kwargs): """Return a boolean field that is True where criteria are met. @@ -13155,10 +13051,9 @@ def compute_vertical_coordinates( cr.set_coordinate(key) logger.debug( - "Non-parametric coordinates construct key: {!r}\n" - "Updated coordinate reference construct:\n{}".format( - key, cr.dump(display=False, _level=1) - ) + "Non-parametric coordinates construct key: {key!r}\n" + "Updated coordinate reference construct:\n" + f"{cr.dump(display=False, _level=1)}" ) # pragma: no cover return f @@ -13316,7 +13211,9 @@ def match_by_construct(self, *identities, OR=False, **conditions): n = 0 - self_cell_methods = self.cell_methods(todict=True) # TODO + # TODO - replace ().ordered() with (todict=True) when Python + # 3.6 is deprecated + self_cell_methods = self.cell_methods().ordered() for identity in identities: cms = False @@ -13344,9 +13241,9 @@ def match_by_construct(self, *identities, OR=False, **conditions): }: key = tuple(self_cell_methods)[-1] filtered = self.cell_method( - identity, filter_by_key=(key,) + identity, filter_by_key=(key,), default=None ) - if not filtered: + if filtered is None: if not OR: return False @@ -14379,7 +14276,7 @@ def cumsum( # Retrieve the axis axis_key = self.domain_axis(axis, key=True) if axis_key is None: - raise ValueError("Invalid axis specifier: {!r}".format(axis)) + raise ValueError(f"Invalid axis specifier: {axis!r}") # Construct new field f = _inplace_enabled_define_and_cleanup(self) @@ -14433,11 +14330,11 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): :Parameters: axes: (sequence of) `str` or `int`, optional - Select the domain axes to flip, defined by the domain axes - that would be selected by passing each given axis + Select the domain axes to flip, defined by the domain + axes that would be selected by passing each given axis description to a call of the field construct's - `domain_axis` method. For example, for a value of ``'X'``, - the domain axis construct returned by + `domain_axis` method. For example, for a value of + ``'X'``, the domain axis construct returned by ``f.domain_axis('X')`` is selected. If no axes are provided then all axes are flipped. @@ -14451,8 +14348,8 @@ def flip(self, axes=None, inplace=False, i=False, **kwargs): :Returns: `Field` or `None` - The field construct with flipped axes, or `None` if the - operation was in-place. + The field construct with flipped axes, or `None` if + the operation was in-place. 
**Examples:** @@ -14711,16 +14608,12 @@ def swapaxes(self, axis0, axis1, inplace=False, i=False): if da_key0 not in data_axes: raise ValueError( - "Can't swapaxes {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axis0 - ) + f"Can't swapaxes: Bad axis specification: {axis0!r}" ) if da_key1 not in data_axes: raise ValueError( - "Can't swapaxes {}: Bad axis specification: {!r}".format( - self.__class__.__name__, axis1 - ) + f"Can't swapaxes: Bad axis specification: {axis1!r}" ) axis0 = data_axes.index(da_key0) @@ -14838,54 +14731,6 @@ def transpose( # Transpose the field's data array return super().transpose(iaxes, constructs=constructs, inplace=inplace) - # @_inplace_enabled(default=False) - # def uncompress(self, inplace=False): - # '''Uncompress the construct. - # - # Compression saves space by identifying and removing unwanted - # missing data. Such compression techniques store the data more - # efficiently and result in no precision loss. - # - # Whether or not the construct is compressed does not alter its - # functionality nor external appearance. - # - # The following type of compression are available: - # - # * Ragged arrays for discrete sampling geometries (DSG). Three - # different types of ragged array representation are - # supported. - # - # .. - # - # * Compression by gathering. - # - # .. versionadded:: 3.0.6 - # - # .. seealso:: `cf.write`, `compress`, `flatten`, `varray` - # - # :Parameters: - # - # {{inplace: `bool`, optional}} - # - # :Returns: - # - # `Field` or `None` - # The uncompressed field construct, or `None` if the - # operation was in-place. - # - # **Examples:** - # - # TODO - # - # ''' - # f = _inplace_enabled_define_and_cleanup(self) - # super(Field, f).uncompress(inplace=True) - # - # for c in f.constructs.filter_by_data().values(): - # c.uncompress(inplace=True) - # - # return f - @_deprecated_kwarg_check("i") @_inplace_enabled(default=False) def unsqueeze(self, inplace=False, i=False, axes=None, **kwargs): @@ -14987,6 +14832,8 @@ def cell_method( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -14995,6 +14842,8 @@ def cell_method( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -15049,6 +14898,8 @@ def field_ancillary( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -15057,6 +14908,8 @@ def field_ancillary( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -15627,11 +15480,6 @@ def halo( [0.006 0.006 0.036 0.019 0.035 0.018 0.037 0.034 0.013 0.013]] """ - if verbose: - _kwargs = ["{}={!r}".format(k, v) for k, v in locals().items()] - _ = "{}.halo(".format(self.__class__.__name__) - print("{}{})".format(_, (",\n" + " " * len(_)).join(_kwargs))) - f = _inplace_enabled_define_and_cleanup(self) # Set the halo size for each axis. 
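A minimal sketch of the halo() behaviour that the tidied hunks here implement, assuming cf.example_field(0) with data shape (5, 8):

    import cf

    f = cf.example_field(0)
    g = f.halo(1)             # one-point halo on every data axis
    print(f.shape, g.shape)   # expected: (5, 8) (7, 10)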
@@ -15646,9 +15494,7 @@ def halo( if not set(data_axes).issuperset(axis_halo): raise ValueError( - "Can't apply halo: Bad axis specification: {!r}".format( - size - ) + f"Can't apply halo: Bad axis specification: {size!r}" ) else: if axes is None: @@ -15677,16 +15523,12 @@ def halo( try: i_X = data_axes.index(X) except ValueError: - raise ValueError( - "Axis {!r} is not spanned by the data".format(X_axis) - ) + raise ValueError(f"Axis {X_axis!r} is not spanned by the data") try: i_Y = data_axes.index(Y) except ValueError: - raise ValueError( - "Axis {!r} is not spanned by the data".format(Y_axis) - ) + raise ValueError(f"Axis {Y_axis!r} is not spanned by the data") tripolar["X"] = i_X tripolar["Y"] = i_Y @@ -15737,9 +15579,6 @@ def halo( verbose=verbose, ) - if verbose: - print("Returns:{!r}".format(f)) # pragma: no cover - return f def percentile( @@ -16020,7 +15859,7 @@ def percentile( # ------------------------------------------------------------ if other_axes: for key, c in self.constructs.filter_by_axis( - "subset", *other_axes, todict=True + *other_axes, axis_mode="subset", todict=True ).items(): c_axes = self.get_data_axes(key) out.set_construct(c, axes=c_axes, key=key) @@ -16330,7 +16169,7 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Flatten the constructs that span all of the flattened axes, # and no others. for key, c in f.constructs.filter_by_axis( - "and", *axes, todict=True + *axes, axis_mode="and", todict=True ).items(): c_axes = f.get_data_axes(key) c_iaxes = sorted( @@ -16345,7 +16184,9 @@ def flatten(self, axes=None, return_axis=False, inplace=False): # Remove constructs that span some, but not all, of the # flattened axes - for key in f.constructs.filter_by_axis("or", *axes, todict=True): + for key in f.constructs.filter_by_axis( + *axes, axis_mode="or", todict=True + ): f.del_construct(key) # Remove the domain axis constructs for the flattened axes @@ -16722,7 +16563,7 @@ def where( key = g.construct_key( construct, default=ValueError( - "Can't identify unique {!r} construct".format(construct) + f"Can't identify unique {construct!r} construct" ), ) construct = g.constructs[key] @@ -16735,7 +16576,7 @@ def where( construct_data = construct.get_data(None, _fill_value=False) if construct_data is None: - raise ValueError("{!r} has no data".format(construct)) + raise ValueError(f"{construct!r} has no data") if construct_data_axes != data_axes: s = [ @@ -18057,9 +17898,7 @@ def regridc( try: dst_coords.append(dst[axis]) except KeyError: - raise ValueError( - "Axis {!r} not specified in dst.".format(axis) - ) + raise ValueError(f"Axis {axis!r} not specified in dst.") dst_axis_keys = None else: @@ -18076,9 +17915,7 @@ def regridc( else: raise ValueError( "Units of source and destination domains are not " - "equivalent: {!r}, {!r}".format( - src_coord.Units, dst_coord.Units - ) + f"equivalent: {src_coord.Units!r}, {dst_coord.Units!r}" ) # Get the axis indices and their order for the source field @@ -18479,9 +18316,9 @@ def derivative( long_name = f.get_property("long_name", None) if standard_name is not None: del f.standard_name - f.long_name = "derivative of {}".format(standard_name) + f.long_name = f"derivative of {standard_name}" elif long_name is not None: - f.long_name = "derivative of {}".format(long_name) + f.long_name = f"derivative of {long_name}" return f diff --git a/cf/fieldlist.py b/cf/fieldlist.py index c15810908b..633791e903 100644 --- a/cf/fieldlist.py +++ b/cf/fieldlist.py @@ -1,54 +1,24 @@ -from copy import copy - -import logging - 
-import cfdm +from . import mixin +from . import ConstructList from .functions import ( _DEPRECATION_ERROR, - _DEPRECATION_ERROR_ARG, _DEPRECATION_ERROR_KWARGS, _DEPRECATION_ERROR_METHOD, _DEPRECATION_ERROR_DICT, ) -from .decorators import ( - _deprecated_kwarg_check, - _manage_log_level_via_verbosity, -) - - -logger = logging.getLogger(__name__) - -class FieldList(list, cfdm.Container): +class FieldList(mixin.FieldDomainList, ConstructList): """An ordered sequence of fields. Each element of a field list is a field construct. A field list supports the python list-like operations (such as - indexing and methods like `!append`). - - >>> fl = cf.FieldList() - >>> len(fl) - 0 - >>> f - - >>> fl = cf.FieldList(f) - >>> len(fl) - 1 - >>> fl = cf.FieldList([f, f]) - >>> len(fl) - 2 - >>> fl = cf.FieldList(cf.FieldList([f] * 3)) - >>> len(fl) - 3 - >>> len(fl + fl) - 6 - - These methods provide functionality similar to that of a - :ref:`built-in list `. The main difference - is that when a field element needs to be assessed for equality its + indexing and methods like `!append`). These methods provide + functionality similar to that of a :ref:`built-in list + `. The main difference is that when a field + construct element needs to be assesed for equality its `~cf.Field.equals` method is used, rather than the ``==`` operator. @@ -60,377 +30,20 @@ def __init__(self, fields=None): :Parameters: fields: (sequence of) `Field`, optional - Create a new field list with these fields. - - """ - super(cfdm.Container, self).__init__() - - if fields is not None: - if getattr(fields, "construct_type", None) == "field": - self.append(fields) - else: - self.extend(fields) - - def __call__(self, *identities): - """Alias for `cf.FieldList.select_by_identity`.""" - return self.select_by_identity(*identities) - - def __repr__(self): - """Called by the `repr` built-in function. - - x.__repr__() <==> repr(x) - - """ - out = [repr(f) for f in self] - out = ",\n ".join(out) - return "[" + out + "]" - - def __str__(self): - """Called by the `str` built-in function. - - x.__str__() <==> str(x) - - """ - return repr(self) - - def __docstring_method_exclusions__(self): - """Return the names of methods to exclude from docstring - substitutions. - - See `_docstring_method_exclusions` for details. + Create a new list with these field constructs. """ - return ( - "append", - "extend", - "insert", - "pop", - "reverse", - "clear", - ) - - # ---------------------------------------------------------------- - # Overloaded list methods - # ---------------------------------------------------------------- - def __add__(self, x): - """The binary arithmetic operation ``+`` - - f.__add__(x) <==> f + x - - :Returns: - - `FieldList` - - **Examples:** - - >>> h = f + g - >>> f += g - - """ - return type(self)(list.__add__(self, x)) - - def __contains__(self, y): - """Called to implement membership test operators. - - x.__contains__(y) <==> y in x - - Each field in the field list is compared with the field's - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - Note that ``y in fl`` is equivalent to ``any(f.equals(y) for f in fl)``. 
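A brief sketch of the list-like behaviour described above, assuming the equality-based membership test is retained by the new `ConstructList` base class:

    import cf

    f = cf.example_field(0)
    fl = cf.FieldList([f, f.copy()])
    len(fl)      # 2
    f in fl      # True: membership uses Field.equals(), not the == operator
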
- - """ - for f in self: - if f.equals(y): - return True - - return False - - def __mul__(self, x): - """The binary arithmetic operation ``*`` - - f.__mul__(x) <==> f * x - - :Returns: - - `FieldList` - - **Examples:** - - >>> h = f * 2 - >>> f *= 2 - - """ - return type(self)(list.__mul__(self, x)) - - def __eq__(self, other): - """The rich comparison operator ``==`` - - f.__eq__(x) <==> f==x - - Each field in the field list is compared with the field's - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - Note that ``f==x`` is equivalent to ``f.equals(x)``. - - :Returns: - - `bool` - - """ - return self.equals(other) - - def __getslice__(self, i, j): - """Called to implement evaluation of f[i:j] - - f.__getslice__(i, j) <==> f[i:j] - - :Returns: - - `FieldList` - - **Examples:** - - >>> g = f[0:1] - >>> g = f[1:-4] - >>> g = f[:1] - >>> g = f[1:] - - """ - return type(self)(list.__getslice__(self, i, j)) - - def __getitem__(self, index): - """Called to implement evaluation of f[index] - - f.__getitem_(index) <==> f[index] - - :Returns: - - `Field` or `FieldList` - If *index* is an integer then a field construct is - returned. If *index* is a slice then a field list is returned, - which may be empty. - - **Examples:** - - >>> g = f[0] - >>> g = f[-1:-4:-1] - >>> g = f[2:2:2] - - """ - out = list.__getitem__(self, index) - - if isinstance(out, list): - return type(self)(out) - - return out - - def __ne__(self, other): - """The rich comparison operator ``!=`` - - f.__ne__(x) <==> f!=x - - Each field in the field list is compared with the field's - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - Note that ``f!=x`` is equivalent to ``not f.equals(x)``. - - :Returns: - - `bool` - - """ - return not self.equals(other) - - # ??? - __len__ = list.__len__ - __setitem__ = list.__setitem__ - append = list.append - extend = list.extend - insert = list.insert - pop = list.pop - reverse = list.reverse - sort = list.sort + super().__init__(constructs=fields) # ---------------------------------------------------------------- # Methods # ---------------------------------------------------------------- - def close(self): - """Close all files referenced by each field construct. - - Note that a closed file will be automatically reopened if its - contents are subsequently required. - - :Returns: - - `None` - - **Examples:** - - >>> fl.close() - - """ - for f in self: - f.close() - - def count(self, value): - """Return number of occurrences of value. - - Each field in the field list is compared with the field's - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - Note that ``fl.count(value)`` is equivalent to - ``sum(f.equals(value) for f in fl)``. - - .. seealso:: `cf.Field.equals`, `list.count` - - **Examples:** - - >>> f = cf.FieldList([a, b, c, a]) - >>> f.count(a) - 2 - >>> f.count(b) - 1 - >>> f.count(a+1) - 0 - - """ - return len([None for f in self if f.equals(value)]) - - def index(self, value, start=0, stop=None): - """Return first index of value. - - Each field in the field list is compared with the field's - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - An exception is raised if there is no such field. - - .. 
seealso:: `list.index` - - """ - if start < 0: - start = len(self) + start - - if stop is None: - stop = len(self) - elif stop < 0: - stop = len(self) + stop - - for i, f in enumerate(self[start:stop]): - if f.equals(value): - return i + start - - raise ValueError( - "{0!r} is not in {1}".format(value, self.__class__.__name__) - ) - - def remove(self, value): - """Remove first occurrence of value. - - Each field in the field list is compared with its - `~cf.Field.equals` method, as opposed to the ``==`` operator. - - .. seealso:: `list.remove` - - """ - for i, f in enumerate(self): - if f.equals(value): - del self[i] - return - - raise ValueError( - "{0}.remove(x): x not in {0}".format(self.__class__.__name__) - ) - - def sort(self, key=None, reverse=False): - """Sort of the field list in place. - - By default the field list is sorted by the identities of its field - construct elements. - - The sort is stable. - - .. versionadded:: 1.0.4 - - .. seealso:: `reverse` - - :Parameters: - - key: function, optional - Specify a function of one argument that is used to extract - a comparison key from each field construct. By default the - field list is sorted by field identity, i.e. the default - value of *key* is ``lambda f: f.identity()``. - - reverse: `bool`, optional - If set to `True`, then the field list elements are sorted - as if each comparison were reversed. - - :Returns: - - `None` - - **Examples:** - - >>> fl - [, - , - , - ] - >>> fl.sort() - >>> fl - [, - , - , - ] - >>> fl.sort(reverse=True) - >>> fl - [, - , - , - ] - - >>> [f.datum(0) for f in fl] - [masked, - -0.12850454449653625, - -0.12850454449653625, - 236.51275634765625] - >>> fl.sort(key=lambda f: f.datum(0), reverse=True) - >>> [f.datum(0) for f in fl] - [masked, - 236.51275634765625, - -0.12850454449653625, - -0.12850454449653625] - - >>> from operator import attrgetter - >>> [f.long_name for f in fl] - ['Meridional Overturning Streamfunction', - 'U COMPNT OF WIND ON PRESSURE LEVELS', - 'U COMPNT OF WIND ON PRESSURE LEVELS', - 'air_temperature'] - >>> fl.sort(key=attrgetter('long_name')) - >>> [f.long_name for f in fl] - ['air_temperature', - 'Meridional Overturning Streamfunction', - 'U COMPNT OF WIND ON PRESSURE LEVELS', - 'U COMPNT OF WIND ON PRESSURE LEVELS'] - - """ - if key is None: - key = lambda f: f.identity() - - return super().sort(key=key, reverse=reverse) - - def __deepcopy__(self, memo): - """Called by the `copy.deepcopy` standard library function.""" - return self.copy() - def concatenate(self, axis=0, _preserve=True): """Join the sequence of fields together. - This is different to `cf.aggregate` because it does not account - for all metadata. For example, it assumes that the axis order is - the same in each field. + This is different to `cf.aggregate` because it does not + account for all metadata. For example, it assumes that the + axis order is the same in each field. .. versionadded:: 1.0 @@ -449,510 +62,17 @@ def concatenate(self, axis=0, _preserve=True): """ return self[0].concatenate(self, axis=axis, _preserve=_preserve) - def copy(self, data=True): - """Return a deep copy. - - ``f.copy()`` is equivalent to ``copy.deepcopy(f)``. - - :Returns: - - The deep copy. 
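For illustration, the deep-copy semantics referred to here (a sketch, assuming `copy` and `equals` behave as documented on the new base class):

    import cf

    fl = cf.FieldList([cf.example_field(0)])
    gl = fl.copy()
    gl is fl       # False: a new list of new field constructs
    fl.equals(gl)  # True: elements compared with Field.equals()
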
- - **Examples:** - - >>> g = f.copy() - >>> g is f - False - >>> f.equals(g) - True - >>> import copy - >>> h = copy.deepcopy(f) - >>> h is f - False - >>> f.equals(g) - True - - """ - return type(self)([f.copy(data=data) for f in self]) - - @_deprecated_kwarg_check("traceback") - @_manage_log_level_via_verbosity - def equals( - self, - other, - rtol=None, - atol=None, - verbose=None, - ignore_data_type=False, - ignore_fill_value=False, - ignore_properties=(), - ignore_compression=False, - ignore_type=False, - ignore=(), - traceback=False, - unordered=False, - ): - """Whether two field lists are the same. - - Equality requires the two field lists to have the same length and - for the field construct elements to be equal pair-wise, using - their `~cf.Field.equals` methods. - - Any type of object may be tested but, in general, equality is only - possible with another field list, or a subclass of one. See the - *ignore_type* parameter. - - Equality is between two field constructs is strict by - default. This means that for two field constructs to be considered - equal they must have corresponding metadata constructs and for - each pair of constructs: - - * the same descriptive properties must be present, with the same - values and data types, and vector-valued properties must also - have same the size and be element-wise equal (see the - *ignore_properties* and *ignore_data_type* parameters), and - - .. - - * if there are data arrays then they must have same shape and data - type, the same missing data mask, and be element-wise equal (see - the *ignore_data_type* parameter). - - {{equals tolerance}} - - If data arrays are compressed then the compression type and the - underlying compressed arrays must be the same, as well as the - arrays in their uncompressed forms. See the *ignore_compression* - parameter. - - NetCDF elements, such as netCDF variable and dimension names, do - not constitute part of the CF data model and so are not checked on - any construct. - - :Parameters: - other: - The object to compare for equality. - - {{atol: number, optional}} - - {{rtol: number, optional}} - - ignore_fill_value: `bool`, optional - If `True` then the "_FillValue" and "missing_value" - properties are omitted from the comparison, for the field - construct and metadata constructs. - - {{verbose: `int` or `str` or `None`, optional}} - - ignore_properties: sequence of `str`, optional - The names of properties of the field construct (not the - metadata constructs) to omit from the comparison. Note - that the "Conventions" property is always omitted by - default. - - {{ignore_data_type: `bool`, optional}} - - {{ignore_compression: `bool`, optional}} - - unordered: `bool`, optional - TODO - - :Returns: - - `bool` - Whether the two field lists are equal. 
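A hedged sketch of the pair-wise versus set-wise comparison described here, assuming the `unordered` keyword is retained by the new base class:

    import cf

    f0, f1 = cf.example_field(0), cf.example_field(1)
    fl = cf.FieldList([f0, f1])
    gl = cf.FieldList([f1, f0])
    fl.equals(gl)                  # False: elements are compared pair-wise, in order
    fl.equals(gl, unordered=True)  # True: elements are matched set-wise by identity
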
- - **Examples:** - - >>> fl.equals(fl) - True - >>> fl.equals(fl.copy()) - True - >>> fl.equals(fl[:]) - True - >>> fl.equals('not a FieldList instance') - False - - """ - if ignore: - _DEPRECATION_ERROR_KWARGS( - self, - "equals", - {"ignore": ignore}, - "Use keyword 'ignore_properties' instead.", - ) # pragma: no cover - - # Check for object identity - if self is other: - return True - - # Check that each object is of compatible type - if ignore_type: - if not isinstance(other, self.__class__): - other = type(self)(source=other, copy=False) - elif not isinstance(other, self.__class__): - logger.info( - "{0}: Incompatible type: {1}".format( - self.__class__.__name__, other.__class__.__name__ - ) - ) # pragma: no cover - return False - - # Check that there are equal numbers of fields - len_self = len(self) - if len_self != len(other): - logger.info( - "{0}: Different numbers of field construct: " - "{1}, {2}".format( - self.__class__.__name__, len_self, len(other) - ) - ) # pragma: no cover - return False - - if not unordered or len_self == 1: - # ---------------------------------------------------- - # Check the lists pair-wise - # ---------------------------------------------------- - for i, (f, g) in enumerate(zip(self, other)): - if not f.equals( - g, - rtol=rtol, - atol=atol, - ignore_fill_value=ignore_fill_value, - ignore_properties=ignore_properties, - ignore_compression=ignore_compression, - ignore_data_type=ignore_data_type, - ignore_type=ignore_type, - verbose=verbose, - ): - logger.info( - "{0}: Different field constructs at element {1}: " - "{2!r}, {3!r}".format(self.__class__.__name__, i, f, g) - ) # pragma: no cover - return False - else: - # ---------------------------------------------------- - # Check the lists set-wise - # ---------------------------------------------------- - # Group the variables by identity - self_identity = {} - for f in self: - self_identity.setdefault(f.identity(), []).append(f) - - other_identity = {} - for f in other: - other_identity.setdefault(f.identity(), []).append(f) - - # Check that there are the same identities - if set(self_identity) != set(other_identity): - logger.info( - "{}: Different sets of identities: {}, {}".format( - self.__class__.__name__, - set(self_identity), - set(other_identity), - ) - ) # pragma: no cover - return False - - # Check that there are the same number of variables - # for each identity - for identity, fl in self_identity.items(): - gl = other_identity[identity] - if len(fl) != len(gl): - logger.info( - "{0}: Different numbers of {1!r} {2}s: " - "{3}, {4}".format( - self.__class__.__name__, - identity, - fl[0].__class__.__name__, - len(fl), - len(gl), - ) - ) # pragma: no cover - return False - - # For each identity, check that there are matching pairs - # of equal fields. - for identity, fl in self_identity.items(): - gl = other_identity[identity] - - for f in fl: - found_match = False - for i, g in enumerate(gl): - if f.equals( - g, - rtol=rtol, - atol=atol, - ignore_fill_value=ignore_fill_value, - ignore_properties=ignore_properties, - ignore_compression=ignore_compression, - ignore_data_type=ignore_data_type, - ignore_type=ignore_type, - verbose=verbose, - ): - found_match = True - del gl[i] - break - - if not found_match: - logger.info( - "{0}: No {1} equal to: {2!r}".format( - self.__class__.__name__, g.__class__.__name__, f - ) - ) # pragma: no cover - return False - - # ------------------------------------------------------------ - # Still here? 
Then the field lists are equal - # ------------------------------------------------------------ - return True - - def select_by_construct(self, *identities, OR=False, **conditions): - """Select field constructs by metadata constructs. - - To find the inverse of the selection, use a list comprehension - with the `~cf.Field.match_by_construct` method of the field - constucts. For example, to select all field constructs that do - *not* have a "latitude" metadata construct: - - >>> gl = cf.FieldList(f for f in fl - ... if not f.match_by_constructs('latitude')) - - .. note:: The API changed at version 3.1.0 - - .. versionadded:: 3.0.0 - - .. seealso: `select`, `__call__`, `select_by_units`, - `select_by_naxes`, `select_by_rank`, - `select_by_property`, `cf.Field.match_by_identity`, - `cf.Field.subspace` - - :Parameters: - - identities: optional - Identify the metadata constructs that have any of the - given identities or construct keys. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - If a cell method construct identity is given (such as - ``'method:mean'``) then it will only be compared with the - most recently applied cell method operation. - - Alternatively, one or more cell method constucts may be - identified in a single string with a CF-netCDF cell - methods-like syntax for describing both the collapse - dimensions, the collapse method, and any cell method - construct qualifiers. If N cell methods are described in - this way then they will collectively identify the N most - recently applied cell method operations. For example, - ``'T: maximum within years T: mean over years'`` will be - compared with the most two most recently applied cell - method operations. - - *Parameter example:* - ``'measure:area'`` - - *Parameter example:* - ``'latitude'`` - - *Parameter example:* - ``'long_name=Longitude'`` - - *Parameter example:* - ``'domainancillary2', 'ncvar%areacello'`` - - conditions: optional - Identify the metadata constructs that have any of the - given identities or construct keys, and whose data satisfy - conditions. - - A construct identity or construct key (as defined by the - *identities* parameter) is given as a keyword name and a - condition on its data is given as the keyword value. - - The condition is satisfied if any of its data values - equals the value provided. 
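To make the identity/condition distinction concrete, a usage sketch with illustrative values (`cf.ge` is the library's "greater than or equal" query):

    import cf

    fl = cf.FieldList([cf.example_field(0), cf.example_field(1)])

    fl.select_by_construct('latitude')         # fields with a latitude construct
    fl.select_by_construct(latitude=cf.ge(0))  # ... whose data satisfy a condition
    fl.select_by_construct('T', 'X', OR=True)  # match at least one of the criteria
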
- - *Parameter example:* - ``longitude=180.0`` - - *Parameter example:* - ``time=cf.dt('1959-12-16')`` - - *Parameter example:* - ``latitude=cf.ge(0)`` - - *Parameter example:* - ``latitude=cf.ge(0), air_pressure=500`` - - *Parameter example:* - ``**{'latitude': cf.ge(0), 'long_name=soil_level': 4}`` - - OR: `bool`, optional - If True then return `True` if at least one metadata - construct matches at least one of the criteria given by - the *identities* or *conditions* arguments. By default - `True` is only returned if the field constructs matches - each of the given criteria. - - mode: deprecated at version 3.1.0 - Use the *OR* parameter instead. - - constructs: deprecated at version 3.1.0 - - :Returns: - - `bool` - The matching field constructs. - - **Examples:** - - TODO - - """ - # if constructs: - # for key, value in constructs.items(): - # if value is None: - # message = ("Since its value is None, use {!r} as a " - # "positional argument instead".format(value)) - # else: - # message = ("Evaluating criteria on data values is no " - # "longer possible with this method.") - # - # _DEPRECATION_ERROR_KWARGS(self, 'select_by_construct', - # kwargs={key: value}, - # message=message, - # version='3.1.0') # pragma: no cover - - if identities: - if identities[0] == "or": - _DEPRECATION_ERROR_ARG( - self, - "select_by_construct", - "or", - message="Use 'OR=True' instead.", - version="3.1.0", - ) # pragma: no cover - - if identities[0] == "and": - _DEPRECATION_ERROR_ARG( - self, - "select_by_construct", - "and", - message="Use 'OR=False' instead.", - version="3.1.0", - ) # pragma: no cover - - return type(self)( - f - for f in self - if f.match_by_construct(*identities, OR=OR, **conditions) - ) - - def select_by_identity(self, *identities): - """Select field constructs by identity. - - To find the inverse of the selection, use a list comprehension - with the `~cf.Field.match_by_identity` method of the field - constucts. For example, to select all field constructs whose - identity is *not* ``'air_temperature'``: - - >>> gl = cf.FieldList(f for f in fl - ... if not f.match_by_identity('air_temperature')) - - .. versionadded:: 3.0.0 - - .. seealso:: `select`, `__call__`, `select_by_units`, - `select_by_construct`, `select_by_naxes`, - `select_by_rank`, `select_by_property`, - `cf.Field.match_by_identity` - - :Parameters: - - identities: optional - Select field constructs. By default all field constructs - are selected. May be one or more of: - - * The identity of a field construct. - - A construct identity is specified by a string (e.g. - ``'air_temperature'``, ``'long_name=Air Temperature', - ``'ncvar%tas'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^air_')``) that selects the relevant - constructs whose identities match via `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - five identities: - - >>> x.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identities* argument. - - :Returns: - - `FieldList` - The matching field constructs. 
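A short sketch of identity-based selection (the `__call__` alias is assumed to be retained by the new base class):

    import re
    import cf

    fl = cf.FieldList([cf.example_field(0), cf.example_field(1)])

    fl.select_by_identity('air_temperature')
    fl.select_by_identity(re.compile('^air_'))  # regular expressions match via re.search
    fl('air_temperature')                       # __call__ aliases select_by_identity
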
- - **Examples:** - - >>> fl - [, - ] - >>> fl.select('air_temperature') - [] - - """ - return type(self)(f for f in self if f.match_by_identity(*identities)) - def select_by_naxes(self, *naxes): """Select field constructs by property. To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_naxes` method of the field constucts. For - example, to select all field constructs which do *not* have - 3-dimensional data: + with `~cf.Field.match_by_naxes` method of the constuct + elements. For example, to select all constructs which do *not* + have 3-dimensional data: - >>> gl = cf.FieldList(f for f in fl if not f.match_by_naxes(3)) + >>> gl = cf.FieldList( + ... f for f in fl if not f.match_by_naxes(3) + ... ) .. versionadded:: 3.0.0 @@ -966,101 +86,10 @@ def select_by_naxes(self, *naxes): Select field constructs whose data spans a particular number of domain axis constructs. - A number of domain axis constructs is given by an `int`. - - If no numbers are provided then all field constructs are - selected. - - :Returns: - - `FieldList` - The matching field constructs. - - **Examples:** - - TODO - - """ - return type(self)(f for f in self if f.match_by_naxes(*naxes)) - - def select_by_rank(self, *ranks): - """Select field constructs by the number of domain axis - constructs. - - .. versionadded:: 3.0.0 - - .. seealso: `select`, `__call__`, `select_by_units`, - `select_by_naxes`, `select_by_construct`, - `select_by_property`, `cf.Field.match_by_identity` - - :Parameters: - - ranks: optional - Define conditions on the number of domain axis constructs. - - A condition is one of: - - * `int` - * a `Query` object - - The condition is satisfied if the number of domain axis - constructs equals the condition value. - - *Parameter example:* - To see if the field construct has 4 domain axis - constructs: ``4`` - - *Parameter example:* - To see if the field construct has at least 3 domain axis - constructs: ``cf.ge(3)`` - - :Returns: - - `bool` - The matching field constructs. - - **Examples:** - - TODO - - """ - - return type(self)(f for f in self if f.match_by_rank(*ranks)) - - def select_by_ncvar(self, *ncvars): - """Select field constructs by netCDF variable name. - - To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_ncvar` method of the field constucts. For - example, to select all field constructs which do *not* have a - netCDF name of 'tas': - - >>> gl = cf.FieldList(f for f in fl if not f.match_by_ncvar('tas')) - - .. versionadded:: 3.0.0 - - .. seealso:: `select`, `select_by_identity`, - `select_by_construct`, `select_by_naxes`, - `select_by_rank`, `select_by_units` - - :Parameters: - - ncvars: optional - Select field constructs. May be one or more: - - * The netCDF name of a field construct. - - A field construct is selected if it matches any of the - given names. - - A netCDF variable name is specified by a string (e.g. - ``'tas'``, etc.); a `Query` object - (e.g. ``cf.eq('tas')``); or a compiled regular expression - (e.g. ``re.compile('^air_')``) that selects the field - constructs whose netCDF variable names match via - `re.search`. + A number of domain axis constructs is given by an + `int`. - If no netCDF variable names are provided then all field + If no numbers are provided then all field constructs are selected. 
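For example (an illustrative sketch; a `Query` condition is assumed to be accepted, as it is for `match_by_naxes`):

    import cf

    fl = cf.FieldList([cf.example_field(0), cf.example_field(1)])

    fl.select_by_naxes(2)         # fields whose data span exactly two domain axes
    fl.select_by_naxes(cf.ge(3))  # fields whose data span at least three
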
:Returns: @@ -1070,100 +99,22 @@ def select_by_ncvar(self, *ncvars): **Examples:** - >>> fl = cf.FieldList([cf.example_field(0), cf.example_field(1)]) - >>> fl - [, - ] - >>> f[0].nc_get_variable() - 'humidity' - >>> f[1].nc_get_variable() - 'temp' - - >>> fl.select_by_ncvar('humidity') - [] - >>> fl.select_by_ncvar('humidity', 'temp') - [, - ] - >>> fl.select_by_ncvar() - [, - ] - - >>> import re - >>> fl.select_by_ncvar(re.compile('^hum')) - [] - - """ - return type(self)(f for f in self if f.match_by_ncvar(*ncvars)) - - def select_by_property(self, *mode, **properties): - """Select field constructs by property. - - To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_property` method of the field - constucts. For example, to select all field constructs which do - *not* have a long_name property of 'Air Pressure': - - >>> gl = cf.FieldList(f for f in fl if not - ... f.match_by_property(long_name='Air Pressure')) - - .. versionadded:: 3.0.0 - - .. seealso:: `select`, `select_by_identity`, - `select_by_construct`, `select_by_naxes`, - `select_by_rank`, `select_by_units` - - :Parameters: - - mode: optional - Define the behaviour when multiple properties are - provided. - - By default (or if the *mode* parameter is ``'and'``) a - field construct is selected if it matches all of the given - properties, but if the *mode* parameter is ``'or'`` then a - field construct will be selected when at least one of its - properties matches. - - properties: optional - Select field constructs. May be one or more of: - - * The property of a field construct. - - By default a field construct is selected if it matches all - of the given properties, but it may alternatively be - selected when at least one of its properties matches (see - the *mode* positional parameter). - - A property value is given by a keyword parameter of the - property name. The value may be a scalar or vector - (e.g. ``'air_temperature'``, ``4``, ``['foo', 'bar']``); - or a compiled regular expression - (e.g. ``re.compile('^ocean')``), for which all constructs - whose methods match (via `re.search`) are selected. - - :Returns: - - `FieldList` - The matching field constructs. - - **Examples:** - TODO """ - return type(self)( - f for f in self if f.match_by_property(*mode, **properties) - ) + return type(self)(f for f in self if f.match_by_naxes(*naxes)) def select_by_units(self, *units, exact=True): """Select field constructs by units. To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_units` method of the field constucts. For - example, to select all field constructs whose units are *not* - ``'km'``: + with `~cf.Field.match_by_units` method of the constuct + elements. For example, to select all constructs whose units + are *not* ``'km'``: - >>> gl = cf.FieldList(f for f in fl if not f.match_by_units('km')) + >>> gl = cf.FieldList( + ... f for f in fl if not f.match_by_units('km') + ... ) .. versionadded:: 3.0.0 @@ -1174,24 +125,24 @@ def select_by_units(self, *units, exact=True): :Parameters: units: optional - Select field constructs. By default all field constructs - are selected. May be one or more of: + Select field constructs. By default all field + constructs are selected. May be one or more of: * The units of a field construct. Units are specified by a string or compiled regular - expression (e.g. 'km', 'm s-1', ``re.compile('^kilo')``, - etc.) or a `Units` object (e.g. ``Units('km')``, - ``Units('m s-1')``, etc.). + expression (e.g. 
'km', 'm s-1', + ``re.compile('^kilo')``, etc.) or a `Units` object + (e.g. ``Units('km')``, ``Units('m s-1')``, etc.). exact: `bool`, optional - If `False` then select field constructs whose units are - equivalent to any of those given by *units*. For example, - metres and are equivalent to kilometres. By default, field - constructs whose units are exactly one of those given by - *units* are selected. Note that the format of the units is - not important, i.e. 'm' is exactly the same as 'metres' - for this purpose. + If `False` then select field constructs whose units + are equivalent to any of those given by *units*. For + example, metres and are equivalent to kilometres. By + default, field constructs whose units are exactly one + of those given by *units* are selected. Note that the + format of the units is not important, i.e. 'm' is + exactly the same as 'metres' for this purpose. :Returns: @@ -1215,7 +166,7 @@ def select_by_units(self, *units, exact=True): f for f in self if f.match_by_units(*units, exact=exact) ) - def select_field(self, identity, default=ValueError()): + def select_field(self, *identities, default=ValueError()): """Select a unique field construct by its identity. .. versionadded:: 3.0.4 @@ -1224,39 +175,18 @@ def select_field(self, identity, default=ValueError()): :Parameters: - identity: - Select the field construct. May be: - - * The identity of a field construct. - - A construct identity is specified by a string (e.g. - ``'air_temperature'``, ``'long_name=Air Temperature', - ``'ncvar%tas'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^air_')``) that selects the relevant - constructs whose identities match via `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - five identities: + identities: optional + Select the field construct by one or more of - >>> x.identities() - ['air_temperature', - 'long_name=Air Temperature', - 'foo=bar', - 'standard_name=air_temperature', - 'ncvar%tas'] + * A construct identity. - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + {{construct selection identity}} default: optional - Return the value of the *default* parameter if a unique - field construct can not be found. If set to an `Exception` - instance then it will be raised instead. + Return the value of the *default* parameter if a + unique field construct can not be found. 
+ + {{default Exception}} :Returns: @@ -1279,22 +209,24 @@ def select_field(self, identity, default=ValueError()): ValueError: No fields found """ - out = self.select_by_identity(identity) + out = self.select_by_identity(*identities) - if not out or len(out) > 1: - if isinstance(default, Exception): - if not default.args: - if not out: - message = "No fields found" - else: - message = "Multiple fields found" + if not out: + if default is None: + return - default = copy(default) - default.args = (message,) + return self._default( + default, "select_field() can't return 0 fields" + ) - raise default + n = len(out) + if n > 1: + if default is None: + return - return default + return self._default( + default, "select_field() can't return {n} fields" + ) return out[0] diff --git a/cf/mixin/__init__.py b/cf/mixin/__init__.py index 555f87c84d..235e14faf5 100644 --- a/cf/mixin/__init__.py +++ b/cf/mixin/__init__.py @@ -3,3 +3,4 @@ from .propertiesdatabounds import PropertiesDataBounds from .coordinate import Coordinate from .fielddomain import FieldDomain +from .fielddomainlist import FieldDomainList diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 1484f812d2..14926f7443 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -1284,6 +1284,8 @@ def auxiliary_coordinate( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1292,6 +1294,8 @@ def auxiliary_coordinate( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1343,6 +1347,8 @@ def construct( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1351,6 +1357,8 @@ def construct( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1405,6 +1413,8 @@ def cell_measure( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1413,6 +1423,8 @@ def cell_measure( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1467,6 +1479,8 @@ def coordinate( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1475,6 +1489,8 @@ def coordinate( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1530,6 +1546,8 @@ def coordinate_reference( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1538,6 +1556,8 @@ def coordinate_reference( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1666,6 +1686,8 @@ def dimension_coordinate( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1674,6 +1696,8 @@ def dimension_coordinate( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1827,6 +1851,8 @@ def domain_ancillary( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. 
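A usage sketch of the rewritten `select_field` shown above (the selection must resolve to exactly one field; the unmatched name is hypothetical):

    import cf

    fl = cf.FieldList([cf.example_field(0), cf.example_field(1)])

    t = fl.select_field('air_temperature')         # exactly one matching field
    fl.select_field('not_a_field', default=None)   # None instead of raising ValueError
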
@@ -1835,6 +1861,8 @@ def domain_ancillary( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -1900,6 +1928,8 @@ def domain_axis( {{item: `bool`, optional}} + .. versionadded:: (cfdm) 3.9.0 + default: optional Return the value of the *default* parameter if there is no unique construct. @@ -1908,6 +1938,8 @@ def domain_axis( {{filter_kwargs: optional}} + .. versionadded:: (cfdm) 3.9.0 + :Returns: {{Returns construct}} @@ -2029,31 +2061,25 @@ def get_coordinate_reference( return out - def iscyclic(self, identity, **kwargs): + def iscyclic(self, *identity, **filter_kwargs): """Returns True if the given axis is cyclic. + {{unique construct}} + .. versionadded:: 1.0 - .. seealso:: `axis`, `cyclic`, `period` + .. seealso:: `cyclic`, `period`, `domain_axis` :Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. + identity: optional + Select the unique domain axis construct returned by + ``f.domain_axis(*identity, **filter_kwargs)``. See + `domain_axis` for details. - The *identity* parameter selects the domain axis as - returned by this call of the field construct's - `domain_axis` method: ``f.domain_axis(identity)``. + {{filter_kwargs: optional}} - kwargs: deprecated at version 3.0.0 + .. versionadded:: (cfdm) 3.9.0 :Returns: @@ -2075,16 +2101,11 @@ def iscyclic(self, identity, **kwargs): >>> x = f.iscyclic(2) """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "iscyclic", kwargs - ) # pragma: no cover - - axis = self.domain_axis(identity, key=True, default=None) + axis = self.domain_axis( + *identity, key=True, default=None, **filter_kwargs + ) if axis is None: - raise ValueError( - f"Can't identify unique axis from identity {identity!r}" - ) + raise ValueError("Can't identify unique axis") return axis in self.cyclic() @@ -2185,68 +2206,16 @@ def replace_construct( .. versionadded:: 3.0.0 - .. seealso:: `set_construct` + .. seealso:: `set_construct`, `construct` :Parameters: - identity: - Select TODO the metadata construct to be replaced by one of: - - * The identity or key of a metadata construct. - - * The identity or key of a domain axis construct that is - spanned by a metadata construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, ``'ncvar%lat'``, - etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. 
- - *Parameter example:* - ``identity='Y'`` - - *Parameter example:* - ``identity='latitude'`` - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` + identity: optional + Select the unique construct returned by + ``f.construct(*identity, **filter_kwargs)``. See + `construct` for details. - construct: + new: The new construct to replace that selected by the *identity* parameter. @@ -2254,6 +2223,13 @@ def replace_construct( If True then set a copy of the new construct. By default the construct is copied. + {{filter_kwargs: optional}} + + .. versionadded:: 3.9.0 + + construct: + Deprecated at version 3.9.0 + :Returns: The construct that was replaced. diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py new file mode 100644 index 0000000000..3bc65777fb --- /dev/null +++ b/cf/mixin/fielddomainlist.py @@ -0,0 +1,339 @@ +from .functions import _DEPRECATION_ERROR_ARG + + +class FieldDomainList: + """TODO.""" + + def select_by_construct(self, *identities, OR=False, **conditions): + """Select elements by their metadata constructs. + + To find the inverse of the selection, use a list comprehension + with the !match_by_construct` method of the constuct + elements. For example, to select all constructs that do *not* + have a "latitude" metadata construct: + + >>> gl = cf.{{class}}( + ... f for f in fl if not f.match_by_construct('latitude') + ... ) + + .. note:: The API changed at version 3.1.0 + + .. versionadded:: 3.0.0 + + .. seealso: `select`, `__call__`, `select_by_units`, + `select_by_naxes`, `select_by_rank`, + `select_by_property` + + :Parameters: + + identities: optional + Identify metadata constructs that have an identity, + defined by their `!identities` methods, that matches + any of the given values. + + If no identities nor conditions (see the *conditions* + parameter) are provided then all constructs are + selected. + + {{value match}} + + {{displayed identity}} + + If a cell method construct identity is given (such as + ``'method:mean'``) then it will only be compared with + the most recently applied cell method operation. + + Alternatively, one or more cell method constucts may + be identified in a single string with a CF-netCDF cell + methods-like syntax for describing both the collapse + dimensions, the collapse method, and any cell method + construct qualifiers. If N cell methods are described + in this way then they will collectively identify the N + most recently applied cell method operations. For + example, ``'T: maximum within years T: mean over + years'`` will be compared with the most two most + recently applied cell method operations. + + *Parameter example:* + `'latitude'`` + + *Parameter example:* + ``'T'`` + + *Parameter example:* + ``'latitude'`` + + *Parameter example:* + ``'long_name=Cell Area'`` + + *Parameter example:* + ``'cellmeasure1'`` + + *Parameter example:* + ``'measure:area'`` + + *Parameter example:* + ``cf.eq('time')'`` + + *Parameter example:* + ``re.compile('^lat')`` + + *Parameter example:* + ``'domainancillary2', 'longitude'`` + + *Parameter example:* + ``'area: mean T: maximum'`` + + *Parameter example:* + ``'grid_latitude', 'area: mean T: maximum'`` + + conditions: optional + Identify the metadata constructs that have any of the + given identities or construct keys, and whose data satisfy + conditions. 
+ + A construct identity or construct key (defined in the + same way as by the *identities* parameter) is given as + a keyword name and a condition on its data is given as + the keyword value. + + The condition is satisfied if any of its data values + equals the value provided. + + If no conditions nor identities (see the *identities* + parameter) are provided then all list elements are + selected. + + *Parameter example:* + ``longitude=180.0`` + + *Parameter example:* + ``time=cf.dt('1959-12-16')`` + + *Parameter example:* + ``latitude=cf.ge(0)`` + + *Parameter example:* + ``latitude=cf.ge(0), air_pressure=500`` + + *Parameter example:* + ``**{'latitude': cf.ge(0), 'long_name=soil_level': 4}`` + + OR: `bool`, optional + If True then return `True` if at least one metadata + construct matches at least one of the criteria given + by the *identities* or *conditions* arguments. By + default `True` is only returned if the field + constructs matches each of the given criteria. + + mode: deprecated at version 3.1.0 + Use the *OR* parameter instead. + + constructs: deprecated at version 3.1.0 + + :Returns: + + `bool` + The matching field constructs. + + **Examples:** + + TODO + + """ + if identities: + if identities[0] == "or": + _DEPRECATION_ERROR_ARG( + self, + "select_by_construct", + "or", + message="Use 'OR=True' instead.", + version="3.1.0", + ) # pragma: no cover + + if identities[0] == "and": + _DEPRECATION_ERROR_ARG( + self, + "select_by_construct", + "and", + message="Use 'OR=False' instead.", + version="3.1.0", + ) # pragma: no cover + + return type(self)( + f + for f in self + if f.match_by_construct(*identities, OR=OR, **conditions) + ) + + def select_by_ncvar(self, *ncvars): + """Select list elements by netCDF variable name. + + To find the inverse of the selection, use a list comprehension + with the `!match_by_ncvar` method of the constuct + elements. For example, to select all constructs which do *not* + have a netCDF name of 'tas': + + >>> gl = cf.{{class}}( + ... f for f in fl if not f.match_by_ncvar('tas') + ... ) + + .. versionadded:: 3.0.0 + + .. seealso:: `select`, `select_by_identity`, `select_by_property`, + + :Parameters: + + ncvars: optional + Select constructs from the list. May be one or more: + + * The netCDF name of a construct. + + A construct is selected if it matches any of the given + names. + + A netCDF variable name is specified by a string (e.g. + ``'tas'``, etc.); a `Query` object + (e.g. ``cf.eq('tas')``); or a compiled regular + expression (e.g. ``re.compile('^air_')``) that selects + the constructs whose netCDF variable names match via + `re.search`. + + If no netCDF variable names are provided then all are + selected. + + :Returns: + + `{{class}}` + The matching constructs. + + **Examples:** + + >>> fl = cf.{{class}}([cf.example_field(0), cf.example_field(1)]) + >>> fl + [, + ] + >>> f[0].nc_get_variable() + 'humidity' + >>> f[1].nc_get_variable() + 'temp' + + >>> fl.select_by_ncvar('humidity') + [] + >>> fl.select_by_ncvar('humidity', 'temp') + [, + ] + >>> fl.select_by_ncvar() + [, + ] + + >>> import re + >>> fl.select_by_ncvar(re.compile('^hum')) + [] + + """ + return type(self)(f for f in self if f.match_by_ncvar(*ncvars)) + + def select_by_property(self, *mode, **properties): + """Select list elements by property. + + To find the inverse of the selection, use a list comprehension + with the `!match_by_property` method of the constuct + elements. 
For example, to select all constructs which do *not* + have a long_name property of "Pressure": + + >>> gl = cf.{{class}}( + ... f for f in fl if not f.match_by_property(long_name='Pressure') + ... ) + + .. versionadded:: 3.0.0 + + .. seealso:: `select`, `select_by_identity`, `select_by_ncvar` + + :Parameters: + + mode: optional + Define the behaviour when multiple properties are + provided. + + By default (or if the *mode* parameter is ``'and'``) a + construct is selected if it matches all of the given + properties, but if the *mode* parameter is ``'or'`` + then a construct will be selected when at least one of + its properties matches. + + properties: optional + Select the constructs with the given properties. May be + one or more of: + + * The property of a construct. + + By default a construct is selected if it matches all + of the given properties, but it may alternatively be + selected when at least one of its properties matches + (see the *mode* positional parameter). + + A property value is given by a keyword parameter of + the property name. The value may be a scalar or vector + (e.g. ``'air_temperature'``, ``4``, ``['foo', + 'bar']``); or a compiled regular expression + (e.g. ``re.compile('^ocean')``), for which all + constructs whose methods match (via `re.search`) are + selected. + + :Returns: + + `{{class}}` + The matching constructs. + + **Examples:** + + See `cf.{{class}}.select_by_identity` + + """ + return type(self)( + f for f in self if f.match_by_property(*mode, **properties) + ) + + def select_by_rank(self, *ranks): + """Select list elements by the number of domain axis constructs. + + .. versionadded:: 3.0.0 + + .. seealso: `select`, `__call__`, `select_by_units`, + `select_by_naxes`, `select_by_construct`, + `select_by_property`, `cf.Field.match_by_identity` + + :Parameters: + + ranks: optional + Define conditions on the number of domain axis constructs. + + A condition is one of: + + * `int` + * a `Query` object + + The condition is satisfied if the number of domain + axis constructs equals the condition value. + + *Parameter example:* + To see if the field construct has 4 domain axis + constructs: ``4`` + + *Parameter example:* + To see if the field construct has at least 3 domain + axis constructs: ``cf.ge(3)`` + + :Returns: + + `bool` + The matching field constructs. + + **Examples:** + + TODO + + """ + + return type(self)(f for f in self if f.match_by_rank(*ranks)) diff --git a/cf/mixin/properties.py b/cf/mixin/properties.py index 9a8e0f020f..cd27ac7156 100644 --- a/cf/mixin/properties.py +++ b/cf/mixin/properties.py @@ -742,18 +742,20 @@ def match_by_identity(self, *identities): Define one or more conditions on the identities. A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time', ``'ncvar%lat'``, - etc.); a `Query` object (e.g. ``cf.eq('longitude')``); or - a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that is compared with - the construct's identities via `re.search`. - - A construct has a number of identities, and the condition - is satisfied if any of the construct's identities, as - returned by the `identities` method, equals the condition - value. A construct's identities are those returned by its - `!identities` method. In the following example, the - construct ``x`` has six identities: + (e.g. ``'latitude'``, ``'long_name=time', + ``'ncvar%lat'``, etc.); a `Query` object + (e.g. ``cf.eq('longitude')``); or a compiled regular + expression (e.g. 
``re.compile('^atmosphere')``) that + is compared with the construct's identities via + `re.search`. + + A construct has a number of identities, and the + condition is satisfied if any of the construct's + identities, as returned by the `identities` method, + equals the condition value. A construct's identities + are those returned by its `!identities` method. In the + following example, the construct ``x`` has six + identities: >>> x.identities() ['time', @@ -782,11 +784,9 @@ def match_by_identity(self, *identities): if not identities: return True - self_identities = self.identities() - ok = False - for value0 in identities: - for value1 in self_identities: + for value1 in self.identities(generator=True): + for value0 in identities: ok = self._matching_values(value0, value1, basic=True) if ok: break diff --git a/cf/mixin/propertiesdata.py b/cf/mixin/propertiesdata.py index 22c6675af0..faf6a40e2a 100644 --- a/cf/mixin/propertiesdata.py +++ b/cf/mixin/propertiesdata.py @@ -4875,22 +4875,29 @@ def iscyclic(self, axis): def get_data(self, default=ValueError(), _units=None, _fill_value=True): """Return the data. - Note that a `Data` instance is returned. Use its `array` attribute - to return the data as an independent `numpy` array. + Note that a `Data` instance is returned. Use its `array` + attribute to return the data as an independent `numpy` array. The units, calendar and fill value properties are, if set, inserted into the data. .. versionadded:: 1.7.0 - .. seealso:: `array`, `data`, `del_data`, `has_data`, `set_data` + .. seealso:: `array`, `data`, `del_data`, `has_data`, + `set_data` :Parameters: default: optional - Return the value of the *default* parameter if data have - not been set. If set to an `Exception` instance then it - will be raised instead. + Return the value of the *default* parameter if data + have not been set. + + {{default Exception}} + + _units: optional + Ignored. + + _fill_value: optional :Returns: diff --git a/cf/mixin/propertiesdatabounds.py b/cf/mixin/propertiesdatabounds.py index ab9d1f19bd..e05783a899 100644 --- a/cf/mixin/propertiesdatabounds.py +++ b/cf/mixin/propertiesdatabounds.py @@ -2459,16 +2459,17 @@ def halo( def flip(self, axes=None, inplace=False, i=False): """Flip (reverse the direction of) data dimensions. - .. seealso:: `insert_dimension`, `squeeze`, `transpose`, `unsqueeze` + .. seealso:: `insert_dimension`, `squeeze`, `transpose`, + `unsqueeze` :Parameters: axes: optional Select the domain axes to flip. One, or a sequence, of: - * The position of the dimension in the data. + * The position of the dimension in the data. - If no axes are specified then all axes are flipped. + If no axes are specified then all axes are flipped. {{inplace: `bool`, optional}} @@ -2477,8 +2478,8 @@ def flip(self, axes=None, inplace=False, i=False): :Returns: `{{class}}` or `None` - The construct with flipped axes, or `None` if the operation - was in-place. + The construct with flipped axes, or `None` if the + operation was in-place. 
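A sketch of the construct-level flip described above, which reverses the bounds along with the data (illustrative, using a sample field and the field's `coordinate` accessor):

    import cf

    f = cf.example_field(0)
    x = f.coordinate('longitude')
    y = x.flip()            # coordinate data and its bounds are both reversed
    x.flip(inplace=True)    # or flip the construct in-place
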
**Examples:** @@ -2492,6 +2493,7 @@ def flip(self, axes=None, inplace=False, i=False): """ v = _inplace_enabled_define_and_cleanup(self) + super(PropertiesDataBounds, v).flip(axes=axes, inplace=True) interior_ring = v.get_interior_ring(None) @@ -2503,7 +2505,7 @@ def flip(self, axes=None, inplace=False, i=False): # -------------------------------------------------------- interior_ring.flip(axes, inplace=True) - bounds = v.get_bounds(None) + bounds = v.get_bounds_data(None, _fill_value=False) if bounds is not None: # -------------------------------------------------------- # Flip the bounds. diff --git a/cf/query.py b/cf/query.py index 08d27edd85..38171ee4ec 100644 --- a/cf/query.py +++ b/cf/query.py @@ -755,9 +755,9 @@ def inspect(self): # ---------------------------------------------------------------- @property def exact(self): - """TODO Deprecated at version 3.0.0. + """Deprecated at version 3.0.0. - Use re.compile objects instead. + Use `re.compile` objects instead. """ _DEPRECATION_ERROR_ATTRIBUTE( diff --git a/cf/read_write/netcdf/netcdfread.py b/cf/read_write/netcdf/netcdfread.py index 20ebbe7789..e1d4e3a3ca 100644 --- a/cf/read_write/netcdf/netcdfread.py +++ b/cf/read_write/netcdf/netcdfread.py @@ -209,7 +209,6 @@ def _create_data( if strlen > 1: ncdimensions.pop() dtype = numpy_dtype("S{0}".format(strlen)) - # --- End: if cfa_data["dtype"] = dtype cfa_data["_axes"] = ncdimensions @@ -264,10 +263,8 @@ def _create_data( p.append(slice(x[0], x[1] - 1, x[2])) else: p.append(list(x)) - # --- End: for attrs["part"] = p - # --- End: for construct.del_property("cf_role") construct.del_property("cfa_array") @@ -344,7 +341,6 @@ def _customize_read_vars(self): == "cfa_private" ): g["do_not_create_field"].add(ncvar) - # --- End: if # ------------------------------------------------------------ # @@ -394,6 +390,3 @@ def file_open(self, filename, flatten=True, verbose=None): out = super().file_open(filename, flatten=flatten, verbose=verbose) _file_to_fh["netCDF"].pop(filename, None) return out - - -# --- End: class diff --git a/cf/read_write/read.py b/cf/read_write/read.py index daf36380c2..3710d4cafc 100644 --- a/cf/read_write/read.py +++ b/cf/read_write/read.py @@ -621,7 +621,6 @@ def read( break else: files3.append(x) - # --- End: for files2 = files3 @@ -657,7 +656,6 @@ def read( ) # pragma: no cover continue - # --- End: if ftypes.add(ftype) @@ -696,8 +694,6 @@ def read( field_counter = len(field_list) file_counter += 1 - # --- End: for - # --- End: for logger.info( "Read {0} field{1} from {2} file{3}".format( @@ -722,7 +718,6 @@ def read( org_len, _plural(org_len), n, _plural(n) ) ) # pragma: no cover - # --- End: if # ---------------------------------------------------------------- # Sort by netCDF variable name @@ -738,7 +733,6 @@ def read( if standard_name is not None: f.set_property("standard_name", standard_name, copy=False) del f._custom["standard_name"] - # --- End: for # ---------------------------------------------------------------- # Select matching fields from UM/PP fields (post setting of @@ -764,7 +758,6 @@ def read( elif unsqueeze: for f in field_list: f.unsqueeze(inplace=True) - # --- End: if if nfields is not None and len(field_list) != nfields: raise ValueError( @@ -885,7 +878,6 @@ def _read_a_file( if endian is None: endian = "big" - # --- End: if if umversion is not None: umversion = float(str(umversion).replace(".", "0", 1)) @@ -899,7 +891,6 @@ def _read_a_file( # logger.warning('WARNING: {}'.format(error)) # pragma: no cover # # return FieldList() - # --- End: if 
extra_read_vars = { "chunk": chunk, @@ -935,7 +926,6 @@ def _read_a_file( "Can't determine format of file {} generated from CDL " "file {}".format(filename, cdl_filename) ) - # --- End: if if ftype == "netCDF" and extra_read_vars["fmt"] in (None, "NETCDF", "CFA"): fields = netcdf.read( diff --git a/cf/read_write/um/filearray.py b/cf/read_write/um/filearray.py index 7a9d25de47..c2592539b8 100644 --- a/cf/read_write/um/filearray.py +++ b/cf/read_write/um/filearray.py @@ -124,7 +124,6 @@ def __getitem__(self, indices): mask = array == fill_value if mask.any(): array = numpy_ma_masked_where(mask, array, copy=False) - # --- End: if # ------------------------------------------------------------ # Unpack the array using the scale_factor and add_offset, if @@ -194,6 +193,3 @@ def open(self): word_size=getattr(self, "word_size", None), byte_ordering=getattr(self, "byte_ordering", None), ) - - -# --- End: class diff --git a/cf/read_write/um/functions.py b/cf/read_write/um/functions.py index 04d94505be..59eb1034c8 100644 --- a/cf/read_write/um/functions.py +++ b/cf/read_write/um/functions.py @@ -43,7 +43,6 @@ def _open_um_file( close_one_file() f.open_fd() - # --- End: if return f diff --git a/cf/test/test_Field.py b/cf/test/test_Field.py index 9f5567e6b9..9ec1c9a683 100644 --- a/cf/test/test_Field.py +++ b/cf/test/test_Field.py @@ -1558,7 +1558,7 @@ def test_Field_indices(self): filename = os.path.join( os.path.dirname(os.path.abspath(__file__)), "test_file.nc" ) - f = cf.read(self.filename)[0] + f = cf.read(filename)[0] array = numpy.ma.array(f.array) @@ -1891,11 +1891,10 @@ def test_Field_match(self): ["None", "eastward_wind"], ): self.assertTrue( - f.match(*identities), "Failed with {}".format(identities) + f.match(*identities), f"Failed with {identities!r}" ) self.assertTrue( - f.match_by_identity(*identities), - "Failed with {}".format(identities), + f.match_by_identity(*identities), f"Failed with {identities!r}" ) # match_by_property @@ -1909,7 +1908,7 @@ def test_Field_match(self): ): self.assertTrue( f.match_by_property(*mode, **properties), - "Failed with {} {}".format(mode, properties), + f"Failed with {mode} {properties}", ) for mode in (["or"],): @@ -1923,7 +1922,7 @@ def test_Field_match(self): ): self.assertTrue( f.match_by_property(*mode, **properties), - "Failed with {} {}".format(mode, properties), + f"Failed with {mode} {properties}", ) # match_by_units self.assertTrue(f.match_by_units("m s-1")) diff --git a/cf/test/test_FieldList.py b/cf/test/test_FieldList.py index 743f743240..b932f90ddd 100644 --- a/cf/test/test_FieldList.py +++ b/cf/test/test_FieldList.py @@ -430,7 +430,7 @@ def test_FieldList_select_field(self): with self.assertRaises(Exception): f.select_field("not this one") - self.assertIsNone(f.select_field("not this one", None)) + self.assertIsNone(f.select_field("not this one", default=None)) g = f.select_field("air_temperature") self.assertIsInstance(g, cf.Field) diff --git a/cf/umread_lib/cInterface.py b/cf/umread_lib/cInterface.py index 5b123c7e19..cf2fba58e7 100644 --- a/cf/umread_lib/cInterface.py +++ b/cf/umread_lib/cInterface.py @@ -20,9 +20,6 @@ class File_type(CT.Structure): ] -# -- End: class - - def _get_ctypes_array(dtype, size=None): """Get ctypes corresponding to a numpy array of a given type. 
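For orientation only, a minimal sketch of the numpy-to-ctypes mapping that a helper such as `_get_ctypes_array` performs (the function body is not shown in this hunk, so this is not the library's implementation; the helper name `as_ctypes_pointer` is illustrative and only standard `numpy.ctypeslib` calls are used):

    import ctypes
    import numpy

    def as_ctypes_pointer(array):
        # Map the array's dtype to the corresponding ctypes scalar type
        # and return a typed pointer to the array's underlying buffer.
        c_type = numpy.ctypeslib.as_ctypes_type(array.dtype)
        return array.ctypes.data_as(ctypes.POINTER(c_type))

    data = numpy.zeros(4, dtype="float64")
    ptr = as_ctypes_pointer(data)  # an LP_c_double aimed at data's buffer
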
@@ -119,9 +116,6 @@ def as_index(self, val): return self.names.index(val) -# -- End: class - - enum_file_format = Enum("PP", "FF") enum_byte_ordering = Enum("little_endian", "big_endian") enum_data_type = Enum("integer", "real") @@ -606,9 +600,6 @@ def read_record_data( return data -# --- End: class - - if __name__ == "__main__": import sys diff --git a/cf/umread_lib/umfile.py b/cf/umread_lib/umfile.py index 74a5724a4d..d7d15ec49f 100644 --- a/cf/umread_lib/umfile.py +++ b/cf/umread_lib/umfile.py @@ -154,9 +154,6 @@ def _add_back_refs(self): rec.file = self -# --- End: class - - class Var: """Container for some information about variables.""" @@ -242,14 +239,10 @@ def group_records_by_extra_data(self): this_grp.sort(key=self._compare_recs_by_orig_order) groups.append(this_grp) this_grp = [] - # --- End: for return groups -# --- End: class - - class Rec: """Container for some information about records.""" @@ -411,9 +404,6 @@ def get_data(self): ) -# --- End: class - - if __name__ == "__main__": import sys diff --git a/docs/source/class/cf.AuxiliaryCoordinate.rst b/docs/source/class/cf.AuxiliaryCoordinate.rst index 603e07f393..1297c0314d 100644 --- a/docs/source/class/cf.AuxiliaryCoordinate.rst +++ b/docs/source/class/cf.AuxiliaryCoordinate.rst @@ -107,6 +107,21 @@ Geometries ~cf.AuxiliaryCoordinate.interior_ring +Climatology +^^^^^^^^^^^ + +.. rubric:: Methods + +.. autosummary:: + :nosignatures: + :toctree: ../method/ + :template: method.rst + + ~cf.AuxiliaryCoordinate.del_climatology + ~cf.AuxiliaryCoordinate.get_climatology + ~cf.AuxiliaryCoordinate.is_climatology + ~cf.AuxiliaryCoordinate.set_climatology + Selection --------- diff --git a/docs/source/class/cf.Constructs.rst b/docs/source/class/cf.Constructs.rst index 6d57eb1de3..3b8a0bdb08 100644 --- a/docs/source/class/cf.Constructs.rst +++ b/docs/source/class/cf.Constructs.rst @@ -47,6 +47,7 @@ Constructs and keys :toctree: ../method/ :template: method.rst + ~cf.Constructs.domain_axes ~cf.Constructs.key ~cf.Constructs.value @@ -61,6 +62,7 @@ Data axes :template: method.rst ~cf.Constructs.data_axes + ~cf.Constructs.get_data_axes Miscellaneous ------------- @@ -74,6 +76,7 @@ Miscellaneous ~cf.Constructs.copy ~cf.Constructs.shallow_copy + ~cf.Constructs.todict ~cf.Constructs.equals ~cf.Constructs.ordered ~cf.Constructs.construct_type diff --git a/docs/source/class/cf.DimensionCoordinate.rst b/docs/source/class/cf.DimensionCoordinate.rst index d26d3aaf9b..1fb4c94dcf 100644 --- a/docs/source/class/cf.DimensionCoordinate.rst +++ b/docs/source/class/cf.DimensionCoordinate.rst @@ -111,6 +111,21 @@ Geometries ~cf.DimensionCoordinate.interior_ring +Climatology +^^^^^^^^^^^ + +.. rubric:: Methods + +.. 
autosummary:: + :nosignatures: + :toctree: ../method/ + :template: method.rst + + ~cf.DimensionCoordinate.del_climatology + ~cf.DimensionCoordinate.get_climatology + ~cf.DimensionCoordinate.is_climatology + ~cf.DimensionCoordinate.set_climatology + Selection --------- diff --git a/docs/source/class/cf.Domain.rst b/docs/source/class/cf.Domain.rst index bb430d472a..ddfcbb3701 100644 --- a/docs/source/class/cf.Domain.rst +++ b/docs/source/class/cf.Domain.rst @@ -32,7 +32,15 @@ Metadata constructs :toctree: ../method/ :template: method.rst + ~cf.Domain.auxiliary_coordinates + ~cf.Domain.cell_measures + ~cf.Domain.coordinates + ~cf.Domain.coordinate_references + ~cf.Domain.dimension_coordinates + ~cf.Domain.domain_ancillaries + ~cf.Domain.domain_axes ~cf.Domain.construct + ~cf.Domain.construct_item ~cf.Domain.construct_key ~cf.Domain.del_construct ~cf.Domain.get_construct @@ -43,6 +51,7 @@ Metadata constructs ~cf.Domain.has_data_axes ~cf.Domain.set_data_axes ~cf.Domain.domain_axis_key + ~cf.Domain.climatological_time_axes .. rubric:: Attributes @@ -52,13 +61,6 @@ Metadata constructs :template: attribute.rst ~cf.Domain.constructs - ~cf.Domain.auxiliary_coordinates - ~cf.Domain.cell_measures - ~cf.Domain.coordinates - ~cf.Domain.coordinate_references - ~cf.Domain.dimension_coordinates - ~cf.Domain.domain_ancillaries - ~cf.Domain.domain_axes Miscellaneous ------------- @@ -73,6 +75,17 @@ Miscellaneous ~cf.Domain.copy ~cf.Domain.equals ~cf.Domain.fromconstructs + ~cf.Domain.apply_masking + ~cf.Domain.get_filenames + +.. rubric:: Attributes + +.. autosummary:: + :nosignatures: + :toctree: ../attribute/ + :template: attribute.rst + + ~cf.Domain.has_geometry Special ------- diff --git a/docs/source/class/cf.Field.rst b/docs/source/class/cf.Field.rst index a9a37355a8..caf5f12e03 100644 --- a/docs/source/class/cf.Field.rst +++ b/docs/source/class/cf.Field.rst @@ -123,14 +123,15 @@ Units ~cf.Field.override_units ~cf.Field.override_calendar - + .. autosummary:: :nosignatures: :toctree: ../attribute/ :template: attribute.rst ~cf.Field.Units - + ~cf.Field.reference_datetime + .. _Field-Data: Data @@ -257,7 +258,17 @@ Metadata constructs :toctree: ../method/ :template: method.rst + ~cf.Field.auxiliary_coordinates + ~cf.Field.cell_measures + ~cf.Field.cell_methods + ~cf.Field.coordinates + ~cf.Field.coordinate_references + ~cf.Field.dimension_coordinates + ~cf.Field.domain_ancillaries + ~cf.Field.domain_axes + ~cf.Field.field_ancillaries ~cf.Field.construct + ~cf.Field.construct_item ~cf.Field.construct_key ~cf.Field.del_construct ~cf.Field.get_construct @@ -296,17 +307,6 @@ Metadata constructs :template: attribute.rst ~cf.Field.constructs - ~cf.Field.auxiliary_coordinates - ~cf.Field.cell_measures - ~cf.Field.cell_methods - ~cf.Field.coordinates - ~cf.Field.coordinate_references - ~cf.Field.dimension_coordinates - ~cf.Field.domain_ancillaries - ~cf.Field.domain_axes - ~cf.Field.axes - ~cf.Field.field_ancillaries - ~cf.Field.reference_datetime .. _Field-Domain: diff --git a/docs/source/field_analysis.rst b/docs/source/field_analysis.rst index 09b12ef0ff..1971c547ef 100644 --- a/docs/source/field_analysis.rst +++ b/docs/source/field_analysis.rst @@ -1548,12 +1548,12 @@ pressure coordinates after the regridding operation. 
>>> z_ln_p.axis = 'Z' >>> print(z_ln_p.array) [6.74523635 6.55108034 6.2146081 5.52146092 3.91202301] - >>> _ = v.replace_construct('Z', z_ln_p) + >>> _ = v.replace_construct('Z', new=z_ln_p) >>> new_z_p = cf.DimensionCoordinate(data=cf.Data([800, 705, 632, 510, 320.], 'hPa')) >>> new_z_ln_p = new_z_p.log() >>> new_z_ln_p.axis = 'Z' >>> new_v = v.regridc({'Z': new_z_ln_p}, axes='Z', method='linear') - >>> new_v.replace_construct('Z', new_z_p) + >>> new_v.replace_construct('Z', new=new_z_p) >>> print(new_v) Field: eastward_wind (ncvar%ua) ------------------------------- diff --git a/docs/source/function.rst b/docs/source/function.rst index 1d8090b12c..07255e84f4 100644 --- a/docs/source/function.rst +++ b/docs/source/function.rst @@ -171,6 +171,8 @@ Miscellaneous cf.dump cf.environment cf.example_field + cf.example_fields + cf.example_domain cf.flat cf.hash_array cf.implementation diff --git a/docs/source/tutorial.py b/docs/source/tutorial.py index d6cd40de4c..cc1b48bef0 100644 --- a/docs/source/tutorial.py +++ b/docs/source/tutorial.py @@ -56,8 +56,8 @@ q, t = cf.read('file.nc') t.coordinate_references print(t.coordinate_references) -list(t.coordinate_references.keys()) -for key, value in t.coordinate_references.items(): +list(t.coordinate_references().keys()) +for key, value in t.coordinate_references().items(): print(key, repr(value)) print(t.dimension_coordinates) print(t.domain_axes) @@ -217,10 +217,12 @@ 'or', standard_name='air_temperature standard_error', units='m')) -print(t.constructs.filter_by_axis('and', 'domainaxis1')) +print(t.constructs.filter_by_axis('X', 'Y', axis_mode='or')) print(t.constructs.filter_by_measure('area')) print(t.constructs.filter_by_method('maximum')) -print(t.constructs.filter_by_type('auxiliary_coordinate').filter_by_axis('and', 'domainaxis2')) +print( + t.constructs.filter_by_type('auxiliary_coordinate').filter_by_axis('domainaxis2') +) c = t.constructs.filter_by_type('dimension_coordinate') d = c.filter_by_property(units='degrees') print(d) @@ -248,34 +250,30 @@ t.construct('latitude', key=True) key = t.construct_key('latitude') t.get_construct(key) -t.constructs('latitude').value() -t.constructs.get(key) +key, lat = t.construct_item('latitude') t.constructs[key] +t.constructs.get(key) t.auxiliary_coordinate('latitude') t.auxiliary_coordinate('latitude', key=True) +t.auxiliary_coordinate('latitude', item=True) try: - t.construct('measure:volume') # Raises Exception + t.construct('measure:volume') # Raises Exception except: pass -t.construct('measure:volume', False) -c = t.constructs.filter_by_measure('volume') -len(c) +t.construct('measure:volume', default=False) try: - c.value() # Raises Exception + t.construct('measure:volume', default=Exception("my error")) # Raises Exception except: pass -c.value(default='No construct') -try: - c.value(default=KeyError('My message')) # Raises Exception -except: - pass -d = t.constructs('units=degrees') +c = t.constructs.filter_by_measure("volume") +len(c) +d = t.constructs("units=degrees") len(d) try: - d.value() # Raises Exception + t.construct("units=degrees") # Raises Exception except: pass -print(d.value(default=None)) +print(t.construct("units=degrees", default=None)) lon = q.construct('longitude') lon lon.set_property('long_name', 'Longitude') @@ -322,7 +320,7 @@ domain_latitude.del_property('test') field_latitude.has_property('test') print(q.domain_axes) -d = q.domain_axes.get('domainaxis1') +d = q.domain_axes().get('domainaxis1') d d.get_size() print(t.coordinates) @@ -360,7 +358,7 @@ g = 
f.compute_vertical_coordinates() g.auxiliary_coordinate('altitude').dump() print(t.cell_methods) -t.cell_methods.ordered() +t.cell_methods().ordered() cm = t.constructs('method:mean').value() cm cm.get_axes() diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index c9c3ef7360..265fdf36b3 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -822,9 +822,9 @@ like their corresponding `dict` methods. It also has a Constructs: {'coordinatereference0': , 'coordinatereference1': } - >>> list(t.coordinate_references.keys()) + >>> list(t.coordinate_references().keys()) ['coordinatereference0', 'coordinatereference1'] - >>> for key, value in t.coordinate_references.items(): + >>> for key, value in t.coordinate_references().items(): ... print(key, repr(value)) ... coordinatereference0 @@ -1738,6 +1738,7 @@ constructs that meet various criteria: ================================ ========================================================================== Method Filter criteria ================================ ========================================================================== +`~Constructs.filter` General purpose interface to all other filter methods `~Constructs.filter_by_identity` Metadata construct identity `~Constructs.filter_by_type` Metadata construct type `~Constructs.filter_by_property` Property values @@ -1752,6 +1753,9 @@ Method Filter criteria `~Constructs.filter_by_ncdim` NetCDF dimension name (see the :ref:`netCDF interface `) ================================ ========================================================================== +The `~Constructs.filter` method of a `Constructs` instance allows +these filters to be chained together in a single call. + Each of these methods returns a new `cf.Constructs` instance that contains the selected metadata constructs. @@ -1793,11 +1797,10 @@ contains the selected metadata constructs. 'fieldancillary0': } .. code-block:: python - :caption: *Get constructs whose data span the 'domainaxis1' domain - axis construct; and those which also do not span the - 'domainaxis2' domain axis construct.* + :caption: *Get constructs whose data span at least one of the 'Y' + and 'X' domain axis constructs.* - >>> print(t.constructs.filter_by_axis('and', 'domainaxis1')) + >>> print(t.constructs.filter_by_axis('X', 'Y', axis_mode='or')) Constructs: {'auxiliarycoordinate0': , 'auxiliarycoordinate1': , @@ -1827,7 +1830,9 @@ easy to perform further filters on their results: .. code-block:: python :caption: *Make selections from previous selections.* - >>> print(t.constructs.filter_by_type('auxiliary_coordinate').filter_by_axis('and', 'domainaxis2')) + >>> print( + ... t.constructs.filter_by_type('auxiliary_coordinate').filter_by_axis('domainaxis2') + ... ) Constructs: {'auxiliarycoordinate0': , 'auxiliarycoordinate1': } @@ -2016,8 +2021,7 @@ returned by any of the following techniques: >>> t.construct('latitude', key=True) 'auxiliarycoordinate0' -* with the `~Field.construct_key` and `~Field.get_construct` methods of - a field construct: +* with the `~Field.construct_key` method of a field construct: .. code-block:: python :caption: *Get the "latitude" metadata construct key with its construct @@ -2027,32 +2031,31 @@ returned by any of the following techniques: >>> t.get_construct(key) -* with the `~Constructs.value` method of a `cf.Constructs` instance - that contains one construct, +* with the `~Field.construct_item` method of a field construct: .. 
code-block:: python - :caption: *Get the "latitude" metadata construct via its identity - and the 'value' method.* - - >>> t.constructs('latitude').value() - + :caption: *Get the "latitude" metadata construct and its identifier + via its construct identity.* + + >>> key, lat = t.construct_item('latitude') + ('auxiliarycoordinate0', ) -* with the `~Constructs.get` method of a `cf.Constructs` instance, or +* by indexing a `cf.Constructs` instance with a construct key. .. code-block:: python :caption: *Get the "latitude" metadata construct via its construct - key and the 'get' method.* + key and indexing* - >>> t.constructs.get(key) + >>> t.constructs[key] -* by indexing a `cf.Constructs` instance with a construct key. +* with the `~Constructs.get` method of a `cf.Constructs` instance, or .. code-block:: python :caption: *Get the "latitude" metadata construct via its construct - key and indexing* + key and the 'get' method.* - >>> t.constructs[key] + >>> t.constructs.get(key) In addition, an individual metadata construct of a particular type can @@ -2082,6 +2085,8 @@ of the chosen type. >>> t.auxiliary_coordinate('latitude', key=True) 'auxiliarycoordinate0' + >>> t.auxiliary_coordinate('latitude', item=True) + ('auxiliarycoordinate0', ) The `~Field.construct` method of the field construct, the above methods for finding a construct of a particular type, and the @@ -2095,33 +2100,27 @@ raising a customised exception: unique construct that meets the criteria. Alternatively, the value of the "default" parameter is returned.* - >>> t.construct('measure:volume') # Raises Exception + >>> t.construct('measure:volume') # Raises Exception Traceback (most recent call last): - ... + ... ValueError: Can't return zero constructs - >>> t.construct('measure:volume', False) + >>> t.construct('measure:volume', default=False) False - >>> c = t.constructs.filter_by_measure('volume') + >>> t.construct('measure:volume', default=Exception("my error")) # Raises Exception + Traceback (most recent call last): + ... + Exception: my error + >>> c = t.constructs.filter_by_measure("volume") >>> len(c) 0 - >>> c.value() # Raises Exception - Traceback (most recent call last): - ... - ValueError: Can't return zero constructs - >>> c.value(default='No construct') - 'No construct' - >>> c.value(default=KeyError('My message')) # Raises Exception - Traceback (most recent call last): - ... - KeyError: 'My message' - >>> d = t.constructs('units=degrees') + >>> d = t.constructs("units=degrees") >>> len(d) 2 - >>> d.value() # Raises Exception + >>> t.construct("units=degrees") # Raises Exception Traceback (most recent call last): - ... - ValueError: Can't return 2 constructs - >>> print(d.value(default=None)) + ... + ValueError: Field.construct() can't return 2 constructs + >>> print(t.construct("units=degrees", default=None)) None The `~Constructs.get` method of a `cf.Constructs` instance accepts an @@ -2431,7 +2430,7 @@ the `~cf.DomainAxis.get_size` method of the domain axis construct. {'domainaxis0': , 'domainaxis1': , 'domainaxis2': } - >>> d = q.domain_axes.get('domainaxis1') + >>> d = q.domain_axes().get('domainaxis1') >>> d >>> d.get_size() @@ -2806,7 +2805,7 @@ to the field construct during :ref:`field construct creation :caption: *Retrieve the cell method constructs in the same order that they were applied.* - >>> t.cell_methods.ordered() + >>> t.cell_methods().ordered() OrderedDict([('cellmethod0', ), ('cellmethod1', )]) @@ -6888,3 +6887,4 @@ if any, are filtered out. .. 
_indexed contiguous: http://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#_ragged_array_representation_of_time_series_profiles .. _geometries: http://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#geometries .. _Hierarchical groups: http://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#groups +,c From a52c2b82461ed130072702fd25fc41d23d12e975 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 21 Apr 2021 23:50:30 +0100 Subject: [PATCH 36/53] devs --- cf/mixin/fielddomainlist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py index 3bc65777fb..3d07b5bba6 100644 --- a/cf/mixin/fielddomainlist.py +++ b/cf/mixin/fielddomainlist.py @@ -1,4 +1,4 @@ -from .functions import _DEPRECATION_ERROR_ARG +from ..functions import _DEPRECATION_ERROR_ARG class FieldDomainList: From d4fe5ba9651c217d8a98022d760c128818309d4f Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 23 Apr 2021 12:02:56 +0100 Subject: [PATCH 37/53] spelling --- Changelog.rst | 2 +- cf/constructlist.py | 6 +- cf/field.py | 211 ++++++++++++----------- cf/fieldlist.py | 73 +------- cf/mixin/fielddomain.py | 14 +- cf/mixin/fielddomainlist.py | 8 +- cf/read_write/um/umread.py | 2 +- cf/read_write/write.py | 4 +- docs/source/spelling_false_positives.txt | 6 +- 9 files changed, 133 insertions(+), 193 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 5d74fef59b..a198ae703b 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -127,7 +127,7 @@ version 3.5.0 * Changed the API to `cf.Field.period`: Now sets and reports on the period of the field construct data, rather than that of its metadata - constucts. + constructs. * Enabled configuration of the extent and nature of informational and warning messages output by `cf` using a logging framework (see points below and also https://github.com/NCAS-CMS/cf-python/issues/37) diff --git a/cf/constructlist.py b/cf/constructlist.py index 227516c1f2..84eb9ccd84 100644 --- a/cf/constructlist.py +++ b/cf/constructlist.py @@ -342,7 +342,7 @@ def sort(self, key=None, reverse=False): """Sort of the list in place. By default the list is sorted by the identities of its - constructs, but any sort criteria cna be specified with the + constructs, but any sort criteria can be specified with the *key* parameter. The sort is stable. @@ -635,7 +635,7 @@ def select_by_identity(self, *identities): """Select list elements constructs by identity. To find the inverse of the selection, use a list comprehension - with the `!match_by_identity` method of the constucts. For + with the `!match_by_identity` method of the constructs. For example, to select all constructs whose identity is *not* ``'air_temperature'``: @@ -690,7 +690,7 @@ def select(self, *identities, **kwargs): """Alias of `cf.{{class}}.select_by_identity`. To find the inverse of the selection, use a list comprehension - with the `!match_by_identity` method of the constucts. For + with the `!match_by_identity` method of the constructs. 
For example, to select all constructs whose identity is *not* ``'air_temperature'``: diff --git a/cf/field.py b/cf/field.py index 162b053905..d8d516865d 100644 --- a/cf/field.py +++ b/cf/field.py @@ -2082,8 +2082,8 @@ def _binary_operation(self, other, method): # } axes_to_replace_from_field1 = {} - # List of field1 coordinate reference constucts which will - # be added to field0 + # List of field1 coordinate reference constructs which will be + # added to field0 refs_to_add_from_field1 = [] # Check that the two fields are combinable @@ -5920,25 +5920,26 @@ def cell_area( :Parameters: radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: ``f.radius(radius)``. See the `cf.Field.radius` for details. - By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constructs, then the + default radius taken as 6371229 metres. great_circle: `bool`, optional - If True then allow, if required, the derivation of i) area - weights from polygon geometry cells by assuming that each - cell part is a spherical polygon composed of great circle - segments; and ii) and the derivation of line-length - weights from line geometry cells by assuming that each - line part is composed of great circle segments. + If True then allow, if required, the derivation of i) + area weights from polygon geometry cells by assuming + that each cell part is a spherical polygon composed of + great circle segments; and ii) and the derivation of + line-length weights from line geometry cells by + assuming that each line part is composed of great + circle segments. .. versionadded:: 3.2.0 @@ -5949,7 +5950,8 @@ def cell_area( :Returns: `Field` - A field construct containing the horizontal cell areas. + A field construct containing the horizontal cell + areas. **Examples:** @@ -5984,9 +5986,9 @@ def radius(self, default=None): """Return the radius used for calculating cell areas in spherical polar coordinates. - The radius is taken from the datums of any coordinate reference - constucts, but if and only if this is not possible then a default - value may be used instead. + The radius is taken from the datums of any coordinate + reference constructs, but if and only if this is not possible + then a default value may be used instead. .. versionadded:: 3.0.2 @@ -5996,15 +5998,16 @@ def radius(self, default=None): default: optional The radius is taken from the datums of any coordinate - reference constucts, but if and only if this is not - possible then the value set by the *default* parameter is - used. May be set to any numeric scalar object, including - `numpy` and `Data` objects. The units of the radius are - assumed to be metres, unless specified by a `Data` - object. If the special value ``'earth'`` is given then the - default radius taken as 6371229 metres. 
If *default* is - `None` an exception will be raised if no unique datum can - be found in the coordinate reference constucts. + reference constructs, but if and only if this is not + possible then the value set by the *default* parameter + is used. May be set to any numeric scalar object, + including `numpy` and `Data` objects. The units of the + radius are assumed to be metres, unless specified by a + `Data` object. If the special value ``'earth'`` is + given then the default radius taken as 6371229 + metres. If *default* is `None` an exception will be + raised if no unique datum can be found in the + coordinate reference constructs. *Parameter example:* Five equivalent ways to set a default radius of 6371200 @@ -6582,46 +6585,47 @@ def weights( ``scale=1``. measure: `bool`, optional - Create weights that are cell measures, i.e. which describe - actual cell sizes (e.g. cell areas) with appropriate units - (e.g. metres squared). + Create weights that are cell measures, i.e. which + describe actual cell sizes (e.g. cell areas) with + appropriate units (e.g. metres squared). Cell measures can be created for any combination of - axes. For example, cell measures for a time axis are the - time span for each cell with canonical units of seconds; - cell measures for the combination of four axes - representing time and three dimensional space could have - canonical units of metres cubed seconds. + axes. For example, cell measures for a time axis are + the time span for each cell with canonical units of + seconds; cell measures for the combination of four + axes representing time and three dimensional space + could have canonical units of metres cubed seconds. .. note:: Specifying cell volume weights via ``weights=['X', 'Y', 'Z']`` or - ``weights=['area', 'Z']`` (or other equivalents) - will produce **an incorrect result if the - vertical dimension coordinates do not define the - actual height or depth thickness of every cell - in the domain**. In this case, - ``weights='volume'`` should be used instead, - which requires the field construct to have a - "volume" cell measure construct. - - If ``weights=True`` then care also needs to be - taken, as a "volume" cell measure construct will - be used if present, otherwise the cell volumes - will be calculated using the size of the - vertical coordinate cells. + ``weights=['area', 'Z']`` (or other + equivalents) will produce **an incorrect + result if the vertical dimension coordinates + do not define the actual height or depth + thickness of every cell in the domain**. In + this case, ``weights='volume'`` should be + used instead, which requires the field + construct to have a "volume" cell measure + construct. + + If ``weights=True`` then care also needs to + be taken, as a "volume" cell measure + construct will be used if present, otherwise + the cell volumes will be calculated using + the size of the vertical coordinate cells. radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: ``f.radius(radius)``. See the `cf.Field.radius` for details. 
- By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constructs, then the + default radius taken as 6371229 metres. components: `bool`, optional If True then a dictionary of orthogonal weights components @@ -7799,17 +7803,17 @@ def bin( sample mean (Bessel's correction). radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: ``f.radius(radius)``. See the `cf.Field.radius` for details. - By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constructs, then the + default radius taken as 6371229 metres. great_circle: `bool`, optional If True then allow, if required, the derivation of i) area @@ -8941,17 +8945,17 @@ def collapse( .. versionadded:: 3.0.2 radius: optional - Specify the radius used for calculating the areas of cells - defined in spherical polar coordinates. The radius is that - which would be returned by this call of the field - construct's `~cf.Field.radius` method: + Specify the radius used for calculating the areas of + cells defined in spherical polar coordinates. The + radius is that which would be returned by this call of + the field construct's `~cf.Field.radius` method: ``f.radius(radius)``. See the `cf.Field.radius` for details. - By default *radius* is ``'earth'`` which means that if and - only if the radius can not found from the datums of any - coordinate reference constucts, then the default radius - taken as 6371229 metres. + By default *radius* is ``'earth'`` which means that if + and only if the radius can not found from the datums + of any coordinate reference constructs, then the + default radius taken as 6371229 metres. .. versionadded:: 3.0.2 @@ -9060,7 +9064,7 @@ def collapse( defined group size. Each subsequent group immediately follows the preceding one. By default each group contains the - consective run of elements whose + consecutive run of elements whose coordinate values lie within the group limits (see the *group_by* parameter). @@ -9083,7 +9087,7 @@ def collapse( spans the defined group size. Each subsequent group immediately follows the preceding one. By default each group - contains the consective run of elements + contains the consecutive run of elements whose coordinate values lie within the group limits (see the *group_by* parameter). @@ -9156,7 +9160,7 @@ def collapse( given grouped collapse. * The groups do not have to be in runs of - consective elements; they may be + consecutive elements; they may be scattered throughout the axis. * An element which corresponds to a @@ -9420,7 +9424,7 @@ def collapse( spans the defined group size. 
Each subsequent group immediately follows the preceding one. By default each group - contains the consective run of elements + contains the consecutive run of elements whose coordinate cells lie within the group limits (see the *group_by* parameter). @@ -9507,7 +9511,7 @@ def collapse( and spans the defined group size. Each subsequent group immediately follows the preceding one. By default each group - contains the consective run of elements + contains the consecutive run of elements whose coordinate cells lie within the group limits (see the *group_by* parameter). @@ -13077,16 +13081,16 @@ def match_by_construct(self, *identities, OR=False, **conditions): A construct identity is specified by a string (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. + ``'ncvar%lat'``, etc.); or a compiled regular + expression (e.g. ``re.compile('^atmosphere')``) that + selects the relevant constructs whose identities match + via `re.search`. - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: + Each construct has a number of identities, and is + selected if any of them match any of those provided. A + construct's identities are those returned by its + `!identities` method. In the following example, the + construct ``x`` has six identities: >>> x.identities() ['time', @@ -13098,27 +13102,28 @@ def match_by_construct(self, *identities, OR=False, **conditions): A construct key may optionally have the ``'key%'`` prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. + ``'key%dimensioncoordinate2'`` are both acceptable + keys. Note that in the output of a `print` call or `!dump` method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. + identities, and so this description may always be used + as an *identity* argument. If a cell method construct identity is given (such as - ``'method:mean'``) then it will only be compared with the - most recently applied cell method operation. + ``'method:mean'``) then it will only be compared with + the most recently applied cell method operation. - Alternatively, one or more cell method constucts may be - identified in a single string with a CF-netCDF cell + Alternatively, one or more cell method constructs may + be identified in a single string with a CF-netCDF cell methods-like syntax for describing both the collapse dimensions, the collapse method, and any cell method - construct qualifiers. If N cell methods are described in - this way then they will collectively identify the N most - recently applied cell method operations. For example, - ``'T: maximum within years T: mean over years'`` will be - compared with the most two most recently applied cell - method operations. + construct qualifiers. If N cell methods are described + in this way then they will collectively identify the N + most recently applied cell method operations. For + example, ``'T: maximum within years T: mean over + years'`` will be compared with the most two most + recently applied cell method operations. 
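A rough usage sketch of this matching, assuming a field `f` (for instance the one returned by `cf.example_field(0)`) that has latitude and longitude coordinates and whose most recently applied cell method has the method ``mean``:

    import cf

    f = cf.example_field(0)

    f.match_by_construct('latitude')               # match a single identity
    f.match_by_construct('latitude', 'longitude')  # all identities must match
    f.match_by_construct('method:mean')            # compared only with the most
                                                   # recently applied cell method
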
*Parameter example:* ``'measure:area'`` @@ -13479,7 +13484,7 @@ def moving_window( By default *radius* is ``'earth'`` which means that if and only if the radius can not found from the datums - of any coordinate reference constucts, then the + of any coordinate reference constructs, then the default radius taken as 6371229 metres. great_circle: `bool`, optional diff --git a/cf/fieldlist.py b/cf/fieldlist.py index 633791e903..a5ff4d8eea 100644 --- a/cf/fieldlist.py +++ b/cf/fieldlist.py @@ -1,12 +1,7 @@ from . import mixin from . import ConstructList -from .functions import ( - _DEPRECATION_ERROR, - _DEPRECATION_ERROR_KWARGS, - _DEPRECATION_ERROR_METHOD, - _DEPRECATION_ERROR_DICT, -) +from .functions import _DEPRECATION_ERROR_METHOD class FieldList(mixin.FieldDomainList, ConstructList): @@ -18,7 +13,7 @@ class FieldList(mixin.FieldDomainList, ConstructList): indexing and methods like `!append`). These methods provide functionality similar to that of a :ref:`built-in list `. The main difference is that when a field - construct element needs to be assesed for equality its + construct element needs to be assessed for equality its `~cf.Field.equals` method is used, rather than the ``==`` operator. @@ -66,7 +61,7 @@ def select_by_naxes(self, *naxes): """Select field constructs by property. To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_naxes` method of the constuct + with `~cf.Field.match_by_naxes` method of the construct elements. For example, to select all constructs which do *not* have 3-dimensional data: @@ -108,7 +103,7 @@ def select_by_units(self, *units, exact=True): """Select field constructs by units. To find the inverse of the selection, use a list comprehension - with `~cf.Field.match_by_units` method of the constuct + with `~cf.Field.match_by_units` method of the construct elements. For example, to select all constructs whose units are *not* ``'km'``: @@ -230,66 +225,6 @@ def select_field(self, *identities, default=ValueError()): return out[0] - # ---------------------------------------------------------------- - # Aliases - # ---------------------------------------------------------------- - def select(self, *identities, **kwargs): - """Alias of `cf.FieldList.select_by_identity`. - - To find the inverse of the selection, use a list comprehension - with the `~cf.Field.match_by_identity` method of the field - constucts. For example, to select all field constructs whose - identity is *not* ``'air_temperature'``: - - >>> gl = cf.FieldList(f for f in fl - ... if not f.match_by_identity('air_temperature')) - - .. seealso:: `select_by_identity`, `select_field` - - """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, - "select", - kwargs, - "Use methods 'select_by_units', 'select_by_construct', " - "'select_by_properties', 'select_by_naxes', 'select_by_rank' " - "instead.", - ) # pragma: no cover - - if identities and isinstance(identities[0], (list, tuple, set)): - _DEPRECATION_ERROR( - "Use of a {!r} for identities has been deprecated. Use the " - "* operator to unpack the arguments instead.".format( - identities[0].__class__.__name__ - ) - ) # pragma: no cover - - for i in identities: - if isinstance(i, dict): - _DEPRECATION_ERROR_DICT( - "Use methods 'select_by_units', 'select_by_construct', " - "'select_by_properties', 'select_by_naxes', " - "'select_by_rank' instead." 
- ) # pragma: no cover - - if isinstance(i, str) and ":" in i: - error = True - if "=" in i: - index0 = i.index("=") - index1 = i.index(":") - error = index0 > index1 - - if error: - _DEPRECATION_ERROR( - "The identity format {!r} has been deprecated at " - "version 3.0.0. Try {!r} instead.".format( - i, i.replace(":", "=", 1) - ) - ) # pragma: no cover - - return self.select_by_identity(*identities) - # ---------------------------------------------------------------- # Deprecated attributes and methods # ---------------------------------------------------------------- diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 14926f7443..d096d4211f 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -66,7 +66,7 @@ def _construct( any number of constructs. identities: sequence - As for the *identities* parmaeter of the calling + As for the *identities* parameter of the calling method. {{key: `bool`, optional}} @@ -204,7 +204,7 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): ) if axes in parsed: - # The axes are the same as an exisiting key + # The axes are the same as an existing key parsed[axes].append((axes, key, construct, value)) else: new_key = True @@ -1621,7 +1621,7 @@ def coordinate_reference_domain_axes(self, identity=None): :Returns: `set` - The identifiers of the domain axis constructs that san + The identifiers of the domain axis constructs that span the data of all coordinate and domain ancillary constructs used by the selected coordinate reference construct. @@ -1748,7 +1748,7 @@ def direction(self, identity, axes=None, **kwargs): domain construct when there is only one of them. axes: deprecated at version 3.0.0 - Use the *identity* parmeter instead. + Use the *identity* parameter instead. size: deprecated at version 3.0.0 @@ -2336,7 +2336,7 @@ def set_construct( autocyclic: `dict`, optional Additional parameters for optimizing the operation, - raelating to coordinate periodicity and cyclicity. See + relating to coordinate periodicity and cyclicity. See the code for details. .. versionadded:: 3.9.0 @@ -2446,7 +2446,7 @@ def set_coordinate_reference( The construct identifier to be used for the construct. If not set then a new, unique identifier is created automatically. If the identifier already - exisits then the exisiting construct will be replaced. + exists then the existing construct will be replaced. *Parameter example:* ``key='coordinatereference1'`` @@ -2463,7 +2463,7 @@ def set_coordinate_reference( :Returns: `str` - The construct identifier for the coordinate refernece + The construct identifier for the coordinate reference construct. """ diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py index 3d07b5bba6..aa0db17605 100644 --- a/cf/mixin/fielddomainlist.py +++ b/cf/mixin/fielddomainlist.py @@ -8,7 +8,7 @@ def select_by_construct(self, *identities, OR=False, **conditions): """Select elements by their metadata constructs. To find the inverse of the selection, use a list comprehension - with the !match_by_construct` method of the constuct + with the !match_by_construct` method of the construct elements. For example, to select all constructs that do *not* have a "latitude" metadata construct: @@ -43,7 +43,7 @@ def select_by_construct(self, *identities, OR=False, **conditions): ``'method:mean'``) then it will only be compared with the most recently applied cell method operation. 
- Alternatively, one or more cell method constucts may + Alternatively, one or more cell method constructs may be identified in a single string with a CF-netCDF cell methods-like syntax for describing both the collapse dimensions, the collapse method, and any cell method @@ -170,7 +170,7 @@ def select_by_ncvar(self, *ncvars): """Select list elements by netCDF variable name. To find the inverse of the selection, use a list comprehension - with the `!match_by_ncvar` method of the constuct + with the `!match_by_ncvar` method of the construct elements. For example, to select all constructs which do *not* have a netCDF name of 'tas': @@ -238,7 +238,7 @@ def select_by_property(self, *mode, **properties): """Select list elements by property. To find the inverse of the selection, use a list comprehension - with the `!match_by_property` method of the constuct + with the `!match_by_property` method of the construct elements. For example, to select all constructs which do *not* have a long_name property of "Pressure": diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index c02d851d98..735c871ba2 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -558,7 +558,7 @@ def __init__( kwargs: *optional* Keyword arguments providing extra CF properties for each - return field constuct. + return field construct. """ self._bool = False diff --git a/cf/read_write/write.py b/cf/read_write/write.py index 2e1071c44b..d509fa48f6 100644 --- a/cf/read_write/write.py +++ b/cf/read_write/write.py @@ -532,8 +532,8 @@ def write( If False then create a "flat" netCDF file, i.e. one with only the root group, regardless of any group structure specified by the field constructs. By default any groups - defined by the netCDF interface of the field constucts and - its components will be created and populated. + defined by the netCDF interface of the field constructs + and its components will be created and populated. .. 
versionadded:: 3.6.0 diff --git a/docs/source/spelling_false_positives.txt b/docs/source/spelling_false_positives.txt index 2f94f0ecf6..9fc700b7bf 100644 --- a/docs/source/spelling_false_positives.txt +++ b/docs/source/spelling_false_positives.txt @@ -29,6 +29,7 @@ autodetected autoperiod aux AuxiliaryCoordinate +auxiliarycoordinate auxs basenames bilinear @@ -70,10 +71,8 @@ climatological Climatological Colour conda +config conformant -consective -constuct -constucts contigious contiguousness contruct @@ -362,6 +361,7 @@ TimeDuration timeseries timeSeries timeSeriesProfile +todict tolist trac trajectoryProfile From aa6ea3ca30214ec86a1b03caa5cd1430319a6a2a Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 26 Apr 2021 15:54:35 +0100 Subject: [PATCH 38/53] optimise 'cf.read(select=)' for PP/UM files --- cf/read_write/read.py | 58 ++++++++++++++------- cf/read_write/um/umread.py | 103 ++++++++++++++++++++++++++++--------- cf/test/test_pp.py | 4 ++ 3 files changed, 121 insertions(+), 44 deletions(-) diff --git a/cf/read_write/read.py b/cf/read_write/read.py index 3710d4cafc..1ca2836cbe 100644 --- a/cf/read_write/read.py +++ b/cf/read_write/read.py @@ -4,19 +4,25 @@ from glob import glob from os.path import isdir -from .netcdf import NetCDFRead -from .um import UMRead - from ..cfimplementation import implementation - from ..fieldlist import FieldList - from ..aggregate import aggregate as cf_aggregate - from ..decorators import _manage_log_level_via_verbosity - +from ..query import Query from ..functions import flat, _DEPRECATION_ERROR_FUNCTION_KWARGS +from .netcdf import NetCDFRead +from .um import UMRead + +# TODO - replace the try block with "from re import Pattern" when +# Python 3.6 is deprecated +try: + from re import Pattern +except ImportError: # pragma: no cover + python36 = True # pragma: no cover +else: + python36 = False + # -------------------------------------------------------------------- # Create an implementation container and initialize a read object for @@ -334,13 +340,12 @@ def read( ``f.match_by_identity(*select)`` is `True`. See `cf.Field.match_by_identity` for details. - This is equivalent to, but possibly faster than, not using - the *select* parameter but applying its value to the - returned field list with its - `cf.FieldList.select_by_identity` method. For example, - ``fl = cf.read(file, select='air_temperature')`` is - equivalent to - ``fl = cf.read(file).select_by_identity('air_temperature')``. + This is equivalent to, but faster than, not using the + *select* parameter but applying its value to the returned + field list with its `cf.FieldList.select_by_identity` + method. For example, ``fl = cf.read(file, + select='air_temperature')`` is equivalent to ``fl = + cf.read(file).select_by_identity('air_temperature')``. 
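For illustration (the file name ``'file.nc'`` is made up), the *select* value may be a string identity or a compiled regular expression, and pre-filtering on read gives the same result as filtering the returned field list afterwards:

    import re
    import cf

    # Assuming 'file.nc' contains an air_temperature field:
    fl = cf.read('file.nc', select='air_temperature')

    # A compiled regular expression is matched against each identity:
    fl = cf.read('file.nc', select=re.compile('^air'))

    # Equivalent to, but slower than, the first call:
    fl = cf.read('file.nc').select_by_identity('air_temperature')
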
recursive: `bool`, optional If True then recursively read sub-directories of any @@ -558,8 +563,16 @@ def read( ) # pragma: no cover # Parse select - if isinstance(select, str): - select = (select,) + # TODO - delete the "if python36:" clause when Python 3.6 is + # deprecated + if python36: + if isinstance(select, (str, Query)) or hasattr( + select, "search" + ): # pragma: no cover + select = (select,) # pragma: no cover + else: + if isinstance(select, (str, Query, Pattern)): + select = (select,) if squeeze and unsqueeze: raise ValueError("squeeze and unsqueeze can not both be True") @@ -678,10 +691,11 @@ def read( chunk=chunk, mask=mask, warn_valid=warn_valid, + select=select, ) # -------------------------------------------------------- - # Select matching fields (not from UM files) + # Select matching fields (not from UM files, yet) # -------------------------------------------------------- if select and ftype != "UM": fields = fields.select_by_identity(*select) @@ -714,9 +728,8 @@ def read( n = len(field_list) # pragma: no cover logger.info( - "{0} input field{1} aggregated into {2} field{3}".format( - org_len, _plural(org_len), n, _plural(n) - ) + f"{org_len} input field{_plural(org_len)} aggregated into " + f"{n} field{ _plural(n)}" ) # pragma: no cover # ---------------------------------------------------------------- @@ -794,6 +807,7 @@ def _read_a_file( chunk=True, mask=True, warn_valid=False, + select=None, ): """Read the contents of a single file into a field list. @@ -846,6 +860,9 @@ def _read_a_file( that is set (up to a maximum of ``3``/``'DETAIL'``) for increasing verbosity, the more description that is printed. + select: optional + For `read. Ignored for a netCDF file. + :Returns: `FieldList` @@ -950,6 +967,7 @@ def _read_a_file( word_size=word_size, endian=endian, chunk=chunk, + select=select, ) # , mask=mask, warn_valid=warn_valid) # PP fields are aggregated intrafile prior to interfile diff --git a/cf/read_write/um/umread.py b/cf/read_write/um/umread.py index 735c871ba2..1812339795 100644 --- a/cf/read_write/um/umread.py +++ b/cf/read_write/um/umread.py @@ -27,7 +27,9 @@ from netCDF4 import date2num as netCDF4_date2num import cftime + import cfdm +from cfdm import Constructs from ... 
import __version__, __Conventions__ from ...decorators import ( @@ -507,7 +509,8 @@ def __init__( height_at_top_of_model, verbose=None, implementation=None, - **kwargs + select=None, + **kwargs, ): """**Initialization** @@ -672,12 +675,67 @@ def __init__( LBCODE = int_hdr[lbcode] LBPROC = int_hdr[lbproc] LBVC = int_hdr[lbvc] + stash = int_hdr[lbuser4] LBUSER5 = int_hdr[lbuser5] + submodel = int_hdr[lbuser7] BPLAT = real_hdr[bplat] BPLON = real_hdr[bplon] BDX = real_hdr[bdx] BDY = real_hdr[bdy] + if stash: + section, item = divmod(stash, 1000) + um_stash_source = "m%02ds%02di%03d" % (submodel, section, item) + else: + um_stash_source = None + + header_um_version, source = divmod(int_hdr[lbsrce], 10000) + + if header_um_version > 0 and int(um_version) == um_version: + model_um_version = header_um_version + self.um_version = header_um_version + else: + model_um_version = None + self.um_version = um_version + + # Set source + source = _lbsrce_model_codes.setdefault(source, None) + if source is not None and model_um_version is not None: + source += f" vn{model_um_version}" + + # Only process the requested fields + ok = True + if select: + values1 = ( + f"stash_code={stash}", + f"lbproc={LBPROC}", + f"lbtim={LBTIM}", + f"runid={self.decode_lbexp()}", + f"submodel={submodel}", + ) + if um_stash_source is not None: + values1 += (f"um_stash_source={um_stash_source}",) + if source: + values1 += (f"source={source}",) + + ok = False + for value0 in select: + for value1 in values1: + ok = Constructs._matching_values( + value0, None, value1, basic=True + ) + if ok: + break + + if ok: + break + + if not ok: + # This PP/UM ield does not match the requested selection + self.field = (None,) + return + + # Still here? self.lbnpt = LBNPT self.lbrow = LBROW self.lbtim = LBTIM @@ -705,19 +763,6 @@ def __init__( self.calendar = calendar self.reference_time_Units() - header_um_version, source = divmod(int_hdr[lbsrce], 10000) - - if header_um_version > 0 and int(um_version) == um_version: - model_um_version = header_um_version - self.um_version = header_um_version - else: - model_um_version = None - self.um_version = um_version - - # Set source - source = _lbsrce_model_codes.setdefault(source, None) - if source is not None and model_um_version is not None: - source += " vn{0}".format(model_um_version) if source: cf_properties["source"] = source @@ -764,8 +809,7 @@ def __init__( # Set a identifying name based on the submodel and STASHcode # (or field code). - stash = int_hdr[lbuser4] - submodel = int_hdr[lbuser7] + # stash = int_hdr[lbuser4]# self.stash = stash # The STASH code has been set in the PP header, so try to find @@ -821,21 +865,17 @@ def __init__( break - if stash: - section, item = divmod(stash, 1000) - um_stash_source = "m%02ds%02di%03d" % (submodel, section, item) + if um_stash_source is not None: cf_properties["um_stash_source"] = um_stash_source - identity = "UM_{0}_vn{1}".format(um_stash_source, self.um_version) + identity = f"UM_{um_stash_source}_vn{self.um_version}" else: - identity = "UM_{0}_fc{1}_vn{2}".format( - submodel, int_hdr[lbfc], self.um_version - ) + identity = f"UM_{submodel}_fc{int_hdr[lbfc]}_vn{self.um_version}" if um_Units is None: self.um_Units = _Units[None] if um_condition: - identity += "_{0}".format(um_condition) + identity += f"_{um_condition}" if long_name is None: cf_properties["long_name"] = identity @@ -3282,6 +3322,7 @@ def read( fmt=None, chunk=True, verbose=None, + select=None, ): """Read fields from a PP file or UM fields file. 
@@ -3331,6 +3372,19 @@ def read( set_standard_name: `bool`, optional + select: (sequence of) `str` or `Query` or `re.Pattern`, optional + Only return field constructs whose identities match the + given values(s), i.e. those fields ``f`` for which + ``f.match_by_identity(*select)`` is `True`. See + `cf.Field.match_by_identity` for details. + + This is equivalent to, but faster than, not using the + *select* parameter but applying its value to the returned + field list with its `cf.FieldList.select_by_identity` + method. For example, ``fl = cf.read(file, + select='stash_code=3236')`` is equivalent to ``fl = + cf.read(file).select_by_identity('stash_code=3236')``. + :Returns: `list` @@ -3375,6 +3429,7 @@ def read( height_at_top_of_model=height_at_top_of_model, verbose=verbose, implementation=self.implementation, + select=select, ) for var in f.vars ] diff --git a/cf/test/test_pp.py b/cf/test/test_pp.py index ba8da09918..92d6c3311d 100644 --- a/cf/test/test_pp.py +++ b/cf/test/test_pp.py @@ -69,6 +69,10 @@ def test_stash2standard_name(self): e = cf.stash2standard_name() self.assertNotEqual(d, e) + def test_PP_select(self): + f = cf.read(self.ppfile, select="lbproc=0") + self.assertEqual(len(f), 1) + def test_PP_WGDOS_UNPACKING(self): f = cf.read(self.ppfile)[0] From d0b10337868fa743d11e536057cd9d5911799053 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 28 Apr 2021 18:39:42 +0100 Subject: [PATCH 39/53] devs --- cf/field.py | 252 ---------------------------------------- cf/mixin/fielddomain.py | 251 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 251 insertions(+), 252 deletions(-) diff --git a/cf/field.py b/cf/field.py index d8d516865d..8cb9d3cb26 100644 --- a/cf/field.py +++ b/cf/field.py @@ -2397,74 +2397,6 @@ def _binary_operation(self, other, method): # ------------------------------------------------------------ return field0 - def _conform_coordinate_references(self, key, coordref=None): - """Where possible replace the content of coordiante reference - construct coordinates with coordinate construct keys. - - .. versionadded:: 3.0.0 - - :Parameters: - - key: `str` - Coordinate construct key. - - coordref: `CoordianteReference`, optional - - .. versionadded:: 3.6.0 - - :Returns: - - `None` - - **Examples:** - - >>> f._conform_coordinate_references('auxiliarycoordinate1') - >>> f._conform_coordinate_references('auxiliarycoordinate1', - ... coordref=cr) - - """ - identity = self.constructs[key].identity(strict=True) - - if coordref is None: - refs = self.coordinate_references(todict=True).values() - else: - refs = [coordref] - - for ref in refs: - if identity in ref.coordinates(): - ref.del_coordinate(identity, None) - ref.set_coordinate(key) - - def _coordinate_reference_axes(self, key): - """Returns the field's set of coordinate reference axes for a - key. - - :Parameters: - - key: `str` - Coordinate reference construct key. - - :Returns: - - `set` - - **Examples:** - - >>> f._coordinate_reference_axes('coordinatereference0') - - """ - ref = self.constructs[key] - - axes = [] - - for c_key in ref.coordinates(): - axes.extend(self.get_data_axes(c_key)) - - for da_key in ref.coordinate_conversion.domain_ancillaries().values(): - axes.extend(self.get_data_axes(da_key)) - - return set(axes) - def _conform_cell_methods(self): """Changes the axes of the field's cell methods so they conform. 
@@ -2488,190 +2420,6 @@ def _conform_cell_methods(self): cm.change_axes(axis_map, inplace=True) - @_manage_log_level_via_verbosity - def _equivalent_coordinate_references( - self, - field1, - key0, - key1, - atol=None, - rtol=None, - s=None, - t=None, - verbose=None, - axis_map=None, - ): - """True if coordinate reference constructs are equivalent. - - Two real numbers ``x`` and ``y`` are considered equal if - ``|x-y|<=atol+rtol|y|``, where ``atol`` (the tolerance on absolute - differences) and ``rtol`` (the tolerance on relative differences) - are positive, typically very small numbers. See the *atol* and - *rtol* parameters. - - :Parameters: - - ref0: `CoordinateReference` - - ref1: `CoordinateReference` - - field1: `Field` - The field which contains *ref1*. - - :Returns: - - `bool` - - """ - ref0 = self.coordinate_references(todict=True)[key0] - ref1 = field1.coordinate_references(todict=True)[key1] - - if not ref0.equivalent(ref1, rtol=rtol, atol=atol, verbose=verbose): - logger.info( - f"{self.__class__.__name__}: Non-equivalent coordinate " - f"references ({ref0!r}, {ref1!r})" - ) # pragma: no cover - return False - - # Compare the domain ancillaries - # TODO consider case of None key ? - for ( - term, - identifier0, - ) in ref0.coordinate_conversion.domain_ancillaries().items(): - if identifier0 is None: - continue - - identifier1 = ref1.coordinate_conversion.domain_ancillaries()[term] - - # key0 = domain_ancillaries.filter_by_key(identifier0).key() - # key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() - - if not self._equivalent_construct_data( - field1, - key0=identifier0, # key0, - key1=identifier1, # key1, - rtol=rtol, - atol=atol, - s=s, - t=t, - verbose=verbose, - axis_map=axis_map, - ): - # add traceback TODO - return False - - return True - - def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): - """Parse axes for the set_construct method. - - :Parameters: - - item: metadata construct - - axes: (sequence of) `str or `int`, optional - - allow_scalar: `bool`, optional - - :Returns: - - `list` - - """ - data = item.get_data(None, _fill_value=False) - - if axes is None: - # -------------------------------------------------------- - # The axes have not been set => infer the axes. - # -------------------------------------------------------- - if data is not None: - shape = item.shape - if allow_scalar and shape == (): - axes = [] - else: - if not allow_scalar and not shape: - shape = (1,) - - if not shape or len(shape) != len(set(shape)): - raise ValueError( - f"Can't insert {item!r}: Ambiguous shape: " - f"{shape}. Consider setting the 'axes' parameter." - ) - - domain_axes = self.domain_axes(todict=True) - axes = [] - axes_sizes = [ - domain_axis.get_size(None) - for domain_axis in domain_axes.values() - ] - for n in shape: - if not axes_sizes.count(n): - raise ValueError( - f"Can't insert {item!r}: There is no " - f"domain axis construct with size {n}." - ) - - if axes_sizes.count(n) == 1: - da_key = self.domain_axis( - filter_by_size=(n,), key=True - ) - axes.append(da_key) - else: - raise ValueError( - f"Can't insert {item!r}: Ambiguous shape: " - "f{shape}. Consider setting the 'axes' " - "parameter." 
- ) - else: - # -------------------------------------------------------- - # Axes have been provided - # -------------------------------------------------------- - if isinstance(axes, (str, int)): - axes = (axes,) - - if axes and data is not None: - ndim = item.ndim - if not ndim and not allow_scalar: - ndim = 1 - - if isinstance(axes, (str, int)): - axes = (axes,) - - if len(axes) != ndim or len(set(axes)) != ndim: - raise ValueError( - f"Can't insert {item!r}: Incorrect number of given " - f"axes (got {len(set(axes))}, expected {ndim})" - ) - - domain_axes = self.domain_axes(todict=True) - axes2 = [] - for axis, size in zip(axes, item.data.shape): - dakey = self.domain_axis( - axis, - key=True, - default=ValueError(f"Unknown axis: {axis!r}"), - ) - - axis_size = domain_axes[dakey].get_size(None) - if size != axis_size: - raise ValueError( - f"Can't insert {item!r}: Mismatched axis size " - f"({size} != {axis_size})" - ) - - axes2.append(dakey) - - axes = axes2 - - if ndim != len(set(axes)): - raise ValueError( - f"Can't insert {item!r}: Mismatched number of axes " - f"({len(set(axes))} != {ndim})" - ) - - return axes - def _conform_for_assignment(self, other, check_coordinates=False): """Conform *other* so that it is ready for metadata-unaware assignment broadcasting across *self*. diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index d096d4211f..aba2304ea2 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -40,6 +40,73 @@ class FieldDomain: """ + def _coordinate_reference_axes(self, key): + """Returns the set of coordinate reference axes for a key. + + :Parameters: + + key: `str` + Coordinate reference construct key. + + :Returns: + + `set` + + **Examples:** + + >>> f._coordinate_reference_axes('coordinatereference0') + + """ + ref = self.constructs[key] + + axes = [] + + for c_key in ref.coordinates(): + axes.extend(self.get_data_axes(c_key)) + + for da_key in ref.coordinate_conversion.domain_ancillaries().values(): + axes.extend(self.get_data_axes(da_key)) + + return set(axes) + + def _conform_coordinate_references(self, key, coordref=None): + """Where possible replace the content of coordiante reference + construct coordinates with coordinate construct keys. + + .. versionadded:: 3.0.0 + + :Parameters: + + key: `str` + Coordinate construct key. + + coordref: `CoordinateReference`, optional + + .. versionadded:: 3.6.0 + + :Returns: + + `None` + + **Examples:** + + >>> f._conform_coordinate_references('auxiliarycoordinate1') + >>> f._conform_coordinate_references('auxiliarycoordinate1', + ... coordref=cr) + + """ + identity = self.constructs[key].identity(strict=True) + + if coordref is None: + refs = self.coordinate_references(todict=True).values() + else: + refs = (coordref,) + + for ref in refs: + if identity in ref.coordinates(): + ref.del_coordinate(identity, None) + ref.set_coordinate(key) + def _construct( self, _method, @@ -115,6 +182,81 @@ def _construct( "constructs", ) + @_manage_log_level_via_verbosity + def _equivalent_coordinate_references( + self, + field1, + key0, + key1, + atol=None, + rtol=None, + s=None, + t=None, + verbose=None, + axis_map=None, + ): + """True if coordinate reference constructs are equivalent. + + Two real numbers ``x`` and ``y`` are considered equal if + ``|x-y|<=atol+rtol|y|``, where ``atol`` (the tolerance on absolute + differences) and ``rtol`` (the tolerance on relative differences) + are positive, typically very small numbers. See the *atol* and + *rtol* parameters. 
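A small numerical sketch of the tolerance test quoted above; the `atol` and `rtol` values and the sample numbers are arbitrary illustrations rather than the library defaults:

    >>> atol, rtol = 1e-8, 1e-5
    >>> x, y = 100.0, 100.000001
    >>> abs(x - y) <= atol + rtol * abs(y)
    True
    >>> x, y = 100.0, 100.1
    >>> abs(x - y) <= atol + rtol * abs(y)
    False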
+ + :Parameters: + + ref0: `CoordinateReference` + + ref1: `CoordinateReference` + + field1: `Field` + The field which contains *ref1*. + + :Returns: + + `bool` + + """ + ref0 = self.coordinate_references(todict=True)[key0] + ref1 = field1.coordinate_references(todict=True)[key1] + + if not ref0.equivalent(ref1, rtol=rtol, atol=atol, verbose=verbose): + logger.info( + f"{self.__class__.__name__}: Non-equivalent coordinate " + f"references ({ref0!r}, {ref1!r})" + ) # pragma: no cover + return False + + # Compare the domain ancillaries + # TODO consider case of None key ? + for ( + term, + identifier0, + ) in ref0.coordinate_conversion.domain_ancillaries().items(): + if identifier0 is None: + continue + + identifier1 = ref1.coordinate_conversion.domain_ancillaries()[term] + + # key0 = domain_ancillaries.filter_by_key(identifier0).key() + # key1 = field1_domain_ancillaries.filter_by_key(identifier1).key() + + if not self._equivalent_construct_data( + field1, + key0=identifier0, # key0, + key1=identifier1, # key1, + rtol=rtol, + atol=atol, + s=s, + t=t, + verbose=verbose, + axis_map=axis_map, + ): + # add traceback TODO + return False + + return True + def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): """Create indices that define a subspace of the field or domain construct. @@ -681,6 +823,115 @@ def _roll_constructs(self, axis, shift): return shift + def _set_construct_parse_axes(self, item, axes=None, allow_scalar=True): + """Parse axes for the set_construct method. + + :Parameters: + + item: metadata construct + + axes: (sequence of) `str or `int`, optional + + allow_scalar: `bool`, optional + + :Returns: + + `list` + + """ + data = item.get_data(None, _fill_value=False) + + if axes is None: + # -------------------------------------------------------- + # The axes have not been set => infer the axes. + # -------------------------------------------------------- + if data is not None: + shape = item.shape + if allow_scalar and shape == (): + axes = [] + else: + if not allow_scalar and not shape: + shape = (1,) + + if not shape or len(shape) != len(set(shape)): + raise ValueError( + f"Can't insert {item!r}: Ambiguous shape: " + f"{shape}. Consider setting the 'axes' parameter." + ) + + domain_axes = self.domain_axes(todict=True) + axes = [] + axes_sizes = [ + domain_axis.get_size(None) + for domain_axis in domain_axes.values() + ] + + for n in shape: + if not axes_sizes.count(n): + raise ValueError( + f"Can't insert {item!r}: There is no " + f"domain axis construct with size {n}." + ) + + if axes_sizes.count(n) != 1: + raise ValueError( + f"Can't insert {item!r}: Ambiguous shape: " + "f{shape}. Consider setting the 'axes' " + "parameter." 
+ ) + + da_key = self.domain_axis( + filter_by_size=(n,), key=True + ) + axes.append(da_key) + else: + # -------------------------------------------------------- + # Axes have been provided + # -------------------------------------------------------- + if isinstance(axes, (str, int)): + axes = (axes,) + + if axes and data is not None: + ndim = item.ndim + if not ndim and not allow_scalar: + ndim = 1 + + if isinstance(axes, (str, int)): + axes = (axes,) + + if len(axes) != ndim or len(set(axes)) != ndim: + raise ValueError( + f"Can't insert {item!r}: Incorrect number of given " + f"axes (got {len(set(axes))}, expected {ndim})" + ) + + axes2 = [] + for axis, size in zip(axes, item.data.shape): + da_key, domain_axis = self.domain_axis( + axis, + item=True, + default=ValueError(f"Unknown axis: {axis!r}"), + ) + + axis_size = domain_axis.get_size(None) + if size != axis_size: + raise ValueError( + f"Can't insert {item!r}: Mismatched axis size " + f"({size} != {axis_size})" + ) + + axes2.append(da_key) + + axes = axes2 + + if ndim != len(set(axes)): + raise ValueError( + f"Can't insert {item!r}: Mismatched number of axes " + f"({len(set(axes))} != {ndim})" + ) + + return axes + # ---------------------------------------------------------------- # Methods # ---------------------------------------------------------------- From c11bce0395e820595ca28de23e258b495407a667 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 30 Apr 2021 09:43:48 +0100 Subject: [PATCH 40/53] fix has_units behaviour --- cf/data/data.py | 171 +++++++++++++++++++++++++++++++++++++++++-- cf/test/test_Data.py | 61 ++++++++++++++- 2 files changed, 224 insertions(+), 8 deletions(-) diff --git a/cf/data/data.py b/cf/data/data.py index ecf06d1517..e5ca83eedb 100644 --- a/cf/data/data.py +++ b/cf/data/data.py @@ -6143,7 +6143,7 @@ def _collapse( inplace=False, i=False, _preserve_partitions=False, - **kwargs + **kwargs, ): """Collapse the data. @@ -6468,7 +6468,7 @@ def _collapse( mtol, _preserve_partitions=_preserve_partitions, _parallelise_collapse_subspace=_parallelise_collapse_sub, - **kwargs + **kwargs, ) partition.close(keep_in_memory=keep_in_memory) @@ -6541,7 +6541,7 @@ def _collapse_subspace( weights=None, _preserve_partitions=False, _parallelise_collapse_subspace=True, - **kwargs + **kwargs, ): """Collapse a subspace of a data array. @@ -9515,7 +9515,7 @@ def set_calendar(self, calendar): def set_units(self, value): """Set the units. - .. seealso:: `del_units`, `get_units` + .. seealso:: `del_units`, `get_units`, `has_units` :Parameters: @@ -11573,6 +11573,67 @@ def halo( return d + def has_calendar(self): + """Whether a calendar has been set. + + .. seealso:: `del_calendar`, `get_calendar`, `set_calendar`, + `has_units` + + :Returns: + + `bool` + True if the calendar has been set, otherwise False. + + **Examples:** + + >>> d.set_calendar('360_day') + >>> d.has_calendar() + True + >>> d.get_calendar() + '360_day' + >>> d.del_calendar() + >>> d.has_calendar() + False + >>> d.get_calendar() + ValueError: Can't get non-existent calendar + >>> print(d.get_calendar(None)) + None + >>> print(d.del_calendar(None)) + None + + """ + return hasattr(self.Units, "calendar") + + def has_units(self): + """Whether units have been set. + + .. seealso:: `del_units`, `get_units`, `set_units`, `has_calendar` + + :Returns: + + `bool` + True if units have been set, otherwise False. 
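The new predicates reduce to checking the `Units` container for the relevant attribute, so empty units count as set units, while a reference-time units string without an explicit calendar does not count as a set calendar. A doctest-style sketch consistent with the unit tests added later in this patch (the data values are arbitrary):

    >>> import cf
    >>> cf.Data(9).has_units()
    False
    >>> cf.Data(9, 'm').has_units()
    True
    >>> cf.Data(9, 'days since 2000-01-01').has_calendar()
    False
    >>> cf.Data(9, 'days since 2000-01-01', calendar='noleap').has_calendar()
    True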
+ + **Examples:** + + >>> d.set_units('metres') + >>> d.has_units() + True + >>> d.get_units() + 'metres' + >>> d.del_units() + >>> d.has_units() + False + >>> d.get_units() + ValueError: Can't get non-existent units + >>> print(d.get_units(None)) + None + >>> print(d.del_units(None)) + None + + """ + return hasattr(self.Units, "units") + @_inplace_enabled(default=False) def filled(self, fill_value=None, inplace=False): """Replace masked elements with the fill value. @@ -12409,6 +12470,106 @@ def mask_invalid(self, inplace=False, i=False): return d + def del_calendar(self, default=ValueError()): + """Delete the calendar. + + .. seealso:: `get_calendar`, `has_calendar`, `set_calendar`, + `del_units` + + :Parameters: + + default: optional + Return the value of the *default* parameter if the + calendar has not been set. + + {{default Exception}} + + :Returns: + + `str` + The value of the deleted calendar. + + **Examples:** + + >>> d.set_calendar('360_day') + >>> d.has_calendar() + True + >>> d.get_calendar() + '360_day' + >>> d.del_calendar() + >>> d.has_calendar() + False + >>> d.get_calendar() + ValueError: Can't get non-existent calendar + >>> print(d.get_calendar(None)) + None + >>> print(d.del_calendar(None)) + None + + """ + calendar = getattr(self.Units, "calendar", None) + + if calendar is not None: + self.override_calendar(None, inplace=True) + return calendar + + raise self._default( + default, f"{self.__class__.__name__} has no 'calendar' component" + ) + + def del_units(self, default=ValueError()): + """Delete the units. + + .. seealso:: `get_units`, `has_units`, `set_units`, `del_calendar` + + :Parameters: + + default: optional + Return the value of the *default* parameter if the units + has not been set. + + {{default Exception}} + + :Returns: + + `str` + The value of the deleted units. + + **Examples:** + + >>> d.set_units('metres') + >>> d.has_units() + True + >>> d.get_units() + 'metres' + >>> d.del_units() + >>> d.has_units() + False + >>> d.get_units() + ValueError: Can't get non-existent units + >>> print(d.get_units(None)) + None + >>> print(d.del_units(None)) + None + + """ + out = self.Units + + units = getattr(out, "units", None) + calendar = getattr(out, "calendar", None) + + if calendar is not None: + self.Units = Units(None, calendar) + else: + del self.Units + + if units is not None: + return units + + return self._default( + default, f"{self.__class__.__name__} has no 'units' component" + ) + @classmethod def masked_all(cls, shape, dtype=None, units=None, chunk=True): """Return a new data array of given shape and type with all @@ -14402,7 +14563,7 @@ def func( inplace=False, preserve_invalid=False, i=False, - **kwargs + **kwargs, ): """Apply an element-wise array operation to the data array. 
diff --git a/cf/test/test_Data.py b/cf/test/test_Data.py index afa4876b5e..473aef02cb 100644 --- a/cf/test/test_Data.py +++ b/cf/test/test_Data.py @@ -91,7 +91,7 @@ class DataTest(unittest.TestCase): mones = mones test_only = [] - test_only = ["NOTHING!!!!!"] + # test_only = ["NOTHING!!!!!"] # test_only = [ # 'test_Data_percentile', # 'test_Data_trigonometric_hyperbolic' @@ -3244,8 +3244,63 @@ def test_Data_filled(self): d = cf.Data(["a", "b", "c"], mask=[1, 0, 0]) self.assertTrue((d.filled().array == ["", "b", "c"]).all()) - -# --- End: class + def test_Data_del_units(self): + d = cf.Data(1) + with self.assertRaises(ValueError): + d.del_units() + + d = cf.Data(1, "") + self.assertEqual(d.del_units(), "") + d = cf.Data(1, "m") + self.assertEqual(d.del_units(), "m") + + d = cf.Data(1, "days since 2000-1-1") + self.assertTrue(d.del_units(), "days since 2000-1-1") + + d = cf.Data(1, "days since 2000-1-1", calendar="noleap") + with self.assertRaises(ValueError): + d.del_units() + + def test_Data_del_calendar(self): + d = cf.Data(1) + with self.assertRaises(ValueError): + d.del_calendar() + + d = cf.Data(1, "") + with self.assertRaises(ValueError): + d.del_calendar() + + d = cf.Data(1, "m") + with self.assertRaises(ValueError): + d.del_calendar() + + d = cf.Data(1, "days since 2000-1-1") + with self.assertRaises(ValueError): + d.del_calendar() + + d = cf.Data(1, "days since 2000-1-1", calendar="noleap") + self.assertTrue(d.del_calendar(), "noleap") + + def test_Data_has_units(self): + d = cf.Data(1) + self.assertFalse(d.has_units()) + d = cf.Data(1, "") + self.assertTrue(d.has_units()) + d = cf.Data(1, "m") + self.assertTrue(d.has_units()) + + def test_Data_has_calendar(self): + d = cf.Data(1) + self.assertFalse(d.has_calendar()) + d = cf.Data(1, "") + self.assertFalse(d.has_calendar()) + d = cf.Data(1, "m") + self.assertFalse(d.has_calendar()) + + d = cf.Data(1, "days since 2000-1-1") + self.assertFalse(d.has_calendar()) + d = cf.Data(1, "days since 2000-1-1", calendar="noleap") + self.assertTrue(d.has_calendar()) if __name__ == "__main__": From d36aacb07cf10e3a79472aee21eae278988c2e72 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Fri, 7 May 2021 13:09:22 +0100 Subject: [PATCH 41/53] docs and API consistency --- cf/field.py | 526 ++++++++-------------------------------- cf/mixin/fielddomain.py | 2 +- 2 files changed, 108 insertions(+), 420 deletions(-) diff --git a/cf/field.py b/cf/field.py index 8cb9d3cb26..b18768f4b0 100644 --- a/cf/field.py +++ b/cf/field.py @@ -5890,31 +5890,19 @@ def close(self): for construct in self.constructs.filter_by_data(todict=True).values(): construct.close() - def iscyclic(self, identity, **kwargs): - """Returns True if the given axis is cyclic. + def iscyclic(self, *identity, **filter_kwargs): + """Returns True if the specified axis is cyclic. .. versionadded:: 1.0 - .. seealso:: `axis`, `cyclic`, `period` + .. seealso:: `axis`, `cyclic`, `period`, `domain_axis` :Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - The *identity* parameter selects the domain axis as - returned by this call of the field construct's - `domain_axis` method: ``f.domain_axis(identity)``. 
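The pattern applied throughout this commit is that axis-selecting methods now take the same positional identities and filter keywords as `domain_axis` and delegate the selection to it. A brief sketch of equivalent ways to pick the axis under the new interface; the axis size of 96 and the cyclicity of the result are illustrative assumptions:

    >>> f.iscyclic('X')                    # select by identity, as before
    True
    >>> f.iscyclic(filter_by_size=(96,))   # select with a domain_axis filter keyword
    True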
- - kwargs: deprecated at version 3.0.0 + identity, filter_kwargs: optional + Select the unique domain axis construct returned by + ``f.domain_axis(*identity, **filter_kwargs)``. See + `domain_axis` for details. :Returns: @@ -5936,16 +5924,11 @@ def iscyclic(self, identity, **kwargs): >>> x = f.iscyclic(2) """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "iscyclic", kwargs - ) # pragma: no cover - - axis = self.domain_axis(identity, key=True, default=None) + axis = self.domain_axis( + *identity, key=True, default=None, **filter_kwargs + ) if axis is None: - raise ValueError( - "Can't identify unique axis from identity " f"{identity!r}" - ) + raise ValueError("Can't identify unique domain axis") return axis in self.cyclic() @@ -6050,30 +6033,21 @@ def concatenate(cls, fields, axis=0, _preserve=True): return out def cyclic( - self, identity=None, iscyclic=True, period=None, config={}, **kwargs + self, *identity, iscyclic=True, period=None, config={}, **filter_kwargs ): """Set the cyclicity of an axis. .. versionadded:: 1.0 - .. seealso:: `autocyclic`, `domain_axis`, `iscyclic`, `period` + .. seealso:: `autocyclic`, `domain_axis`, `iscyclic`, + `period`, `domain_axis` :Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - The *identity* parameter selects the domain axis as - returned by this call of the field construct's - `domain_axis` method: ``f.domain_axis(identity)``. + identity, filter_kwargs: optional + Select the unique domain axis construct returned by + ``f.domain_axis(*identity, **filter_kwargs)``. See + `domain_axis` for details. iscyclic: `bool`, optional If False then the axis is set to be non-cyclic. By @@ -6081,25 +6055,30 @@ def cyclic( period: optional The period for a dimension coordinate construct which - spans the selected axis. May be any numeric scalar object - that can be converted to a `Data` object (which includes - numpy array and `Data` objects). The absolute value of - *period* is used. If *period* has units then they must be - compatible with those of the dimension coordinates, - otherwise it is assumed to have the same units as the - dimension coordinates. + spans the selected axis. May be any numeric scalar + object that can be converted to a `Data` object (which + includes numpy array and `Data` objects). The absolute + value of *period* is used. If *period* has units then + they must be compatible with those of the dimension + coordinates, otherwise it is assumed to have the same + units as the dimension coordinates. - axes: deprecated at version 3.0.0 - Use the *identity* parameter instead. + config: `dict` + Additional parameters for optimizing the + operation. See the code for details. - kwargs: deprecated at version 3.0.0 + .. versionadded:: 3.9.0 + + axes: deprecated at version 3.0.0 + Use the *identity* and **filter_kwargs* parameters + instead. :Returns: `set` - The construct keys of the domain axes which were cyclic - prior to the new setting, or the current cyclic domain - axes if no axis was specified. + The construct keys of the domain axes which were + cyclic prior to the new setting, or the current cyclic + domain axes if no axis was specified. 
**Examples:** @@ -6115,23 +6094,18 @@ def cyclic( set() """ - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "cyclic", kwargs - ) # pragma: no cover - if not iscyclic and config.get("no-op"): return self._cyclic.copy() old = None cyclic = self._cyclic - if identity is None: + if not identity and not filter_kwargs: return cyclic.copy() axis = config.get("axis") if axis is None: - axis = self.domain_axis(identity, key=True) + axis = self.domain_axis(*identity, key=True, **filter_kwargs) data = self.get_data(None, _fill_value=False) if data is not None: @@ -7960,7 +7934,7 @@ def bin( return out - def has_construct(self, identity=None): + def has_construct(self, *identity, **filter_kwargs): """Whether a metadata construct exists. .. versionadded:: 3.4.0 @@ -7970,56 +7944,10 @@ def has_construct(self, identity=None): :Parameters: - identity: optional - Select the construct. Must be - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); a `Query` object - (e.g. ``cf.eq('longitude')``); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match via - `re.search`. - - A construct has a number of identities, and is selected if - any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='T' - - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='cell_area'`` - - *Parameter example:* - ``identity='long_name=Cell Area'`` - - *Parameter example:* - ``identity='cellmeasure1'`` + identity, filter_kwargs: optional + Select the unique construct returned by + ``f.construct(*identity, **filter_kwargs)``. See + `construct` for details. :Returns: @@ -8046,7 +7974,10 @@ def has_construct(self, identity=None): False """ - return self.construct(identity, default=None) is not None + return ( + self.construct(*identity, default=None, **filter_kwargs) + is not None + ) def histogram(self, digitized): """Return a multi-dimensional histogram of the data. @@ -12819,71 +12750,14 @@ def match_by_construct(self, *identities, OR=False, **conditions): .. seealso:: `match`, `match_by_property`, `match_by_rank`, `match_by_identity`, `match_by_ncvar`, - `match_by_units` + `match_by_units`, `construct` :Parameters: identities: optional - Identify the metadata constructs that have any of the - given identities or construct keys. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match - via `re.search`. - - Each construct has a number of identities, and is - selected if any of them match any of those provided. 
A - construct's identities are those returned by its - `!identities` method. In the following example, the - construct ``x`` has six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable - keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used - as an *identity* argument. - - If a cell method construct identity is given (such as - ``'method:mean'``) then it will only be compared with - the most recently applied cell method operation. - - Alternatively, one or more cell method constructs may - be identified in a single string with a CF-netCDF cell - methods-like syntax for describing both the collapse - dimensions, the collapse method, and any cell method - construct qualifiers. If N cell methods are described - in this way then they will collectively identify the N - most recently applied cell method operations. For - example, ``'T: maximum within years T: mean over - years'`` will be compared with the most two most - recently applied cell method operations. - - *Parameter example:* - ``'measure:area'`` - - *Parameter example:* - ``'latitude'`` - - *Parameter example:* - ``'long_name=Longitude'`` - - *Parameter example:* - ``'domainancillary2', 'ncvar%areacello'`` + Select the unique construct returned by + ``f.construct(*identities)``. See `construct` for + details. conditions: optional Identify the metadata constructs that have any of the @@ -13805,7 +13679,9 @@ def convolution_filter( return f - def convert(self, identity, full_domain=True, cellsize=False): + def convert( + self, *identity, full_domain=True, cellsize=False, **filter_kwargs + ): """Convert a metadata construct into a new field construct. The new field construct has the properties and data of the @@ -13824,59 +13700,14 @@ def convert(self, identity, full_domain=True, cellsize=False): .. versionadded:: 3.0.0 - .. seealso:: `cf.read` + .. seealso:: `cf.read`, `construct` :Parameters: - identity: - Select the metadata construct by one of: - - * The identity or key of a construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', - 'long_name=Time', - 'foo=bar', - 'standard_name=time', - 'ncvar%t', - 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. 
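Under the revised signature the construct to be converted is located exactly as `construct` would locate it, so identities, construct keys and filter keywords are all accepted. A short sketch assuming that `f` contains a latitude coordinate construct stored under the key 'dimensioncoordinate1':

    >>> g = f.convert('latitude')
    >>> g = f.convert('dimensioncoordinate1', full_domain=False)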
- - *Parameter example:* - ``identity='measure:area'`` - - *Parameter example:* - ``identity='latitude'`` - - *Parameter example:* - ``identity='long_name=Longitude'`` - - *Parameter example:* - ``identity='domainancillary2'`` - - *Parameter example:* - ``identity='ncvar%areacello'`` + identity, filter_kwargs: optional + Select the unique construct returned by + ``f.construct(*identity, **filter_kwargs)``. See + `construct` for details. full_domain: `bool`, optional If False then do not create a domain, other than domain @@ -13899,7 +13730,7 @@ def convert(self, identity, full_domain=True, cellsize=False): """ key, construct = self.construct( - identity, item=True, default=(None, None) + *identity, item=True, default=(None, None), **filter_kwargs ) if key is None: raise ValueError( @@ -14680,7 +14511,7 @@ def field_ancillary( **filter_kwargs, ) - def domain_axis_position(self, *identity): + def domain_axis_position(self, *identity, **filter_kwargs): """Return the position in the data of a domain axis construct. .. versionadded:: 3.0.0 @@ -14689,68 +14520,16 @@ def domain_axis_position(self, *identity): :Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='key%domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - *Parameter example:* - ``identity=2`` + identity, filter_kwargs: optional + Select the unique domain axis construct returned by + ``f.domain_axis(*identity, **filter_kwargs)``. See + `domain_axis` for details. :Returns: `int` - The position in the field construct's data of the selected - domain axis construct. + The position in the field construct's data of the + selected domain axis construct. **Examples:** @@ -14809,67 +14588,17 @@ def axes_names(self, *identities, **kwargs): return out - def axis_size(self, *identity, default=ValueError(), axes=None, **kwargs): + def axis_size( + self, *identity, default=ValueError(), axes=None, **filter_kwargs + ): """Return the size of a domain axis construct. 
:Parameters: - identity: - Select the domain axis construct by one of: - - * An identity or key of a 1-d coordinate construct that - whose data spans the domain axis construct. - - * A domain axis construct identity or key. - - * The position of the domain axis construct in the field - construct's data. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular expression - (e.g. ``re.compile('^atmosphere')``) that selects the - relevant constructs whose identities match via - `re.search`. - - Each construct has a number of identities, and is selected - if any of them match any of those provided. A construct's - identities are those returned by its `!identities` - method. In the following example, the construct ``x`` has - six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable keys. - - A position of a domain axis construct in the field - construct's data is specified by an integer index. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used as - an *identity* argument. - - *Parameter example:* - ``identity='long_name=Latitude'`` - - *Parameter example:* - ``identity='dimensioncoordinate1'`` - - *Parameter example:* - ``identity='domainaxis2'`` - - *Parameter example:* - ``identity='key%domainaxis2'`` - - *Parameter example:* - ``identity='ncdim%y'`` - - *Parameter example:* - ``identity=2`` + identity, filter_kwargs: optional + Select the unique domain axis construct returned by + ``f.domain_axis(*identity, **filter_kwargs)``. See + `domain_axis` for details. default: optional Return the value of the *default* parameter if a domain @@ -14878,8 +14607,6 @@ def axis_size(self, *identity, default=ValueError(), axes=None, **kwargs): axes: deprecated at version 3.0.0 - kwargs: deprecated at version 3.0.0 - :Returns: `int` @@ -14900,77 +14627,45 @@ def axis_size(self, *identity, default=ValueError(), axes=None, **kwargs): self, "axis_size", "Use keyword 'identity' instead." ) # pragma: no cover - if kwargs: - _DEPRECATION_ERROR_KWARGS( - self, "axis_size", kwargs, "See f.domain_axes" - ) # pragma: no cover - - axis = self.domain_axis(*identity, key=True) - - domain_axes = self.domain_axes(todict=True) + axis = self.domain_axis(*identity, default=None, **filter_kwargs) + if axis is None: + return self._default(default) - da = domain_axes.get(axis) - if da is not None: - return da.get_size(default=default) + return axis.get_size(default=default) - key = self.domain_axis(axis, key=True, default=None) - if key is None: - return self.domain_axis(axis, key=True, default=default) + def get_data_axes(self, *identity, default=ValueError(), **filter_kwargs): + """Return domain axis constructs spanned by data. - return domain_axes[key].get_size(default=default) - - def get_data_axes(self, identity=None, default=ValueError()): - """Return the keys of the domain axis constructs spanned by the - data of a metadata construct. + Specifically, returns the keys of the domain axis constructs + spanned by the field's data, or the data of a metadata construct. .. versionadded:: 3.0.0 - .. seealso:: `del_data_axes`, `has_data_axes`, `set_data_axes` + .. 
seealso:: `del_data_axes`, `has_data_axes`, + `set_data_axes`, `construct` :Parameters: - identity: optional - Select the construct for which to return the domain - axis constructs spanned by its data. By default the - field construct is selected. May be: - - * The identity or key of a metadata construct. - - A construct identity is specified by a string - (e.g. ``'latitude'``, ``'long_name=time'``, - ``'ncvar%lat'``, etc.); or a compiled regular - expression (e.g. ``re.compile('^atmosphere')``) that - selects the relevant constructs whose identities match - via `re.search`. - - Each construct has a number of identities, and is - selected if any of them match any of those provided. A - construct's identities are those returned by its - `!identities` method. In the following example, the - construct ``x`` has six identities: - - >>> x.identities() - ['time', 'long_name=Time', 'foo=bar', 'standard_name=time', 'ncvar%t', 'T'] - - A construct key may optionally have the ``'key%'`` - prefix. For example ``'dimensioncoordinate2'`` and - ``'key%dimensioncoordinate2'`` are both acceptable - keys. - - Note that in the output of a `print` call or `!dump` - method, a construct is always described by one of its - identities, and so this description may always be used - as an *identity* argument. + identity, filter_kwargs: optional + Select the unique construct returned by + ``f.construct(*identity, **filter_kwargs)``. See + `construct` for details. + + If neither *identity* nor *filter_kwargs* are set then + the domain of the field constructs's data are + returned. default: optional Return the value of the *default* parameter if the - data axes have not been set. If set to an `Exception` - instance then it will be raised instead. + data axes have not been set. + + {{default Exception}} :Returns: - `tuple` - The keys of the domain axis constructs spanned by the data. + `tuple` of `str` + The keys of the domain axis constructs spanned by the + data. 
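When an identity is supplied, the keys returned are those spanned by that construct's data rather than by the field's data. A sketch in which the construct identity and the domain axis key are illustrative only:

    >>> f.get_data_axes('latitude')
    ('domainaxis1',)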
**Examples:** @@ -14984,27 +14679,20 @@ def get_data_axes(self, identity=None, default=ValueError()): >>> print(f.get_data_axes(default=None)) None - TODO more examples with key= - """ - if identity is None: - # Get axes of Field data array + if not identity and not filter_kwargs: + # Get axes of the Field data array return super().get_data_axes(default=default) - axes = super().get_data_axes(identity, default=None) - if axes is not None: - return axes - - key = self.construct_key(identity, default=None) - if key is not None: - return super().get_data_axes(key=key, default=default) + key = self.construct(*identity, key=True, **filter_kwargs) - if default is None: - return default + axes = super().get_data_axes(key, default=None) + if axes is None: + return self._default( + default, "Can't get axes for non-existent construct" + ) - return self._default( - default, f"Can't get axes for non-existent construct {identity!r}" - ) + return axes @_inplace_enabled(default=False) @_manage_log_level_via_verbosity diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index aba2304ea2..883ac366ff 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -2498,7 +2498,7 @@ def replace_construct( f"with a {new.__class__.__name__} object" ) - axes = self.get_data_axes(key, None) + axes = self.get_data_axes(key, default=None) if axes is not None: shape0 = getattr(c, "shape", None) shape1 = getattr(new, "shape", None) From 0feea09cc8f635880db2b79485787289bca421a5 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Tue, 11 May 2021 09:53:30 +0100 Subject: [PATCH 42/53] devs --- cf/docstring/docstring.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cf/docstring/docstring.py b/cf/docstring/docstring.py index 557433d65b..373901c6b7 100644 --- a/cf/docstring/docstring.py +++ b/cf/docstring/docstring.py @@ -63,7 +63,8 @@ "{{item: `bool`, optional}}": """item: `bool`, optional If True then return the selected construct identifier and the construct itself. By default the construct - itself is returned.""", + itself is returned. 
If *key* is True then *item* is + ignored.""", # ---------------------------------------------------------------- # Method description susbstitutions (4 levels of indentataion) # ---------------------------------------------------------------- From 6b68af75078411ae6d568547bfd9aeb93f9b99fb Mon Sep 17 00:00:00 2001 From: David Hassell Date: Wed, 12 May 2021 12:29:46 +0100 Subject: [PATCH 43/53] EC --- docs/_templates/sponsors.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_templates/sponsors.html b/docs/_templates/sponsors.html index c54b1eb780..d3d7351d47 100644 --- a/docs/_templates/sponsors.html +++ b/docs/_templates/sponsors.html @@ -8,8 +8,8 @@ through Seachange and Couplet; by -the European -Commission Horizon 2020 programme through +the EC Horizon +2020 programme through IS-ENES3; by NERC through UKFAFMIP; From 5796cc12528fdebc4d5fd0af28b50115a22032a4 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 13 May 2021 10:35:24 +0100 Subject: [PATCH 44/53] f-string --- cf/test/cfa_test.sh | 0 cf/test/test_cfa.py | 7 ++-- scripts/cfa | 98 ++++++++++++++++++++------------------------- 3 files changed, 47 insertions(+), 58 deletions(-) mode change 100755 => 100644 cf/test/cfa_test.sh diff --git a/cf/test/cfa_test.sh b/cf/test/cfa_test.sh old mode 100755 new mode 100644 diff --git a/cf/test/test_cfa.py b/cf/test/test_cfa.py index 2e96c3883c..452bf5c78d 100644 --- a/cf/test/test_cfa.py +++ b/cf/test/test_cfa.py @@ -40,10 +40,9 @@ def test_cfa(self): returncode = cfa_test.returncode if returncode != 0: self.fail( - "A cfa command failed (see script's 'exit {}' point) with " - "error:\n{}".format( - returncode, cfa_stderr_via_stdout_channel.decode("utf-8") - ) + f"A cfa command failed (see script's 'exit {returncode}' " + "point) with error:\n" + f"{cfa_stderr_via_stdout_channel.decode('utf-8')}" ) # else: (passes by default) diff --git a/scripts/cfa b/scripts/cfa index 18e3d52942..d0dbe4c65a 100755 --- a/scripts/cfa +++ b/scripts/cfa @@ -9,11 +9,13 @@ if __name__ == "__main__": import cf from re import sub as re_sub - def print_help(version, date): + library_path = os.path.dirname(os.path.abspath(cf.__file__)) + + def print_help(): import subprocess - manpage = """\ -.TH "CFA" "1" "{0}" "{1}" "cfa" + manpage = f"""\ +.TH "CFA" "1" "{cf.__version__}" "{cf.__date__}" "cfa" . . . @@ -889,7 +891,7 @@ PP conditions which need to be satisfied for this translation The default mappings are found in the file - {2}/etc/STASH_to_CF.txt + {library_path}/etc/STASH_to_CF.txt and any new mappings will replace any entries which already exist. @@ -972,12 +974,13 @@ cfdump(1), ncdump(1) . . .SH LIBRARY -cf\-python library version {0} at {2} +cf\-python library version {cf.__version__} at {library_path} . . . .SH BUGS -Reports of bugs are welcome at https://github.com/NCAS-CMS/cf-python +New feature suggestions and reports of bugs are welcome at +https://github.com/NCAS-CMS/cf-python . . . @@ -988,9 +991,7 @@ Open Source Initiative MIT License . 
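The conversions in this commit all follow the same mechanical pattern of replacing percent and `str.format` interpolation with f-strings. A generic before-and-after sketch; the message and variable names are placeholders rather than lines taken from the script:

    # before
    print("{} ERROR: Can't read {}".format(iam, infile), file=sys.stderr)
    # after
    print(f"{iam} ERROR: Can't read {infile}", file=sys.stderr)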
.SH AUTHOR Written by David Hassell -""".format( - version, date, os.path.dirname(os.path.abspath(cf.__file__)) - ) +""" p = subprocess.Popen( [ @@ -1012,23 +1013,23 @@ Written by David Hassell if not overwrite: print( - "{} ERROR: Can't overwrite output file {} unless " - "--overwrite is set".format(iam, outfile), + f"{iam} ERROR: Can't overwrite output file {outfile} unless " + "--overwrite is set", file=sys.stderr, ) sys.exit(2) if not os.access(outfile, os.W_OK): print( - "{} ERROR: Can't overwrite output file {} without " - "permission".format(iam, outfile), + f"{iam} ERROR: Can't overwrite output file {outfile} without " + "permission", file=sys.stderr, ) sys.exit(2) if set((outfile,)).intersection(files): print( - "{} ERROR: Can't overwrite input file {}".format(iam, outfile), + f"{iam} ERROR: Can't overwrite input file {outfile}", file=sys.stderr, ) sys.exit(2) @@ -1038,13 +1039,13 @@ Written by David Hassell iam = os.path.basename(sys.argv[0]) usage = ( - "USAGE: {0} [-1] [-D] [-d dir] [-e file] [-f format] [-h] [-i] " + f"USAGE: {iam} [-1] [-D] [-d dir] [-e file] [-f format] [-h] [-i] " "[-n] [-o file] [-s property] [-u] [-v mode] [-x] [OPTIONS] " - "INPUTS".format(iam) + "INPUTS" ) - short_help = """\ -{0} + short_help = f"""\ +{usage} [-1] View all input files as a single dataset [-D] Write to the same directories as its input files [-d dir] Directory for output files @@ -1089,9 +1090,7 @@ Written by David Hassell [--um=option] Extra decoding instructions for PP and fields files INPUTS Input files and directories -Using cf-python library version {1} at {2}""".format( - usage, cf.__version__, os.path.dirname(os.path.abspath(cf.__file__)) - ) +Using cf-python library version {cf.__version__} at {library_path}""" # -------------------------------------------------------------------- # Parse command line options @@ -1154,7 +1153,7 @@ Using cf-python library version {1} at {2}""".format( ) except GetoptError as err: # print help information and exit: - print("{} ERROR: {}".format(iam, str(err)), file=sys.stderr) + print(f"{iam} ERROR: {err}", file=sys.stderr) sys.exit(2) if not (infiles or opts): @@ -1186,7 +1185,7 @@ Using cf-python library version {1} at {2}""".format( for option, arg in opts: if option in ("-h", "--help"): - print_help(cf.__version__, cf.__date__) + print_help() sys.exit(0) elif option in ("-o", "--outfile"): outfile = arg @@ -1251,8 +1250,8 @@ Using cf-python library version {1} at {2}""".format( read_options["squeeze"] = True elif option == "--um_version": print( - "{} ERROR: The {} option has been removed. Use " - "--um=version=VN instead.".format(iam, option), + f"{iam} ERROR: The {option} option has been removed. Use " + "--um=version=VN instead.", file=sys.stderr, ) sys.exit(2) @@ -1282,17 +1281,15 @@ Using cf-python library version {1} at {2}""".format( view = arg if view not in "smc": print( - "{} ERROR: The {} option must have an argument of " - "either s, m or c".format(iam, option), + f"{iam} ERROR: The {option} option must have a value " + "of either s, m or c", file=sys.stderr, ) sys.exit(2) elif option in ("-a", "--all"): print( - "{} ERROR: The {} option has been deprecated and is now " - "the default behaviour. See the -x option.".format( - iam, option - ), + f"{iam} ERROR: The {option} option has been deprecated and " + "is now the default behaviour. 
See the -x option.", file=sys.stderr, ) sys.exit(2) @@ -1305,16 +1302,13 @@ Using cf-python library version {1} at {2}""".format( "--verbose", ): print( - "{} ERROR: The {} option has been deprecated.".format( - iam, option - ), + f"{iam} ERROR: The {option} option has been deprecated.", file=sys.stderr, ) sys.exit(2) else: print(usage) assert False, "Unknown option: " + option - # --- End: for if outfile is None and not directory_output and view is None: # No output file or directory options are set, then output a @@ -1323,8 +1317,8 @@ Using cf-python library version {1} at {2}""".format( if one and view is None: print( - "{} ERROR: Can only set the -1 option if the -v option is " - "also set.".format(iam), + f"{iam} ERROR: Can only set the -1 option if the -v option is " + "also set.", file=sys.stderr, ) sys.exit(2) @@ -1380,12 +1374,11 @@ Using cf-python library version {1} at {2}""".format( ) except Exception as error: print( - "{} ERROR: Can't load STASH table: {}".format( - iam, error - ) + f"{iam} ERROR: Can't load STASH table {stash_table}: " + f"{error}", + file=sys.stderr, ) sys.exit(2) - # --- End: if write_options["fmt"] = fmt @@ -1394,16 +1387,16 @@ Using cf-python library version {1} at {2}""".format( if fmt == "CFA": print( - "{} ERROR: '-f CFA' has been replaced by '-f CFA3' or " + f"{iam} ERROR: '-f CFA' has been replaced by '-f CFA3' or " "'-f CFA4' for netCDF3 classic and netCDF4 CFA output formats " - "respectively".format(iam), + "respectively", file=sys.stderr, ) sys.exit(2) if not infiles: print( - "{} ERROR: Must provide at least one input file".format(iam), + f"{iam} ERROR: Must provide at least one input file", file=sys.stderr, ) sys.exit(2) @@ -1415,8 +1408,7 @@ Using cf-python library version {1} at {2}""".format( > 1 ): print( - "{} ERROR: Can only set one of the -o, -d and -D " - "options".format(iam), + f"{iam} ERROR: Can only set one of the -o, -d and -D options", file=sys.stderr, ) sys.exit(2) @@ -1426,8 +1418,7 @@ Using cf-python library version {1} at {2}""".format( # absolute, normalised path. 
if not os.path.isdir(directory) or not os.access(directory, os.W_OK): print( - "{} ERROR: Can't write to output directory " - "{}".format(iam, directory), + f"{iam} ERROR: Can't write to output directory {directory}", file=sys.stderr, ) sys.exit(2) @@ -1440,8 +1431,8 @@ Using cf-python library version {1} at {2}""".format( follow_symlinks = read_options.get("follow_symlinks", False) if follow_symlinks and not recursive: print( - "{} ERROR: Can't set --follow_symlinks without setting " - "--recursive".format(iam), + f"{iam} ERROR: Can't set --follow_symlinks without setting " + "--recursive", file=sys.stderr, ) sys.exit(2) @@ -1458,7 +1449,7 @@ Using cf-python library version {1} at {2}""".format( infiles2.extend([os.path.join(path, f) for f in filenames]) if not recursive: break - # --- End: for + infiles = infiles2 # Initialise the set of all input and output files @@ -1497,7 +1488,7 @@ Using cf-python library version {1} at {2}""".format( f = cf.read(infile, **read_options) except Exception as error: print( - "{} ERROR reading {}: {}".format(iam, infile, error), + f"{iam} ERROR reading {infile}: {error}", file=sys.stderr, ) sys.exit(1) @@ -1525,8 +1516,7 @@ Using cf-python library version {1} at {2}""".format( cf.write(f, outfile, **write_options) except Exception as error: print( - "{} ERROR writing {}: {}".format(iam, outfile, error), + f"{iam} ERROR writing {outfile}: {error}", file=sys.stderr, ) sys.exit(1) - # --- End: for From 99e5f179ead85006fd50bd7d9df9ed292f94fb55 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Thu, 13 May 2021 10:59:40 +0100 Subject: [PATCH 45/53] import MaskError --- cf/read_write/read.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cf/read_write/read.py b/cf/read_write/read.py index 96b20f182e..0c8102624d 100644 --- a/cf/read_write/read.py +++ b/cf/read_write/read.py @@ -4,6 +4,8 @@ from glob import glob from os.path import isdir +from numpy.ma.core import MaskError + from ..cfimplementation import implementation from ..fieldlist import FieldList from ..aggregate import aggregate as cf_aggregate From f4569cc2e72e34ab29cc54fa493736b5b568d261 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:21:46 +0100 Subject: [PATCH 46/53] Typo Co-authored-by: Sadie L. Bartholomew --- cf/constructs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cf/constructs.py b/cf/constructs.py index f9fb9f910d..7c6467089f 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -204,7 +204,7 @@ def _filter_by_identity(self, arg, identities, todict, _config): @classmethod def _short_iteration(cls, x): - """The default short cicuit test. + """The default short circuit test. If this method returns True then only ther first identity return by the construct's `!identities` method will be From 077fc830a68630d90b56375312614a31e1facf8b Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:22:13 +0100 Subject: [PATCH 47/53] Typo Co-authored-by: Sadie L. Bartholomew --- cf/constructs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cf/constructs.py b/cf/constructs.py index 7c6467089f..f2b6ef108a 100644 --- a/cf/constructs.py +++ b/cf/constructs.py @@ -206,8 +206,8 @@ def _filter_by_identity(self, arg, identities, todict, _config): def _short_iteration(cls, x): """The default short circuit test. 
- If this method returns True then only ther first identity - return by the construct's `!identities` method will be + If this method returns True then only the first identity + returned by the construct's `!identities` method will be checked. See `_filter_by_identity` for details. From ea2c03d7527156f2cadb75f840db6a5d9d5bb557 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:29:17 +0100 Subject: [PATCH 48/53] Typo Co-authored-by: Sadie L. Bartholomew --- cf/mixin/fielddomain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 883ac366ff..57b2738565 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -70,7 +70,7 @@ def _coordinate_reference_axes(self, key): return set(axes) def _conform_coordinate_references(self, key, coordref=None): - """Where possible replace the content of coordiante reference + """Where possible replace the content of coordinate reference construct coordinates with coordinate construct keys. .. versionadded:: 3.0.0 From 96011cd9bd975ac06a0fbbb15380a0cbff3cfb20 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:41:04 +0100 Subject: [PATCH 49/53] select_by_ncvar docs Co-authored-by: Sadie L. Bartholomew --- cf/mixin/fielddomainlist.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py index aa0db17605..b6b2580fd1 100644 --- a/cf/mixin/fielddomainlist.py +++ b/cf/mixin/fielddomainlist.py @@ -185,9 +185,7 @@ def select_by_ncvar(self, *ncvars): :Parameters: ncvars: optional - Select constructs from the list. May be one or more: - - * The netCDF name of a construct. + Select constructs from the list. May be one or more netCDF names of constructs. A construct is selected if it matches any of the given names. From af680376cdc5e1dd83a69851d9c87802a89e5bb7 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:42:04 +0100 Subject: [PATCH 50/53] formatting Co-authored-by: Sadie L. Bartholomew --- cf/field.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cf/field.py b/cf/field.py index b18768f4b0..6f08e84119 100644 --- a/cf/field.py +++ b/cf/field.py @@ -4875,7 +4875,7 @@ def _weights_linear( return False raise ValueError( - f"Can't create linear weights for {axis!r} axis: No " "bounds" + f"Can't create linear weights for {axis!r} axis: No bounds" ) else: # Bounds exist From 51edfd2d511a711e1467f16f1b902bceab5a9f76 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:43:21 +0100 Subject: [PATCH 51/53] example_fields API Co-authored-by: Sadie L. 
Bartholomew --- cf/examplefield.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cf/examplefield.py b/cf/examplefield.py index 6b1540e88f..c1ad7e4a42 100644 --- a/cf/examplefield.py +++ b/cf/examplefield.py @@ -13,8 +13,8 @@ def example_field(n, _implementation=_implementation): example_field.__doc__ = cfdm.example_field.__doc__.replace("cfdm.", "cf.") -def example_fields(n, _func=example_field): - return cfdm.example_fields(n, _func=_func) +def example_fields(*n, _func=example_field): + return cfdm.example_fields(*n, _func=_func) example_fields.__doc__ = cfdm.example_fields.__doc__.replace("cfdm.", "cf.") From 9b15196544fd6ca04b9b282074dc0a5a695e9d85 Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 08:44:54 +0100 Subject: [PATCH 52/53] review fixes --- cf/mixin/fielddomain.py | 2 +- cf/mixin/fielddomainlist.py | 7 ++----- cf/test/cfa_test.sh | 0 3 files changed, 3 insertions(+), 6 deletions(-) mode change 100644 => 100755 cf/test/cfa_test.sh diff --git a/cf/mixin/fielddomain.py b/cf/mixin/fielddomain.py index 883ac366ff..7f0f0f58bb 100644 --- a/cf/mixin/fielddomain.py +++ b/cf/mixin/fielddomain.py @@ -291,7 +291,7 @@ def _indices(self, mode, data_axes, auxiliary_mask, **kwargs): The dictionary has two keys: ``'indices'`` and ``'mask'``. - The ``'indices'`` key stores a dictionary in keyed by + The ``'indices'`` key stores a dictionary keyed by domain axis identifiers, each of which has a value of the index for that domain axis. diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py index aa0db17605..69014f5aa2 100644 --- a/cf/mixin/fielddomainlist.py +++ b/cf/mixin/fielddomainlist.py @@ -8,7 +8,7 @@ def select_by_construct(self, *identities, OR=False, **conditions): """Select elements by their metadata constructs. To find the inverse of the selection, use a list comprehension - with the !match_by_construct` method of the construct + with the `!match_by_construct` method of the construct elements. For example, to select all constructs that do *not* have a "latitude" metadata construct: @@ -55,14 +55,11 @@ def select_by_construct(self, *identities, OR=False, **conditions): recently applied cell method operations. *Parameter example:* - `'latitude'`` + ``'latitude'`` *Parameter example:* ``'T'`` - *Parameter example:* - ``'latitude'`` - *Parameter example:* ``'long_name=Cell Area'`` diff --git a/cf/test/cfa_test.sh b/cf/test/cfa_test.sh old mode 100644 new mode 100755 From 4037de9f3ff0b651a486c4dd7ac168a0e04d9b4f Mon Sep 17 00:00:00 2001 From: David Hassell Date: Mon, 17 May 2021 09:04:28 +0100 Subject: [PATCH 53/53] review fixes --- cf/mixin/fielddomainlist.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/cf/mixin/fielddomainlist.py b/cf/mixin/fielddomainlist.py index 75998788fc..2012321472 100644 --- a/cf/mixin/fielddomainlist.py +++ b/cf/mixin/fielddomainlist.py @@ -182,7 +182,8 @@ def select_by_ncvar(self, *ncvars): :Parameters: ncvars: optional - Select constructs from the list. May be one or more netCDF names of constructs. + Select constructs from the list. May be one or more + netCDF names of constructs. A construct is selected if it matches any of the given names. @@ -258,15 +259,7 @@ def select_by_property(self, *mode, **properties): its properties matches. properties: optional - Select the constructs with the given properties. May be - one or more of: - - * The property of a construct. 
- - By default a construct is selected if it matches all - of the given properties, but it may alternatively be - selected when at least one of its properties matches - (see the *mode* positional parameter). + Select the constructs with the given properties. A property value is given by a keyword parameter of the property name. The value may be a scalar or vector @@ -276,6 +269,11 @@ def select_by_property(self, *mode, **properties): constructs whose methods match (via `re.search`) are selected. + By default a construct is selected if it matches all + of the given properties, but it may alternatively be + selected when at least one of its properties matches + (see the *mode* positional parameter). + :Returns: `{{class}}`
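The list-selection helpers touched by these final review fixes are used in the same way as before; a closing sketch in which the file name, netCDF variable name and property values are illustrative only, and where, by default, every given property must match:

    >>> import cf
    >>> fl = cf.read('file.nc')
    >>> fl.select_by_ncvar('tas')
    >>> fl.select_by_property(standard_name='air_temperature', units='K')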