Fix dims --> sizes for xarray datasets #983

Merged 2 commits · Feb 6, 2024
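For context (not part of the diff itself): on an xarray `Dataset`, `sizes` is the mapping from dimension name to length, while dictionary-style lookups on `dims` (e.g. `ds.dims['Time']`) trigger a FutureWarning in recent xarray releases because `Dataset.dims` is slated to return only the dimension names, matching `DataArray.dims`. A minimal sketch of the before/after pattern, using a made-up dataset (variable and dimension names are illustrative, not taken from MPAS-Analysis):

```python
import numpy as np
import xarray as xr

# Toy stand-in for the observational climatologies touched by this PR
# (names are illustrative only).
ds = xr.Dataset({'mld': (('Time', 'nCells'), np.zeros((12, 5)))})

# Dataset.sizes is always a mapping from dimension name to length.
print(ds.sizes['Time'])  # 12

# Dataset.dims has historically behaved like the same mapping, but recent
# xarray releases warn on ds.dims['Time'] because Dataset.dims is slated
# to return just the dimension names.
# print(ds.dims['Time'])  # deprecated lookup that this PR removes

# The replacement pattern used throughout the PR:
ds.coords['year'] = ('Time', np.ones(ds.sizes['Time'], int))
```

`DataArray.dims` (a tuple of names) is unaffected, so only `Dataset`-level lookups needed the change.
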
Changes from all commits
mpas_analysis/ocean/climatology_map_argo.py (2 changes: 1 addition & 1 deletion)
@@ -430,7 +430,7 @@ def build_observational_dataset(self, fileName):
dsObs.coords['month'] = ('Time', np.array(dsObs['calmonth'], int))

# no meaningful year since this is already a climatology
- dsObs.coords['year'] = ('Time', np.ones(dsObs.dims['Time'], int))
+ dsObs.coords['year'] = ('Time', np.ones(dsObs.sizes['Time'], int))
dsObs = dsObs[[self.fieldName, 'month']]

slices = []
mpas_analysis/ocean/climatology_map_mld.py (2 changes: 1 addition & 1 deletion)
@@ -235,7 +235,7 @@ def build_observational_dataset(self, fileName):
dsObs.coords['month'] = ('Time', np.array(dsObs['calmonth'], int))

# no meaningful year since this is already a climatology
- dsObs.coords['year'] = ('Time', np.ones(dsObs.dims['Time'], int))
+ dsObs.coords['year'] = ('Time', np.ones(dsObs.sizes['Time'], int))

dsObs = dsObs[['mld', 'month']]
return dsObs
mpas_analysis/ocean/climatology_map_woa.py (2 changes: 1 addition & 1 deletion)
@@ -372,7 +372,7 @@ def build_observational_dataset(self, fileName):
# need to add a dummy year coordinate
dsObs = dsObs.rename({'month': 'Time'})
dsObs.coords['month'] = dsObs['Time']
- dsObs.coords['year'] = ('Time', np.ones(dsObs.dims['Time'], int))
+ dsObs.coords['year'] = ('Time', np.ones(dsObs.sizes['Time'], int))

data_vars = {}
for fieldName in self.fieldNames:
mpas_analysis/ocean/ocean_regional_profiles.py (2 changes: 1 addition & 1 deletion)
@@ -303,7 +303,7 @@ def run_task(self):
startDate=startDate,
endDate=endDate) as dsIn:

- for inIndex in range(dsIn.dims['Time']):
+ for inIndex in range(dsIn.sizes['Time']):

mask = np.logical_and(
dsIn.year[inIndex].values == years,
mpas_analysis/ocean/remap_depth_slices_subtask.py (6 changes: 3 additions & 3 deletions)
@@ -130,7 +130,7 @@ def run_task(self):
zBot = zMid.where(zMid.verticalIndex == self.maxLevelCell).sum(
dim='nVertLevels')

- verticalIndices = np.zeros((len(self.depths), ds.dims['nCells']), int)
+ verticalIndices = np.zeros((len(self.depths), ds.sizes['nCells']), int)

mask = np.zeros(verticalIndices.shape, bool)

@@ -145,7 +145,7 @@ def run_task(self):
verticalIndices[depthIndex, :] = self.maxLevelCell.values
mask[depthIndex, :] = self.maxLevelCell.values >= 0
else:

diff = np.abs(zMid - depth).where(ocean_mask, drop=True)
verticalIndex = diff.argmin(dim='nVertLevels')

@@ -199,7 +199,7 @@ def customize_masked_climatology(self, climatology, season):

climatology.coords['verticalIndex'] = \
('nVertLevels',
- np.arange(climatology.dims['nVertLevels']))
+ np.arange(climatology.sizes['nVertLevels']))

depthNames = [str(depth) for depth in self.depths]
mpas_analysis/ocean/streamfunction_moc.py (4 changes: 2 additions & 2 deletions)
@@ -1043,7 +1043,7 @@ def _compute_moc_time_series_analysismember(self):
refTopDepth = dsMOCIn.depth.values

# first, copy all computed data
- for inIndex in range(dsMOCIn.dims['Time']):
+ for inIndex in range(dsMOCIn.sizes['Time']):

mask = np.logical_and(
dsMOCIn.year[inIndex].values == years,
@@ -1219,7 +1219,7 @@ def _compute_moc_time_series_postprocess(self):
dsMOCIn.load()

# first, copy all computed data
- for inIndex in range(dsMOCIn.dims['Time']):
+ for inIndex in range(dsMOCIn.sizes['Time']):

mask = np.logical_and(
dsMOCIn.year[inIndex].values == years,
mpas_analysis/ocean/time_series_ocean_regions.py (2 changes: 1 addition & 1 deletion)
@@ -540,7 +540,7 @@ def run_task(self):
startDate=startDate,
endDate=endDate) as dsOut:

- for inIndex in range(dsOut.dims['Time']):
+ for inIndex in range(dsOut.sizes['Time']):

mask = numpy.logical_and(
dsOut.year[inIndex].values == years,
mpas_analysis/ocean/time_series_transport.py (2 changes: 1 addition & 1 deletion)
@@ -270,7 +270,7 @@ def run_task(self):
startDate=startDate,
endDate=endDate) as dsOut:

- for inIndex in range(dsOut.dims['Time']):
+ for inIndex in range(dsOut.sizes['Time']):

mask = numpy.logical_and(
dsOut.year[inIndex].values == years,
mpas_analysis/shared/climatology/climatology.py (4 changes: 2 additions & 2 deletions)
@@ -715,7 +715,7 @@ def _setup_climatology_caching(ds, startYearClimo, endYearClimo,

cacheInfo = []

- cacheIndices = -1 * numpy.ones(ds.dims['Time'], int)
+ cacheIndices = -1 * numpy.ones(ds.sizes['Time'], int)
monthsInDs = ds.month.values
yearsInDs = ds.year.values

@@ -787,7 +787,7 @@ def _cache_individual_climatologies(ds, cacheInfo, printProgress,

totalDays = dsYear.daysInMonth.sum(dim='Time').values

- monthCount = dsYear.dims['Time']
+ monthCount = dsYear.sizes['Time']

climatology = compute_climatology(dsYear, monthValues, calendar,
maskVaries=False)
@@ -157,7 +157,7 @@ def open_multifile_dataset(fileNames, calendar, config,
# select only the data in the specified range of dates
ds = ds.sel(Time=slice(startDate, endDate))

- if ds.dims['Time'] == 0:
+ if ds.sizes['Time'] == 0:
raise ValueError('The data set contains no Time entries between '
'dates {} and {}.'.format(
days_to_datetime(startDate, calendar=calendar),
mpas_analysis/shared/io/mpas_reader.py (2 changes: 1 addition & 1 deletion)
@@ -84,7 +84,7 @@ def open_mpas_dataset(fileName, calendar,
# select only the data in the specified range of dates
ds = ds.sel(Time=slice(startDate, endDate))

- if ds.dims['Time'] == 0:
+ if ds.sizes['Time'] == 0:
raise ValueError('The data set contains no Time entries between '
'dates {} and {}.'.format(
days_to_datetime(startDate, calendar=calendar),
suite/run_dev_suite.bash (3 changes: 1 addition & 2 deletions)
@@ -6,14 +6,13 @@ env_name=mpas_dev

conda_base=$(dirname $(dirname $CONDA_EXE))
source $conda_base/etc/profile.d/conda.sh
- source $conda_base/etc/profile.d/mamba.sh

export HDF5_USE_FILE_LOCKING=FALSE

branch=$(git symbolic-ref --short HEAD)

# test building the docs
- mamba activate ${env_name}
+ conda activate ${env_name}
cd docs
make clean
make html
suite/run_suite.bash (8 changes: 4 additions & 4 deletions)
@@ -12,14 +12,14 @@ export HDF5_USE_FILE_LOCKING=FALSE

branch=$(git symbolic-ref --short HEAD)

- conda update -y conda conda-build mamba boa
- conda mambabuild ci/recipe
+ conda update -y conda conda-build
+ conda build ci/recipe

# create the test conda envs
for py in ${main_py} ${alt_py}
do
env=test_mpas_analysis_py${py}
- mamba create -y -n ${env} --use-local python=${py} mpas-analysis sphinx \
+ conda create -y -n ${env} --use-local python=${py} mpas-analysis sphinx \
mock sphinx_rtd_theme "tabulate>=0.8.2" "m2r2>=0.3.3" "mistune<2" \
pytest "mache>=1.11.0" "esmf=*=mpi_mpich_*" jinja2
conda activate ${env}
@@ -30,7 +30,7 @@ done

# create another env for testing xarray main branch
py=${main_py}
env=test_mpas_analysis_xarray_main
- mamba create --yes --quiet --name ${env} --use-local python=${py} \
+ conda create --yes --quiet --name ${env} --use-local python=${py} \
mpas-analysis pytest
conda activate ${env}
pip install git+https://github.com/pydata/xarray.git