Commit

WPS returns datasets : access band_data variable (#356)
In notebook 02, using the PAVICS beta image, `stats_resp.get(asobj=True)`
returns an `xr.Dataset`, so plotting requires accessing the `band_data`
variable.

See Ouranosinc/pavics-jupyter-env-issues#7

Note: this will break the notebook when using the `current` image. Do not
merge until all fixes are ready.

Fix for
Ouranosinc/PAVICS-e2e-workflow-tests#121.
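
For context, a minimal sketch of the access pattern the commit message describes, assuming `stats_resp` is the WPS response produced earlier in notebook 02 (the call chain and the `band_data` key come from the message above; everything else is illustrative):

```python
# Hypothetical usage mirroring the commit message: with the beta image,
# requesting the output as a Python object yields an xr.Dataset rather than
# a bare array, so plotting goes through its `band_data` variable.
stats = stats_resp.get(asobj=True)  # xr.Dataset on the beta image

stats["band_data"].plot()  # access the variable explicitly before plotting
```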
tlvu authored May 9, 2024
2 parents 0676b1c + 6ff9f29 commit 1f5be4e
Showing 2 changed files with 34 additions and 27 deletions.
@@ -405,6 +405,13 @@
"all_properties = {**shape_info, **land_use, **terrain}\n",
"display(all_properties)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
],
"metadata": {
@@ -418,7 +425,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.16"
"version": "3.10.13"
}
},
"nbformat": 4,
@@ -77,6 +77,7 @@
"import xclim.sdba as sdba\n",
"from birdy import WPSClient\n",
"from clisops.core import average, subset\n",
"from dask.diagnostics import ProgressBar\n",
"\n",
"from ravenpy import Emulator\n",
"from ravenpy.config import commands as rc\n",
@@ -399,7 +400,7 @@
" ERA5_pr = ERA5_pr.mean({\"latitude\", \"longitude\"})\n",
"\n",
" # Ensure that the precipitation is non-negative, which can happen with some reanalysis models.\n",
" ERA5_pr[ERA5_pr < 0] = 0\n",
" ERA5_pr = np.maximum(ERA5_pr, 0)\n",
"\n",
" # Transform them to a dataset such that they can be written with attributes to netcdf\n",
" ERA5_tmin = ERA5_tmin.to_dataset(name=\"tmin\", promote_attrs=True)\n",
@@ -463,21 +464,20 @@
" col_subset = col.search(require_all_on=[\"source_id\"], **query)\n",
" mapper = fsCMIP.get_mapper(col_subset.df.zstore[0])\n",
"\n",
" # special case for precipitation, which does not have the \"height\" variable that we need to discard as for tasmax and tasmin.\n",
" if variable == \"pr\":\n",
" out[exp][variable] = average.average_shape(\n",
" xr.open_zarr(mapper, consolidated=True).sel(\n",
" time=slice(period_start, period_end)\n",
" )[variable],\n",
" basin_contour,\n",
" ).chunk(-1)\n",
" else:\n",
" out[exp][variable] = average.average_shape(\n",
" xr.open_zarr(mapper, consolidated=True)\n",
" .sel(time=slice(period_start, period_end))\n",
" .reset_coords(\"height\", drop=True)[variable],\n",
" basin_contour,\n",
" ).chunk(-1)\n",
" ds = xr.open_zarr(mapper, consolidated=True).sel(\n",
" time=slice(period_start, period_end)\n",
" )\n",
"\n",
" if \"height\" in ds.coords:\n",
" ds = ds.drop_vars(\"height\")\n",
"\n",
" out[exp][variable] = average.average_shape(\n",
" ds,\n",
" basin_contour,\n",
" )[\n",
" variable\n",
" ].chunk(-1)\n",
"\n",
"\n",
"# We can now extract the variables that we will need later:\n",
"historical_tasmax = out[\"historical\"][\"tasmax\"]\n",
@@ -633,15 +633,16 @@
" ]\n",
")\n",
"# Write to temporary folder\n",
"fn_tmp_fut = tmp / \"future_dataset_tmp.nc\"\n",
"fut_dataset.to_netcdf(fn_tmp_fut)\n",
"with ProgressBar():\n",
" fn_tmp_fut = tmp / \"future_dataset_tmp.nc\"\n",
" fut_dataset.to_netcdf(fn_tmp_fut)\n",
"\n",
"# Write the data to disk to a temporary location for future use.\n",
"ref_dataset = xr.open_dataset(fn_tmp_ref)\n",
"ref_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"reference_dataset.nc\")\n",
" # Write the data to disk to a temporary location for future use.\n",
" ref_dataset = xr.open_dataset(fn_tmp_ref)\n",
" ref_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"reference_dataset.nc\")\n",
"\n",
"fut_dataset = xr.open_dataset(fn_tmp_fut)\n",
"fut_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"future_dataset.nc\")"
" fut_dataset = xr.open_dataset(fn_tmp_fut)\n",
" fut_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"future_dataset.nc\")"
]
},
{
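
A minimal, self-contained example of the `ProgressBar` wrapper added above (the dataset here is synthetic; in the notebook the lazy datasets being written are the reference and future forcings):

```python
import numpy as np
import xarray as xr
from dask.diagnostics import ProgressBar

# Synthetic chunked (dask-backed) dataset standing in for the notebook's data.
ds = xr.Dataset({"pr": ("time", np.random.rand(1000))}).chunk({"time": 100})

# to_netcdf on a lazy dataset triggers the dask computation; wrapping it in
# dask.diagnostics.ProgressBar reports that computation's progress in the cell output.
with ProgressBar():
    ds.to_netcdf("example_output.nc")
```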
@@ -826,11 +827,10 @@
"source": [
"# Copy the configuration of the previous model that we will modify for our validation:\n",
"model_validation = model_config.duplicate(\n",
" params=optimized_parameters,\n",
" StartDate=dt.datetime(1986, 1, 1),\n",
" EndDate=dt.datetime(1990, 12, 31),\n",
" SuppressOutput=False,\n",
")\n",
").set_params(optimized_parameters)\n",
"\n",
"sim_output = Emulator(config=model_validation).run()\n",
"\n",
@@ -1007,7 +1007,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.16"
"version": "3.11.6"
}
},
"nbformat": 4,
