diff --git a/docs/notebooks/02_Extract_geographical_watershed_properties.ipynb b/docs/notebooks/02_Extract_geographical_watershed_properties.ipynb
index 4e3ca434..84be4db0 100644
--- a/docs/notebooks/02_Extract_geographical_watershed_properties.ipynb
+++ b/docs/notebooks/02_Extract_geographical_watershed_properties.ipynb
@@ -405,6 +405,13 @@
     "all_properties = {**shape_info, **land_use, **terrain}\n",
     "display(all_properties)"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
@@ -418,7 +425,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.16"
+   "version": "3.10.13"
   }
  },
  "nbformat": 4,
diff --git a/docs/notebooks/paper/Perform_a_climate_change_impact_study_on_a_watershed.ipynb b/docs/notebooks/paper/Perform_a_climate_change_impact_study_on_a_watershed.ipynb
index c3bde7b2..41b2d4c5 100644
--- a/docs/notebooks/paper/Perform_a_climate_change_impact_study_on_a_watershed.ipynb
+++ b/docs/notebooks/paper/Perform_a_climate_change_impact_study_on_a_watershed.ipynb
@@ -77,6 +77,7 @@
     "import xclim.sdba as sdba\n",
     "from birdy import WPSClient\n",
     "from clisops.core import average, subset\n",
+    "from dask.diagnostics import ProgressBar\n",
     "\n",
     "from ravenpy import Emulator\n",
     "from ravenpy.config import commands as rc\n",
@@ -399,7 +400,7 @@
     "    ERA5_pr = ERA5_pr.mean({\"latitude\", \"longitude\"})\n",
     "\n",
     "    # Ensure that the precipitation is non-negative, which can happen with some reanalysis models.\n",
-    "    ERA5_pr[ERA5_pr < 0] = 0\n",
+    "    ERA5_pr = np.maximum(ERA5_pr, 0)\n",
     "\n",
     "    # Transform them to a dataset such that they can be written with attributes to netcdf\n",
     "    ERA5_tmin = ERA5_tmin.to_dataset(name=\"tmin\", promote_attrs=True)\n",
@@ -463,21 +464,20 @@
     "        col_subset = col.search(require_all_on=[\"source_id\"], **query)\n",
     "        mapper = fsCMIP.get_mapper(col_subset.df.zstore[0])\n",
     "\n",
-    "        # special case for precipitation, which does not have the \"height\" variable that we need to discard as for tasmax and tasmin.\n",
-    "        if variable == \"pr\":\n",
-    "            out[exp][variable] = average.average_shape(\n",
-    "                xr.open_zarr(mapper, consolidated=True).sel(\n",
-    "                    time=slice(period_start, period_end)\n",
-    "                )[variable],\n",
-    "                basin_contour,\n",
-    "            ).chunk(-1)\n",
-    "        else:\n",
-    "            out[exp][variable] = average.average_shape(\n",
-    "                xr.open_zarr(mapper, consolidated=True)\n",
-    "                .sel(time=slice(period_start, period_end))\n",
-    "                .reset_coords(\"height\", drop=True)[variable],\n",
-    "                basin_contour,\n",
-    "            ).chunk(-1)\n",
+    "        ds = xr.open_zarr(mapper, consolidated=True).sel(\n",
+    "            time=slice(period_start, period_end)\n",
+    "        )\n",
+    "\n",
+    "        if \"height\" in ds.coords:\n",
+    "            ds = ds.drop_vars(\"height\")\n",
+    "\n",
+    "        out[exp][variable] = average.average_shape(\n",
+    "            ds,\n",
+    "            basin_contour,\n",
+    "        )[\n",
+    "            variable\n",
+    "        ].chunk(-1)\n",
+    "\n",
     "\n",
     "# We can now extract the variables that we will need later:\n",
     "historical_tasmax = out[\"historical\"][\"tasmax\"]\n",
@@ -633,15 +633,16 @@
     "    ]\n",
     ")\n",
     "# Write to temporary folder\n",
-    "fn_tmp_fut = tmp / \"future_dataset_tmp.nc\"\n",
-    "fut_dataset.to_netcdf(fn_tmp_fut)\n",
+    "with ProgressBar():\n",
+    "    fn_tmp_fut = tmp / \"future_dataset_tmp.nc\"\n",
+    "    fut_dataset.to_netcdf(fn_tmp_fut)\n",
     "\n",
-    "# Write the data to disk to a temporary location for future use.\n",
-    "ref_dataset = xr.open_dataset(fn_tmp_ref)\n",
-    "ref_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"reference_dataset.nc\")\n",
+    "    # Write the data to disk to a temporary location for future use.\n",
+    "    ref_dataset = xr.open_dataset(fn_tmp_ref)\n",
+    "    ref_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"reference_dataset.nc\")\n",
     "\n",
-    "fut_dataset = xr.open_dataset(fn_tmp_fut)\n",
-    "fut_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"future_dataset.nc\")"
+    "    fut_dataset = xr.open_dataset(fn_tmp_fut)\n",
+    "    fut_dataset.isel(geom=0).squeeze().to_netcdf(tmp / \"future_dataset.nc\")"
    ]
   },
   {
@@ -826,11 +827,10 @@
    "source": [
     "# Copy the configuration of the previous model that we will modify for our validation:\n",
     "model_validation = model_config.duplicate(\n",
-    "    params=optimized_parameters,\n",
     "    StartDate=dt.datetime(1986, 1, 1),\n",
     "    EndDate=dt.datetime(1990, 12, 31),\n",
     "    SuppressOutput=False,\n",
-    ")\n",
+    ").set_params(optimized_parameters)\n",
     "\n",
     "sim_output = Emulator(config=model_validation).run()\n",
     "\n",
@@ -1007,7 +1007,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.16"
+   "version": "3.11.6"
   }
  },
  "nbformat": 4,