diff --git a/docs/examples/example_moving_eddies.py b/docs/examples/example_moving_eddies.py index 4868aec6cb..f5d80403d3 100644 --- a/docs/examples/example_moving_eddies.py +++ b/docs/examples/example_moving_eddies.py @@ -1,4 +1,3 @@ -import gc import math from argparse import ArgumentParser from datetime import timedelta @@ -229,12 +228,16 @@ def fieldsetfile(mesh, tmpdir): return filename -@pytest.mark.parametrize("mesh", ["flat", "spherical"]) -def test_moving_eddies_file(mesh, tmpdir): - gc.collect() - fieldset = parcels.FieldSet.from_parcels( - fieldsetfile(mesh, tmpdir), extra_fields={"P": "P"} - ) +def test_moving_eddies_file(tmpdir): + data_folder = parcels.download_example_dataset("MovingEddies_data") + filenames = { + "U": str(data_folder / "moving_eddiesU.nc"), + "V": str(data_folder / "moving_eddiesV.nc"), + "P": str(data_folder / "moving_eddiesP.nc"), + } + variables = {"U": "vozocrtx", "V": "vomecrty", "P": "P"} + dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"} + fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions) outfile = tmpdir.join("EddyParticle") pset = moving_eddies_example(fieldset, outfile, 2) # Also include last timestep @@ -242,12 +245,8 @@ def test_moving_eddies_file(mesh, tmpdir): pset.particledata.setallvardata( f"{var}", pset.particledata.getvardata(f"{var}_nextloop") ) - if mesh == "flat": - assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5 - assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5 - else: - assert pset[0].lon < 2.0 and 46.2 < pset[0].lat < 46.25 - assert pset[1].lon < 2.0 and 48.8 < pset[1].lat < 48.85 + assert pset[0].lon < 2.2e5 and 1.1e5 < pset[0].lat < 1.2e5 + assert pset[1].lon < 2.2e5 and 3.7e5 < pset[1].lat < 3.8e5 @pytest.mark.v4alpha diff --git a/docs/examples/example_peninsula.py b/docs/examples/example_peninsula.py index 37d9d354b1..3124b874a0 100644 --- a/docs/examples/example_peninsula.py +++ b/docs/examples/example_peninsula.py @@ -1,4 +1,3 @@ 
-import gc import math # NOQA from argparse import ArgumentParser from datetime import timedelta @@ -216,14 +215,18 @@ def fieldsetfile(mesh, tmpdir): return filename -@pytest.mark.parametrize("mesh", ["flat", "spherical"]) -def test_peninsula_file(mesh, tmpdir): +def test_peninsula_file(tmpdir): """Open fieldset files and execute.""" - gc.collect() - fieldset = parcels.FieldSet.from_parcels( - fieldsetfile(mesh, tmpdir), - extra_fields={"P": "P"}, - allow_time_extrapolation=True, + data_folder = parcels.download_example_dataset("Peninsula_data") + filenames = { + "U": str(data_folder / "peninsulaU.nc"), + "V": str(data_folder / "peninsulaV.nc"), + "P": str(data_folder / "peninsulaP.nc"), + } + variables = {"U": "vozocrtx", "V": "vomecrty", "P": "P"} + dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"} + fieldset = parcels.FieldSet.from_netcdf( + filenames, variables, dimensions, allow_time_extrapolation=True ) outfile = tmpdir.join("Peninsula") pset = peninsula_example(fieldset, outfile, 5, degree=1) @@ -299,9 +302,17 @@ def main(args=None): fieldset.write(filename) # Open fieldset file set - fieldset = parcels.FieldSet.from_parcels( - "peninsula", extra_fields={"P": "P"}, allow_time_extrapolation=True + filenames = { + "U": f"{filename}U.nc", + "V": f"{filename}V.nc", + "P": f"{filename}P.nc", + } + variables = {"U": "vozocrtx", "V": "vomecrty", "P": "P"} + dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"} + fieldset = parcels.FieldSet.from_netcdf( + filenames, variables, dimensions, allow_time_extrapolation=True ) + outfile = "Peninsula" if args.profiling: diff --git a/docs/examples/parcels_tutorial.ipynb b/docs/examples/parcels_tutorial.ipynb index 7ddc59216e..e50483cdc2 100644 --- a/docs/examples/parcels_tutorial.ipynb +++ b/docs/examples/parcels_tutorial.ipynb @@ -65,7 +65,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The first step to running particles with Parcels is to define a `FieldSet` 
object, which is simply a collection of hydrodynamic fields. In this first case, we use a simple flow of two idealised moving eddies. That field can be downloaded using the `download_example_dataset()` function that comes with Parcels. Since we know that the files are in what's called Parcels FieldSet format, we can call these files using the function `FieldSet.from_parcels()`.\n" + "The first step to running particles with Parcels is to define a `FieldSet` object, which is simply a collection of hydrodynamic fields. In this first case, we use a simple flow of two idealised moving eddies. That field can be downloaded using the `download_example_dataset()` function that comes with Parcels. Since we know that the files are in what's called Parcels FieldSet format, we can call these files using the function `FieldSet.from_netcdf()`.\n" ] }, { @@ -76,7 +76,13 @@ "source": [ "example_dataset_folder = parcels.download_example_dataset(\"MovingEddies_data\")\n", "\n", - "fieldset = parcels.FieldSet.from_parcels(f\"{example_dataset_folder}/moving_eddies\")\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"moving_eddiesU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"moving_eddiesV.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", \"V\": \"vomecrty\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions)\n", "\n", "print(fieldset)" ] @@ -666,10 +672,15 @@ "outputs": [], "source": [ "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", - "fieldset = parcels.FieldSet.from_parcels(\n", - " f\"{example_dataset_folder}/peninsula\",\n", - " extra_fields={\"P\": \"P\"},\n", - " allow_time_extrapolation=True,\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"peninsulaU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"peninsulaV.nc\"),\n", + " \"P\": str(example_dataset_folder / 
\"peninsulaP.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", \"V\": \"vomecrty\", \"P\": \"P\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(\n", + " filenames, variables, dimensions, allow_time_extrapolation=True\n", ")" ] }, diff --git a/docs/examples/tutorial_delaystart.ipynb b/docs/examples/tutorial_delaystart.ipynb index c6524b8c47..ca460bb1a9 100644 --- a/docs/examples/tutorial_delaystart.ipynb +++ b/docs/examples/tutorial_delaystart.ipynb @@ -50,8 +50,14 @@ "outputs": [], "source": [ "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", - "fieldset = parcels.FieldSet.from_parcels(\n", - " f\"{example_dataset_folder}/peninsula\", allow_time_extrapolation=True\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"peninsulaU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"peninsulaV.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", \"V\": \"vomecrty\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(\n", + " filenames, variables, dimensions, allow_time_extrapolation=True\n", ")" ] }, diff --git a/docs/examples/tutorial_output.ipynb b/docs/examples/tutorial_output.ipynb index d465432dfa..febef5d321 100644 --- a/docs/examples/tutorial_output.ipynb +++ b/docs/examples/tutorial_output.ipynb @@ -44,8 +44,15 @@ "outputs": [], "source": [ "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", - "fieldset = parcels.FieldSet.from_parcels(\n", - " f\"{example_dataset_folder}/peninsula\", allow_time_extrapolation=True\n", + "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"peninsulaU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"peninsulaV.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", 
\"V\": \"vomecrty\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(\n", + " filenames, variables, dimensions, allow_time_extrapolation=True\n", ")\n", "\n", "npart = 10 # number of particles to be released\n", diff --git a/docs/examples/tutorial_periodic_boundaries.ipynb b/docs/examples/tutorial_periodic_boundaries.ipynb index 27a6024b01..bd69685814 100644 --- a/docs/examples/tutorial_periodic_boundaries.ipynb +++ b/docs/examples/tutorial_periodic_boundaries.ipynb @@ -86,8 +86,14 @@ "outputs": [], "source": [ "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", - "fieldset = parcels.FieldSet.from_parcels(\n", - " f\"{example_dataset_folder}/peninsula\", allow_time_extrapolation=True\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"peninsulaU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"peninsulaV.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", \"V\": \"vomecrty\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(\n", + " filenames, variables, dimensions, allow_time_extrapolation=True\n", ")" ] }, diff --git a/docs/examples/tutorial_sampling.ipynb b/docs/examples/tutorial_sampling.ipynb index 0c3f55a491..50ef615bce 100644 --- a/docs/examples/tutorial_sampling.ipynb +++ b/docs/examples/tutorial_sampling.ipynb @@ -67,10 +67,15 @@ "source": [ "# Velocity and temperature fields\n", "example_dataset_folder = parcels.download_example_dataset(\"Peninsula_data\")\n", - "fieldset = parcels.FieldSet.from_parcels(\n", - " f\"{example_dataset_folder}/peninsula\",\n", - " extra_fields={\"T\": \"T\"},\n", - " allow_time_extrapolation=True,\n", + "filenames = {\n", + " \"U\": str(example_dataset_folder / \"peninsulaU.nc\"),\n", + " \"V\": str(example_dataset_folder / \"peninsulaV.nc\"),\n", + " \"T\": str(example_dataset_folder / 
\"peninsulaT.nc\"),\n", + "}\n", + "variables = {\"U\": \"vozocrtx\", \"V\": \"vomecrty\", \"T\": \"T\"}\n", + "dimensions = {\"lon\": \"nav_lon\", \"lat\": \"nav_lat\", \"time\": \"time_counter\"}\n", + "fieldset = parcels.FieldSet.from_netcdf(\n", + " filenames, variables, dimensions, allow_time_extrapolation=True\n", ")\n", "\n", "# Particle locations and initial time\n", diff --git a/parcels/fieldset.py b/parcels/fieldset.py index 9c8c073a2a..dee653f71e 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -2,7 +2,6 @@ import os import sys import warnings -from copy import deepcopy from glob import glob import numpy as np @@ -894,57 +893,6 @@ def from_b_grid_dataset( **kwargs, ) - @classmethod - def from_parcels( - cls, - basename, - uvar="vozocrtx", - vvar="vomecrty", - extra_fields=None, - allow_time_extrapolation: bool | None = None, - **kwargs, - ): - """Initialises FieldSet data from NetCDF files using the Parcels FieldSet.write() conventions. - - Parameters - ---------- - basename : str - Base name of the file(s); may contain - wildcards to indicate multiple files. - fieldtype : - Optional dictionary mapping fields to fieldtypes to be used for UnitConverter. - (either 'U', 'V', 'Kh_zonal', 'Kh_meridional' or None) - extra_fields : - Extra fields to read beyond U and V (Default value = None) - allow_time_extrapolation : bool - boolean whether to allow for extrapolation - (i.e. beyond the last available time snapshot) - Default is False if dimensions includes time, else True - uvar : - (Default value = 'vozocrtx') - vvar : - (Default value = 'vomecrty') - **kwargs : - Keyword arguments passed to the :func:`Fieldset.from_netcdf` constructor. 
- """ - if extra_fields is None: - extra_fields = {} - - dimensions = {} - default_dims = {"lon": "nav_lon", "lat": "nav_lat", "depth": "depth", "time": "time_counter"} - extra_fields.update({"U": uvar, "V": vvar}) - for vars in extra_fields: - dimensions[vars] = deepcopy(default_dims) - dimensions[vars]["depth"] = f"depth{vars.lower()}" - filenames = {v: str(f"{basename}{v}.nc") for v in extra_fields.keys()} - return cls.from_netcdf( - filenames, - variables=extra_fields, - dimensions=dimensions, - allow_time_extrapolation=allow_time_extrapolation, - **kwargs, - ) - @classmethod def from_xarray_dataset(cls, ds, variables, dimensions, mesh="spherical", allow_time_extrapolation=None, **kwargs): """Initialises FieldSet data from xarray Datasets. diff --git a/tests/test_fieldset.py b/tests/test_fieldset.py index 32014d02f0..f717451350 100644 --- a/tests/test_fieldset.py +++ b/tests/test_fieldset.py @@ -143,21 +143,6 @@ def test_fieldset_from_data_different_dimensions(xdim, ydim): assert np.allclose(fieldset.P.data, 2.0, rtol=1e-12) -@pytest.mark.parametrize("xdim", [100, 200]) -@pytest.mark.parametrize("ydim", [100, 200]) -def test_fieldset_from_parcels(xdim, ydim, tmpdir): - """Simple test for fieldset initialisation from Parcels FieldSet file format.""" - filepath = tmpdir.join("test_parcels") - data, dimensions = generate_fieldset_data(xdim, ydim) - fieldset_out = FieldSet.from_data(data, dimensions) - fieldset_out.write(filepath) - fieldset = FieldSet.from_parcels(filepath) - assert len(fieldset.U.data.shape) == 3 # Will be 4 once we use depth - assert len(fieldset.V.data.shape) == 3 - assert np.allclose(fieldset.U.data[0, :], data["U"], rtol=1e-12) - assert np.allclose(fieldset.V.data[0, :], data["V"], rtol=1e-12) - - def test_fieldset_from_modulefile(): nemo_fname = str(TEST_DATA / "fieldset_nemo.py") nemo_error_fname = str(TEST_DATA / "fieldset_nemo_error.py") diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 994166e0b1..d8d7c18250 
100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -402,7 +402,13 @@ def test_pset_execute_outputdt_backwards_fieldset_timevarying(): # TODO: Not ideal using the `download_example_dataset` here, but I'm struggling to recreate this error using the test suite fieldsets we have example_dataset_folder = parcels.download_example_dataset("MovingEddies_data") - fieldset = parcels.FieldSet.from_parcels(f"{example_dataset_folder}/moving_eddies") + filenames = { + "U": str(example_dataset_folder / "moving_eddiesU.nc"), + "V": str(example_dataset_folder / "moving_eddiesV.nc"), + } + variables = {"U": "vozocrtx", "V": "vomecrty"} + dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"} + fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions) ds = setup_pset_execute(outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt), fieldset=fieldset) file_outputdt = ds.isel(trajectory=0).time.diff(dim="obs").values