diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 48042e8da1..fe3556bd99 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -76,10 +76,13 @@ jobs:
         with:
           environment-file: environment.yml
       - name: Integration test
-        # TODO v4: Re-enable `tutorial_periodic_boundaries`, `tutorial_timevaryingdepthdimensions` and `tutorial_croco_3D` notebooks
-        # TODO v4: Re-enable `tutorial_nemo_3D` notebook once 3D grids are implemented (https://github.com/OceanParcels/Parcels/pull/1936#issuecomment-2717666705)
+        # TODO v4: Re-enable `tutorial_periodic_boundaries`
+        # TODO v4: Re-enable `tutorial_timevaryingdepthdimensions`
+        # TODO v4: Re-enable `tutorial_particle_field_interaction`
+        # TODO v4: Re-enable `tutorial_croco_3D`
+        # TODO v4: Re-enable `tutorial_nemo_3D` (https://github.com/OceanParcels/Parcels/pull/1936#issuecomment-2717666705)
         run: |
-          coverage run -m pytest -v -s --nbval-lax -k "not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_croco_3D and not tutorial_nemo_3D" --html="${{ matrix.os }}_${{ matrix.python-version }}_integration_test_report.html" --self-contained-html docs/examples
+          coverage run -m pytest -v -s --nbval-lax -k "not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_particle_field_interaction and not tutorial_croco_3D and not tutorial_nemo_3D" --html="${{ matrix.os }}_${{ matrix.python-version }}_integration_test_report.html" --self-contained-html docs/examples
           coverage xml
       - name: Codecov
         uses: codecov/codecov-action@v5.3.1
diff --git a/docs/examples/example_decaying_moving_eddy.py b/docs/examples/example_decaying_moving_eddy.py
index 206eb80832..75de6ee78c 100644
--- a/docs/examples/example_decaying_moving_eddy.py
+++ b/docs/examples/example_decaying_moving_eddy.py
@@ -99,10 +99,8 @@ def test_rotation_example(tmpdir):
 
 
 def main():
-    fset_filename = "decaying_moving_eddy"
     outfile = "DecayingMovingParticle.zarr"
     fieldset = decaying_moving_eddy_fieldset()
-    fieldset.write(fset_filename)
 
     decaying_moving_example(fieldset, outfile)
 
diff --git a/docs/examples/example_peninsula.py b/docs/examples/example_peninsula.py
index 3124b874a0..2f1aca1e3d 100644
--- a/docs/examples/example_peninsula.py
+++ b/docs/examples/example_peninsula.py
@@ -207,14 +207,6 @@ def test_peninsula_fieldset_AnalyticalAdvection(mesh, tmpdir):
     assert (err_adv <= 3.0e2).all()
 
 
-def fieldsetfile(mesh, tmpdir):
-    """Generate fieldset files for peninsula test."""
-    filename = tmpdir.join("peninsula")
-    fieldset = peninsula_fieldset(100, 50, mesh=mesh)
-    fieldset.write(filename)
-    return filename
-
-
 def test_peninsula_file(tmpdir):
     """Open fieldset files and execute."""
     data_folder = parcels.download_example_dataset("Peninsula_data")
@@ -294,24 +286,10 @@ def main(args=None):
     )
     args = p.parse_args(args)
 
-    filename = "peninsula"
     if args.fieldset is not None:
         fieldset = peninsula_fieldset(args.fieldset[0], args.fieldset[1], mesh="flat")
     else:
         fieldset = peninsula_fieldset(100, 50, mesh="flat")
-    fieldset.write(filename)
-
-    # Open fieldset file set
-    filenames = {
-        "U": f"{filename}U.nc",
-        "V": f"{filename}V.nc",
-        "P": f"{filename}P.nc",
-    }
-    variables = {"U": "vozocrtx", "V": "vomecrty", "P": "P"}
-    dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"}
-    fieldset = parcels.FieldSet.from_netcdf(
-        filenames, variables, dimensions, allow_time_extrapolation=True
-    )
 
     outfile = "Peninsula"
 
diff --git a/docs/examples/example_radial_rotation.py b/docs/examples/example_radial_rotation.py
index dfc45669b7..3d65694f1d 100644
--- a/docs/examples/example_radial_rotation.py
+++ b/docs/examples/example_radial_rotation.py
@@ -86,10 +86,7 @@ def test_rotation_example(tmpdir):
 
 
 def main():
-    filename = "radial_rotation"
     fieldset = radial_rotation_fieldset()
-    fieldset.write(filename)
-
     outfile = "RadialParticle"
     rotation_example(fieldset, outfile)
 
diff --git a/docs/examples/example_stommel.py b/docs/examples/example_stommel.py
index 2f714bd789..26379e6043 100755
--- a/docs/examples/example_stommel.py
+++ b/docs/examples/example_stommel.py
@@ -101,16 +101,12 @@ def stommel_example(
     outfile="StommelParticle.zarr",
     repeatdt=None,
     maxage=None,
-    write_fields=True,
     custom_partition_function=False,
 ):
     parcels.timer.fieldset = parcels.timer.Timer(
         "FieldSet", parent=parcels.timer.stommel
     )
     fieldset = stommel_fieldset(grid_type=grid_type)
-    if write_fields:
-        filename = "stommel"
-        fieldset.write(filename)
     parcels.timer.fieldset.stop()
 
     parcels.timer.pset = parcels.timer.Timer("Pset", parent=parcels.timer.stommel)
@@ -184,14 +180,12 @@ def test_stommel_fieldset(grid_type, tmpdir):
         method=method["RK4"],
         grid_type=grid_type,
         outfile=outfile,
-        write_fields=False,
     )
     psetRK45 = stommel_example(
         1,
         method=method["RK45"],
         grid_type=grid_type,
         outfile=outfile,
-        write_fields=False,
     )
     assert np.allclose(psetRK4.lon, psetRK45.lon, rtol=1e-3)
     assert np.allclose(psetRK4.lat, psetRK45.lat, rtol=1.1e-3)
@@ -251,12 +245,6 @@ def main(args=None):
         type=int,
         help="max age of the particles (after which particles are deleted)",
     )
-    p.add_argument(
-        "-wf",
-        "--write_fields",
-        default=True,
-        help="Write the hydrodynamic fields to NetCDF",
-    )
     p.add_argument(
         "-cpf",
         "--custom_partition_function",
@@ -274,7 +262,6 @@
         outfile=args.outfile,
         repeatdt=args.repeatdt,
         maxage=args.maxage,
-        write_fields=args.write_fields,
         custom_partition_function=args.custom_partition_function,
     )
     parcels.timer.stommel.stop()
diff --git a/parcels/field.py b/parcels/field.py
index 3c88a76b32..761cee0f82 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -1,7 +1,6 @@
 import collections
 import math
 import warnings
-from pathlib import Path
 from typing import TYPE_CHECKING, cast
 
 import dask.array as da
@@ -134,9 +133,6 @@ class Field:
     allow_time_extrapolation : bool
         boolean whether to allow for extrapolation in time
         (i.e. beyond the last available time snapshot)
-    to_write : bool
-        Write the Field in NetCDF format at the same frequency as the ParticleFile outputdt,
-        using a filenaming scheme based on the ParticleFile name
 
     """
 
@@ -157,7 +153,6 @@ def __init__(
         interp_method: InterpMethod = "linear",
         allow_time_extrapolation: bool | None = None,
         gridindexingtype: GridIndexingType = "nemo",
-        to_write: bool = False,
         data_full_zdim=None,
     ):
         if not isinstance(name, tuple):
@@ -176,7 +171,6 @@
         self._grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
         self.igrid = -1
         self.fieldtype = self.name if fieldtype is None else fieldtype
-        self.to_write = to_write
         if self.grid.mesh == "flat" or (self.fieldtype not in unitconverters_map.keys()):
             self.units = UnitConverter()
         elif self.grid.mesh == "spherical":
@@ -631,53 +625,6 @@ def eval(self, time, z, y, x, particle=None, applyConversion=True):
         else:
             return value
 
-    def write(self, filename, varname=None):
-        """Write a :class:`Field` to a netcdf file.
-
-        Parameters
-        ----------
-        filename : str
-            Basename of the file (i.e. '{filename}{Field.name}.nc')
-        varname : str
-            Name of the field, to be appended to the filename. (Default value = None)
-        """
-        filepath = str(Path(f"{filename}{self.name}.nc"))
-        if varname is None:
-            varname = self.name
-        # Derive name of 'depth' variable for NEMO convention
-        vname_depth = f"depth{self.name.lower()}"
-
-        # Create DataArray objects for file I/O
-        if self.grid._gtype == GridType.RectilinearZGrid:
-            nav_lon = xr.DataArray(
-                self.grid.lon + np.zeros((self.grid.ydim, self.grid.xdim), dtype=np.float32),
-                coords=[("y", self.grid.lat), ("x", self.grid.lon)],
-            )
-            nav_lat = xr.DataArray(
-                self.grid.lat.reshape(self.grid.ydim, 1) + np.zeros(self.grid.xdim, dtype=np.float32),
-                coords=[("y", self.grid.lat), ("x", self.grid.lon)],
-            )
-        elif self.grid._gtype == GridType.CurvilinearZGrid:
-            nav_lon = xr.DataArray(self.grid.lon, coords=[("y", range(self.grid.ydim)), ("x", range(self.grid.xdim))])
-            nav_lat = xr.DataArray(self.grid.lat, coords=[("y", range(self.grid.ydim)), ("x", range(self.grid.xdim))])
-        else:
-            raise NotImplementedError("Field.write only implemented for RectilinearZGrid and CurvilinearZGrid")
-
-        attrs = {"units": "seconds since " + str(self.grid.time_origin)} if self.grid.time_origin.calendar else {}
-        time_counter = xr.DataArray(self.grid.time, dims=["time_counter"], attrs=attrs)
-        vardata = xr.DataArray(
-            self.data.reshape((self.grid.tdim, self.grid.zdim, self.grid.ydim, self.grid.xdim)),
-            dims=["time_counter", vname_depth, "y", "x"],
-        )
-        # Create xarray Dataset and output to netCDF format
-        attrs = {"parcels_mesh": self.grid.mesh}
-        dset = xr.Dataset(
-            {varname: vardata},
-            coords={"nav_lon": nav_lon, "nav_lat": nav_lat, "time_counter": time_counter, vname_depth: self.grid.depth},
-            attrs=attrs,
-        )
-        dset.to_netcdf(filepath, unlimited_dims="time_counter")
-
     def _rescale_and_set_minmax(self, data):
         data[np.isnan(data)] = 0
         return data
diff --git a/parcels/fieldset.py b/parcels/fieldset.py
index dee653f71e..cc8707c20a 100644
--- a/parcels/fieldset.py
+++ b/parcels/fieldset.py
@@ -6,7 +6,6 @@
 
 import numpy as np
 
-from parcels._compat import MPI
 from parcels._typing import GridIndexingType, InterpMethodOption, Mesh
 from parcels.field import Field, VectorField
 from parcels.grid import Grid
@@ -14,7 +13,6 @@
 from parcels.particlefile import ParticleFile
 from parcels.tools._helpers import fieldset_repr
 from parcels.tools.converters import TimeConverter
-from parcels.tools.loggers import logger
 from parcels.tools.warnings import FieldSetWarning
 
 __all__ = ["FieldSet"]
@@ -1003,26 +1001,6 @@ def add_constant(self, name, value):
         """
         setattr(self, name, value)
 
-    def write(self, filename):
-        """Write FieldSet to NetCDF file using NEMO convention.
-
-        Parameters
-        ----------
-        filename : str
-            Basename of the output fileset.
-        """
-        if MPI is None or MPI.COMM_WORLD.Get_rank() == 0:
-            logger.info(f"Generating FieldSet output with basename: {filename}")
-
-            if hasattr(self, "U"):
-                self.U.write(filename, varname="vozocrtx")
-            if hasattr(self, "V"):
-                self.V.write(filename, varname="vomecrty")
-
-            for v in self.get_fields():
-                if isinstance(v, Field) and (v.name != "U") and (v.name != "V"):
-                    v.write(filename)
-
     def computeTimeChunk(self, time=0.0, dt=1):
         """Load a chunk of three data time steps into the FieldSet.
 
         This is used when FieldSet uses data imported from netcdf,
diff --git a/parcels/tools/_helpers.py b/parcels/tools/_helpers.py
index d3dd7fb79d..7f26273bb6 100644
--- a/parcels/tools/_helpers.py
+++ b/parcels/tools/_helpers.py
@@ -75,7 +75,6 @@ def field_repr(field: Field) -> str:
         grid            : {field.grid!r}
         extrapolate time: {field.allow_time_extrapolation!r}
         gridindexingtype: {field.gridindexingtype!r}
-        to_write        : {field.to_write!r}
     """
     return textwrap.dedent(out).strip()
 
diff --git a/pyproject.toml b/pyproject.toml
index ad771f999e..faacac3c21 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,7 +46,7 @@ platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
 
 [tool.pixi.tasks]
 tests = "pytest"
-tests-notebooks = "pytest -v -s --nbval-lax -k 'not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_croco_3D and not tutorial_nemo_3D'" # TODO v4: Re-enable `tutorial_periodic_boundaries`, `tutorial_timevaryingdepthdimensions`, `tutorial_croco_3D`, `tutorial_nemo_3D` notebooks
+tests-notebooks = "pytest -v -s --nbval-lax -k 'not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_particle_field_interaction and not tutorial_croco_3D and not tutorial_nemo_3D'" # TODO v4: Mirror ci.yml for notebooks being run
 coverage = "coverage run -m pytest && coverage html"
 typing = "mypy parcels"
 pre-commit = "pre-commit run --all-files"
diff --git a/tests/test_advection.py b/tests/test_advection.py
index 07ae63ad83..ca25f25c9b 100644
--- a/tests/test_advection.py
+++ b/tests/test_advection.py
@@ -459,7 +459,6 @@ def test_stationary_eddy_vertical():
     pset.execute(AdvectionRK4_3D, dt=dt, endtime=endtime)
     exp_lon = [truth_stationary(x, z, pset[0].time)[0] for x, z in zip(lon, depth, strict=True)]
     exp_depth = [truth_stationary(x, z, pset[0].time)[1] for x, z in zip(lon, depth, strict=True)]
-    print(pset, exp_lon)
     assert np.allclose(pset.lon, exp_lon, rtol=1e-5)
     assert np.allclose(pset.lat, lat, rtol=1e-5)
     assert np.allclose(pset.depth, exp_depth, rtol=1e-5)
diff --git a/tests/test_fieldset.py b/tests/test_fieldset.py
index 78b7317a50..c461d2770f 100644
--- a/tests/test_fieldset.py
+++ b/tests/test_fieldset.py
@@ -229,13 +229,11 @@ def test_illegal_dimensionsdict(calltype):
 @pytest.mark.parametrize("xdim", [100, 200])
 @pytest.mark.parametrize("ydim", [100, 200])
 def test_add_field(xdim, ydim, tmpdir):
-    filepath = tmpdir.join("test_add")
     data, dimensions = generate_fieldset_data(xdim, ydim)
     fieldset = FieldSet.from_data(data, dimensions)
     field = Field("newfld", fieldset.U.data, lon=fieldset.U.lon, lat=fieldset.U.lat)
     fieldset.add_field(field)
     assert fieldset.newfld.data.shape == fieldset.U.data.shape
-    fieldset.write(filepath)
 
 
 @pytest.mark.parametrize("dupobject", ["same", "new"])
@@ -348,26 +346,6 @@ def test_fieldset_samegrids_from_data():
     assert fieldset1.U.grid == fieldset1.B.grid
 
 
-def test_fieldset_write_curvilinear(tmpdir):
-    fname = str(TEST_DATA / "mask_nemo_cross_180lon.nc")
-    filenames = {"dx": fname, "mesh_mask": fname}
-    variables = {"dx": "e1u"}
-    dimensions = {"lon": "glamu", "lat": "gphiu"}
-    fieldset = FieldSet.from_nemo(filenames, variables, dimensions)
-
-    newfile = tmpdir.join("curv_field")
-    fieldset.write(newfile)
-
-    fieldset2 = FieldSet.from_netcdf(
-        filenames=newfile + "dx.nc",
-        variables={"dx": "dx"},
-        dimensions={"time": "time_counter", "depth": "depthdx", "lon": "nav_lon", "lat": "nav_lat"},
-    )
-
-    for var in ["lon", "lat", "data"]:
-        assert np.allclose(getattr(fieldset2.dx, var), getattr(fieldset.dx, var))
-
-
 def addConst(particle, fieldset, time):  # pragma: no cover
     particle.lon = particle.lon + fieldset.movewest + fieldset.moveeast
 
@@ -437,37 +415,6 @@ def SampleUV2(particle, fieldset, time):  # pragma: no cover
     assert abs(pset.lat[0] - 0.5) < 1e-9
 
 
-def test_fieldset_write(tmp_zarrfile):
-    xdim, ydim = 3, 4
-    lon = np.linspace(0.0, 10.0, xdim, dtype=np.float32)
-    lat = np.linspace(0.0, 10.0, ydim, dtype=np.float32)
-    U = np.ones((ydim, xdim), dtype=np.float32)
-    V = np.zeros((ydim, xdim), dtype=np.float32)
-    data = {"U": U, "V": V}
-    dimensions = {"U": {"lat": lat, "lon": lon}, "V": {"lat": lat, "lon": lon}}
-    fieldset = FieldSet.from_data(data, dimensions, mesh="flat")
-
-    fieldset.U.to_write = True
-
-    def UpdateU(particle, fieldset, time):  # pragma: no cover
-        from parcels._index_search import _search_time_index
-
-        tmp1, tmp2 = fieldset.UV[particle]
-        _, yi, xi = fieldset.U.unravel_index(particle.ei)
-        _, ti = _search_time_index(fieldset.U.grid, time)
-        fieldset.U.data[ti, yi, xi] += 1
-        fieldset.U.grid.time[0] = time
-
-    pset = ParticleSet(fieldset, pclass=Particle, lon=5, lat=5)
-    ofile = pset.ParticleFile(name=tmp_zarrfile, outputdt=2.0)
-    pset.execute(UpdateU, dt=1, runtime=10, output_file=ofile)
-
-    assert fieldset.U.data[0, 1, 0] == 11
-
-    da = xr.open_dataset(str(tmp_zarrfile).replace(".zarr", "_0005U.nc"))
-    assert np.allclose(fieldset.U.data, da["U"].values, atol=1.0)
-
-
 @pytest.mark.v4remove
 @pytest.mark.xfail(reason="GH1918")
 @pytest.mark.parametrize("datetype", ["float", "datetime64"])
diff --git a/tests/test_mpirun.py b/tests/test_mpirun.py
index cb242fb4a9..cb29261f69 100644
--- a/tests/test_mpirun.py
+++ b/tests/test_mpirun.py
@@ -19,10 +19,10 @@ def test_mpi_run(tmpdir, repeatdt, maxage, nump):
 
     outputNoMPI = tmpdir.join("StommelNoMPI.zarr")
 
     os.system(
-        f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI_partition_function} -r {repeatdt} -a {maxage} -wf False -cpf True"
+        f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI_partition_function} -r {repeatdt} -a {maxage} -cpf True"
     )
-    os.system(f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI} -r {repeatdt} -a {maxage} -wf False")
-    os.system(f"python {stommel_file} -p {nump} -o {outputNoMPI} -r {repeatdt} -a {maxage} -wf False")
+    os.system(f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI} -r {repeatdt} -a {maxage}")
+    os.system(f"python {stommel_file} -p {nump} -o {outputNoMPI} -r {repeatdt} -a {maxage}")
 
     ds2 = xr.open_zarr(outputNoMPI)