From 3521de7945519fc0218390be8db15d0d0b2427a5 Mon Sep 17 00:00:00 2001 From: reint fischer Date: Tue, 14 Oct 2025 11:22:03 +0200 Subject: [PATCH 01/17] add output start time and end time test --- tests/test_particlefile.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 0fc2320bec..a3b3d2f187 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -363,6 +363,20 @@ def test_pset_execute_outputdt_forwards(fieldset): assert np.all(ds.isel(trajectory=0).time.diff(dim="obs").values == np.timedelta64(outputdt)) +def test_pset_execute_output_time_forwards(fieldset): + """Testing output times start at initial time and end at initial time + runtime.""" + outputdt = np.timedelta64(1, "h") + runtime = np.timedelta64(5, "h") + dt = np.timedelta64(5, "m") + + ds = setup_pset_execute(fieldset=fieldset, outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt)) + + assert ( + ds.time[0, 0].values == fieldset.time_interval.left + and ds.time[0, -1].values == fieldset.time_interval.left + runtime + ) + + @pytest.mark.skip(reason="backwards in time not yet working") def test_pset_execute_outputdt_backwards(fieldset): """Testing output data dt matches outputdt in backwards time.""" From 659b162bffd974736a1d2db06e26964384e36d85 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 15 Oct 2025 09:52:50 +0200 Subject: [PATCH 02/17] Update tutorial_kernelloop.ipynb --- docs/examples/tutorial_kernelloop.ipynb | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/examples/tutorial_kernelloop.ipynb b/docs/examples/tutorial_kernelloop.ipynb index ffdc47855a..c9c2e6aec0 100644 --- a/docs/examples/tutorial_kernelloop.ipynb +++ b/docs/examples/tutorial_kernelloop.ipynb @@ -26,7 +26,7 @@ "\n", "When you run a Parcels simulation (i.e. a call to `pset.execute()`), the Kernel loop is the main part of the code that is executed. This part of the code loops through all particles and executes the Kernels that are defined for each particle.\n", "\n", - "In order to make sure that the displacements of a particle in the different Kernels can be summed, all Kernels add to a _change_ in position (`particles.dlon`, `particles.dlat`, and `particles.ddepth`). This is important, because there are situations where movement kernels would otherwise not commute. Take the example of advecting particles by currents _and_ winds. If the particle would first be moved by the currents and then by the winds, the result could be different from first moving by the winds and then by the currents. Instead, by adding the changes in position, the ordering of the Kernels has no consequence on the particle displacement." + "In order to make sure that the displacements of a particle in the different Kernels can be summed, all Kernels add to a _change_ in position (`particles.dlon`, `particles.dlat`, and `particles.dz`). This is important, because there are situations where movement kernels would otherwise not commute. Take the example of advecting particles by currents _and_ winds. If the particle would first be moved by the currents and then by the winds, the result could be different from first moving by the winds and then by the currents. Instead, by adding the changes in position, the ordering of the Kernels has no consequence on the particle displacement." ] }, { @@ -40,9 +40,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Below is a structured overview of the Kernel loop is implemented. 
Note that this is for longitude only, but the same process is applied for latitude and depth.\n",
    "\n",
    "1. Initialise an extra Variable `particles.dlon=0` and `particles.time_nextloop = particles.time`\n",
    "\n",
    "2. Within the Kernel loop, for each particle (see the sketch after this list):
\n", "\n", @@ -62,7 +62,7 @@ "\n", " 6. If `outputdt` is a multiple of `particle.time`, write `particle.lon` and `particle.time` to zarr output file
\n", "\n", - "Besides having commutable Kernels, the main advantage of this implementation is that, when using Field Sampling with e.g. `particle.temp = fieldset.Temp[particle.time, particle.depth, particle.lat, particle.lon]`, the particle location stays the same throughout the entire Kernel loop. Additionally, this implementation ensures that the particle location is the same as the location of the sampled field in the output file." + "Besides having commutable Kernels, the main advantage of this implementation is that, when using Field Sampling with e.g. `particle.temp = fieldset.Temp[particle.time, particle.z, particle.lat, particle.lon]`, the particle location stays the same throughout the entire Kernel loop. Additionally, this implementation ensures that the particle location is the same as the location of the sampled field in the output file." ] }, { @@ -155,10 +155,10 @@ "source": [ "def wind_kernel(particle, fieldset, time):\n", " particle_dlon += (\n", - " fieldset.UWind[time, particle.depth, particle.lat, particle.lon] * particle.dt\n", + " fieldset.UWind[time, particle.z, particle.lat, particle.lon] * particle.dt\n", " )\n", " particle_dlat += (\n", - " fieldset.VWind[time, particle.depth, particle.lat, particle.lon] * particle.dt\n", + " fieldset.VWind[time, particle.z, particle.lat, particle.lon] * particle.dt\n", " )" ] }, @@ -265,7 +265,7 @@ "### 3. The last time is not written to file\n", "Because the location at the start of the loop is written at the end of the Kernel loop, the last `particle.time` of the particle is not written to file. This is similar behaviour to e.g. `np.arange(start, stop)`, which also doesn't include the `stop` value itself. \n", "\n", - "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause a TimeExtrapolationError if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time_nextloop)`. Note that in the latter case, the particle locations (longitude, latitude and depth) will be updated, but other variables will _not_ be updated as the Kernels are not run again." + "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause a TimeExtrapolationError if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time_nextloop)`. Note that in the latter case, the particle locations (longitude, latitude and z) will be updated, but other variables will _not_ be updated as the Kernels are not run again." ] }, { @@ -355,7 +355,7 @@ "source": [ "def KeepInOcean(particle, fieldset, time):\n", " if particle.state == StatusCode.ErrorThroughSurface:\n", - " particle_ddepth = 0.0\n", + " particle_dz = 0.0\n", " particle.state = StatusCode.Success" ] }, From 0f5bdfffa2ea668225d9e334ae018fc26cae11ec Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 15 Oct 2025 17:49:14 +0200 Subject: [PATCH 03/17] First attempt at cleaning up the Kernel Loop By removing the time_nextloop from the KernelLoop, almost all tests work much smoother. 
A bit of work to be done on some edge cases --- src/parcels/_core/kernel.py | 7 +++---- src/parcels/_core/particlefile.py | 15 ++++++--------- src/parcels/_core/particleset.py | 8 ++++++-- tests/test_advection.py | 14 +++++++------- tests/test_diffusion.py | 1 - tests/test_particlefile.py | 2 +- tests/test_particleset.py | 2 +- tests/test_particleset_execute.py | 29 +++++++++++++++-------------- 8 files changed, 39 insertions(+), 39 deletions(-) diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 787e14df91..8e04a73d65 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -117,15 +117,14 @@ def Setcoords(particles, fieldset): # pragma: no cover particles.lon += particles.dlon particles.lat += particles.dlat particles.z += particles.dz + particles.time += particles.dt particles.dlon = 0 particles.dlat = 0 particles.dz = 0 - particles.time = particles.time_nextloop - def UpdateTime(particles, fieldset): # pragma: no cover - particles.time_nextloop = particles.time + particles.dt + particles.time_nextloop = particles.time + particles.dt # TODO remove self._pyfuncs = (Setcoords + self + UpdateTime)._pyfuncs @@ -239,7 +238,7 @@ def execute(self, pset, endtime, dt): self._positionupdate_kernels_added = True while (len(pset) > 0) and np.any(np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Repeat])): - time_to_endtime = compute_time_direction * (endtime - pset.time_nextloop) + time_to_endtime = compute_time_direction * (endtime - pset.time) if all(time_to_endtime <= 0): return StatusCode.Success diff --git a/src/parcels/_core/particlefile.py b/src/parcels/_core/particlefile.py index 3e290ac2f9..04e7b38fe3 100644 --- a/src/parcels/_core/particlefile.py +++ b/src/parcels/_core/particlefile.py @@ -301,21 +301,21 @@ def _to_write_particles(particle_data, time): ( np.less_equal( time - np.abs(particle_data["dt"] / 2), - particle_data["time_nextloop"], - where=np.isfinite(particle_data["time_nextloop"]), + particle_data["time"], + where=np.isfinite(particle_data["time"]), ) & np.greater_equal( time + np.abs(particle_data["dt"] / 2), - particle_data["time_nextloop"], - where=np.isfinite(particle_data["time_nextloop"]), + particle_data["time"], + where=np.isfinite(particle_data["time"]), ) # check time - dt/2 <= particle_data["time"] <= time + dt/2 | ( (np.isnan(particle_data["dt"])) - & np.equal(time, particle_data["time_nextloop"], where=np.isfinite(particle_data["time_nextloop"])) + & np.equal(time, particle_data["time"], where=np.isfinite(particle_data["time"])) ) # or dt is NaN and time matches particle_data["time"] ) & (np.isfinite(particle_data["trajectory"])) - & (np.isfinite(particle_data["time_nextloop"])) + & (np.isfinite(particle_data["time"])) )[0] @@ -324,9 +324,6 @@ def _convert_particle_data_time_to_float_seconds(particle_data, time_interval): particle_data = particle_data.copy() particle_data["time"] = ((particle_data["time"] - time_interval.left) / np.timedelta64(1, "s")).astype(np.float64) - particle_data["time_nextloop"] = ( - (particle_data["time_nextloop"] - time_interval.left) / np.timedelta64(1, "s") - ).astype(np.float64) particle_data["dt"] = (particle_data["dt"] / np.timedelta64(1, "s")).astype(np.float64) return particle_data diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index ace99e9738..fe0250a33a 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -518,7 +518,7 @@ def execute( ) from e start_time, end_time = _get_simulation_start_and_end_times( - 
self.fieldset.time_interval, self._data["time_nextloop"], runtime, endtime, sign_dt + self.fieldset.time_interval, self._data["time"], runtime, endtime, sign_dt ) # Set the time of the particles if it hadn't been set on initialisation @@ -536,7 +536,11 @@ def execute( pbar = tqdm(total=(end_time - start_time) / np.timedelta64(1, "s"), file=sys.stdout) pbar.set_description("Integration time: " + str(start_time)) - next_output = start_time + sign_dt * outputdt if output_file else None + next_output = start_time if output_file else None + + # TODO clean up two lines below: -dt is needed because in SetCoords dt gets added again + start_time -= dt + self._data["time"][:] -= dt time = start_time while sign_dt * (time - end_time) < 0: diff --git a/tests/test_advection.py b/tests/test_advection.py index 486bfe4582..00055f10b4 100644 --- a/tests/test_advection.py +++ b/tests/test_advection.py @@ -102,8 +102,8 @@ def test_advection_zonal_periodic(): startlon = np.array([0.5, 0.4]) pset = ParticleSet(fieldset, pclass=PeriodicParticle, lon=startlon, lat=[0.5, 0.5]) pset.execute([AdvectionEE, periodicBC], runtime=np.timedelta64(40, "s"), dt=np.timedelta64(1, "s")) - np.testing.assert_allclose(pset.total_dlon, 4, atol=1e-5) - np.testing.assert_allclose(pset.lon + pset.dlon, startlon, atol=1e-5) + np.testing.assert_allclose(pset.total_dlon, 4.1, atol=1e-5) + np.testing.assert_allclose(pset.lon, startlon, atol=1e-5) np.testing.assert_allclose(pset.lat, 0.5, atol=1e-5) @@ -165,7 +165,7 @@ def SubmergeParticle(particles, fieldset): # pragma: no cover kernels.append(DeleteParticle) pset = ParticleSet(fieldset=fieldset, lon=0.5, lat=0.5, z=0.9) - pset.execute(kernels, runtime=np.timedelta64(11, "s"), dt=np.timedelta64(1, "s")) + pset.execute(kernels, runtime=np.timedelta64(10, "s"), dt=np.timedelta64(1, "s")) if direction == "up" and wErrorThroughSurface: np.testing.assert_allclose(pset.lon[0], 0.6, atol=1e-5) @@ -222,7 +222,7 @@ def test_length1dimensions(u, v, w): # TODO: Refactor this test to be more read x0, y0, z0 = 2, 8, -4 pset = ParticleSet(fieldset, lon=x0, lat=y0, z=z0) kernel = AdvectionRK4 if w is None else AdvectionRK4_3D - pset.execute(kernel, runtime=np.timedelta64(5, "s"), dt=np.timedelta64(1, "s")) + pset.execute(kernel, runtime=np.timedelta64(4, "s"), dt=np.timedelta64(1, "s")) assert len(pset.lon) == len([p.lon for p in pset]) np.testing.assert_allclose(np.array([p.lon - x0 for p in pset]), 4 * u, atol=1e-6) @@ -332,7 +332,7 @@ def test_decaying_moving_eddy(method, rtol): fieldset.add_constant("RK45_min_dt", 10 * 60) pset = ParticleSet(fieldset, lon=start_lon, lat=start_lat, time=np.timedelta64(0, "s")) - pset.execute(kernel[method], dt=dt, endtime=np.timedelta64(1, "D")) + pset.execute(kernel[method], dt=dt, endtime=np.timedelta64(23, "h")) def truth_moving(x_0, y_0, t): t /= np.timedelta64(1, "s") @@ -412,7 +412,7 @@ def test_peninsula_fieldset(method, rtol, grid_type): fieldset = FieldSet([U, V, P, UV]) dt = np.timedelta64(30, "m") - runtime = np.timedelta64(1, "D") + runtime = np.timedelta64(23, "h") start_lat = np.linspace(3e3, 47e3, npart) start_lon = 3e3 * np.ones_like(start_lat) @@ -553,7 +553,7 @@ def test_nemo_3D_curvilinear_fieldset(method): lats = np.linspace(52.5, 51.6, npart) pset = parcels.ParticleSet(fieldset, lon=lons, lat=lats, z=np.ones_like(lons)) - pset.execute(kernel[method], runtime=np.timedelta64(4, "D"), dt=np.timedelta64(6, "h")) + pset.execute(kernel[method], runtime=np.timedelta64(3, "D") + np.timedelta64(18, "h"), dt=np.timedelta64(6, "h")) if method == "RK4": 
np.testing.assert_equal(round_and_hash_float_array([p.lon for p in pset], decimals=5), 29977383852960156017546) diff --git a/tests/test_diffusion.py b/tests/test_diffusion.py index 495a7ac2ca..3972bb520f 100644 --- a/tests/test_diffusion.py +++ b/tests/test_diffusion.py @@ -82,7 +82,6 @@ def test_fieldKh_SpatiallyVaryingDiffusion(mesh, kernel): tol = 2000 * mesh_conversion # effectively 2000 m errors (because of low numbers of particles) assert np.allclose(np.mean(pset.lon), 0, atol=tol) assert np.allclose(np.mean(pset.lat), 0, atol=tol) - assert abs(stats.skew(pset.lon)) > abs(stats.skew(pset.lat)) @pytest.mark.parametrize("lambd", [1, 5]) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index a3b3d2f187..aa02966702 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -171,7 +171,7 @@ def test_variable_written_once(): @pytest.mark.parametrize( "dt", [ - pytest.param(-np.timedelta64(1, "s"), marks=pytest.mark.xfail(reason="need to fix backwards in time")), + # pytest.param(-np.timedelta64(1, "s"), marks=pytest.mark.xfail(reason="need to fix backwards in time")), np.timedelta64(1, "s"), ], ) diff --git a/tests/test_particleset.py b/tests/test_particleset.py index 3f18b02943..48108606df 100644 --- a/tests/test_particleset.py +++ b/tests/test_particleset.py @@ -124,7 +124,7 @@ def Addlon(particles, fieldset): # pragma: no cover particles.dlon += particles.dt / np.timedelta64(1, "s") pset.execute(Addlon, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(8, "s"), verbose_progress=False) - assert np.allclose([p.lon + p.dlon for p in pset], [8 - t for t in times]) + assert np.allclose([p.lon + p.dlon for p in pset], [10 - t for t in times]) def test_populate_indices(fieldset): diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index 815ed67d3d..0b0cf2ac1f 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -162,7 +162,7 @@ def AddDt(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, pclass=pclass, lon=0, lat=0) pset.update_dt_dtype(dt.dtype) pset.execute(AddDt, runtime=dt * 10, dt=dt) - np.testing.assert_allclose(pset[0].added_dt, 10.0 * dt / np.timedelta64(1, "s"), atol=1e-5) + np.testing.assert_allclose(pset[0].added_dt, 11.0 * dt / np.timedelta64(1, "s"), atol=1e-5) def test_pset_execute_subsecond_dt_error(fieldset): @@ -227,34 +227,35 @@ def test_execution_endtime(fieldset, starttime, endtime, dt): dt = np.timedelta64(dt, "s") pset = ParticleSet(fieldset, time=starttime, lon=0, lat=0) pset.execute(DoNothing, endtime=endtime, dt=dt) - assert abs(pset.time_nextloop - endtime) < np.timedelta64(1, "ms") + assert abs(pset.time - endtime) < np.timedelta64(1, "ms") def test_dont_run_particles_outside_starttime(fieldset): # Test forward in time (note third particle is outside endtime) start_times = [fieldset.time_interval.left + np.timedelta64(t, "s") for t in [0, 2, 10]] endtime = fieldset.time_interval.left + np.timedelta64(8, "s") + dt = np.timedelta64(1, "s") def AddLon(particles, fieldset): # pragma: no cover particles.lon += 1 pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) - pset.execute(AddLon, dt=np.timedelta64(1, "s"), endtime=endtime) + pset.execute(AddLon, dt=dt, endtime=endtime) - np.testing.assert_array_equal(pset.lon, [8, 6, 0]) - assert pset.time_nextloop[0:1] == endtime - assert pset.time_nextloop[2] == start_times[2] # this particle has not been executed + 
np.testing.assert_array_equal(pset.lon, [9, 7, 0]) + assert pset.time[0:1] == endtime + assert pset.time[2] == start_times[2] - dt # this particle has not been executed # TODO check why -dt is needed # Test backward in time (note third particle is outside endtime) start_times = [fieldset.time_interval.right - np.timedelta64(t, "s") for t in [0, 2, 10]] endtime = fieldset.time_interval.right - np.timedelta64(8, "s") pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) - pset.execute(AddLon, dt=-np.timedelta64(1, "s"), endtime=endtime) + pset.execute(AddLon, dt=-dt, endtime=endtime) - np.testing.assert_array_equal(pset.lon, [8, 6, 0]) - assert pset.time_nextloop[0:1] == endtime - assert pset.time_nextloop[2] == start_times[2] # this particle has not been executed + np.testing.assert_array_equal(pset.lon, [9, 7, 0]) + assert pset.time[0:1] == endtime + assert pset.time[2] == start_times[2] + dt # this particle has not been executed def test_some_particles_throw_outofbounds(zonal_flow_fieldset): @@ -336,7 +337,7 @@ def MoveLeft(particles, fieldset): # pragma: no cover lon = np.linspace(0.05, 6.95, npart) lat = np.linspace(1, 0, npart) pset = ParticleSet(fieldset, lon=lon, lat=lat) - pset.execute([MoveRight, MoveLeft], runtime=np.timedelta64(61, "s"), dt=np.timedelta64(1, "s")) + pset.execute([MoveRight, MoveLeft], runtime=np.timedelta64(60, "s"), dt=np.timedelta64(1, "s")) assert len(pset) == npart np.testing.assert_allclose(pset.lon, [6.05, 5.95], rtol=1e-5) np.testing.assert_allclose(pset.lat, lat, rtol=1e-5) @@ -354,7 +355,7 @@ def test_execution_runtime(fieldset, starttime, runtime, dt, npart): dt = np.timedelta64(dt, "s") pset = ParticleSet(fieldset, time=starttime, lon=np.zeros(npart), lat=np.zeros(npart)) pset.execute(DoNothing, runtime=runtime, dt=dt) - assert all([abs(p.time_nextloop - starttime - runtime * sign_dt) < np.timedelta64(1, "ms") for p in pset]) + assert all([abs(p.time - starttime - runtime * sign_dt) < np.timedelta64(1, "ms") for p in pset]) def test_changing_dt_in_kernel(fieldset): @@ -363,9 +364,9 @@ def KernelCounter(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, lon=np.zeros(1), lat=np.zeros(1)) pset.execute(KernelCounter, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(5, "s")) - assert pset.lon == 3 - print(pset.dt) + assert pset.lon == 4 assert pset.dt == np.timedelta64(2, "s") + assert pset.time == fieldset.time_interval.left + np.timedelta64(5, "s") @pytest.mark.parametrize("npart", [1, 100]) From 833d62f84f0dbd47e7b111b692ffc6137830ce22 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 07:42:33 +0200 Subject: [PATCH 04/17] Fixing the first adding of dt to time By moving the position_update_kernels to after the first kernel loop --- src/parcels/_core/kernel.py | 8 ++++---- src/parcels/_core/particleset.py | 4 ---- tests/test_particleset_execute.py | 6 +++--- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 8e04a73d65..f2222082f8 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -233,10 +233,6 @@ def execute(self, pset, endtime, dt): pset._data["state"][:] = StatusCode.Evaluate - if not self._positionupdate_kernels_added: - self.add_positionupdate_kernels() - self._positionupdate_kernels_added = True - while (len(pset) > 0) and np.any(np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Repeat])): time_to_endtime = compute_time_direction * (endtime - 
pset.time) @@ -283,4 +279,8 @@ def execute(self, pset, endtime, dt): else: error_func(pset[inds].z, pset[inds].lat, pset[inds].lon) + if not self._positionupdate_kernels_added: + self.add_positionupdate_kernels() + self._positionupdate_kernels_added = True + return pset diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index fe0250a33a..c28e4bfcf9 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -538,10 +538,6 @@ def execute( next_output = start_time if output_file else None - # TODO clean up two lines below: -dt is needed because in SetCoords dt gets added again - start_time -= dt - self._data["time"][:] -= dt - time = start_time while sign_dt * (time - end_time) < 0: if next_output is not None: diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index 0b0cf2ac1f..c94a84dce7 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -207,7 +207,7 @@ def AddLat(particles, fieldset): # pragma: no cover particles.dlat += 0.1 k_add = pset.Kernel(AddLat) - for _ in range(n + 1): + for _ in range(n): pset.execute(k_add, runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s")) if with_delete: pset.remove_indices(len(pset) - 1) @@ -244,7 +244,7 @@ def AddLon(particles, fieldset): # pragma: no cover np.testing.assert_array_equal(pset.lon, [9, 7, 0]) assert pset.time[0:1] == endtime - assert pset.time[2] == start_times[2] - dt # this particle has not been executed # TODO check why -dt is needed + assert pset.time[2] == start_times[2] # this particle has not been executed # Test backward in time (note third particle is outside endtime) start_times = [fieldset.time_interval.right - np.timedelta64(t, "s") for t in [0, 2, 10]] @@ -255,7 +255,7 @@ def AddLon(particles, fieldset): # pragma: no cover np.testing.assert_array_equal(pset.lon, [9, 7, 0]) assert pset.time[0:1] == endtime - assert pset.time[2] == start_times[2] + dt # this particle has not been executed + assert pset.time[2] == start_times[2] # this particle has not been executed def test_some_particles_throw_outofbounds(zonal_flow_fieldset): From c145154224b3ae22176102c2643560f1f3a79aa9 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 08:10:25 +0200 Subject: [PATCH 05/17] Removing time_nextloop altogether As not needed anymore now that SetCoords kernel is only added after first kernel loop --- docs/examples_v3/tutorial_kernelloop.ipynb | 14 ++++++-------- src/parcels/_core/kernel.py | 17 +++++++---------- src/parcels/_core/particle.py | 1 - src/parcels/_core/particlefile.py | 4 ++-- src/parcels/_core/particleset.py | 2 -- tests/test_diffusion.py | 3 ++- tests/test_particlefile.py | 3 --- 7 files changed, 17 insertions(+), 27 deletions(-) diff --git a/docs/examples_v3/tutorial_kernelloop.ipynb b/docs/examples_v3/tutorial_kernelloop.ipynb index 319f50e8cc..df41ad8413 100644 --- a/docs/examples_v3/tutorial_kernelloop.ipynb +++ b/docs/examples_v3/tutorial_kernelloop.ipynb @@ -40,17 +40,17 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Below is a structured overview of the Kernel loop is implemented. Note that this is for longitude only, but the same process is applied for latitude and z.\n", + "Below is a structured overview of the Kernel loop is implemented. Note that this is for `lon` only, but the same process is applied for `lat` and `z`.\n", "\n", - "1. Initialise an extra Variable `particles.dlon=0` and `particles.time_nextloop = particles.time`\n", + "1. 
Initialise an extra Variable `particles.dlon=0`\n",
    "\n",
    "2. Within the Kernel loop, for each particle (see the sketch after this list):
\n", "\n", " 1. Update `particles.lon += particles.dlon`
\n", "\n", - " 2. Set variable `particles.dlon = 0`
\n", + " 2. Update `particles.time += particles.dt` (except for on the first iteration of the Kernel loop)
\n", "\n", - " 3. Update `particles.time = particles.time_nextloop`\n", + " 3. Set variable `particles.dlon = 0`
\n", "\n", " 4. For each Kernel in the list of Kernels:\n", " \n", @@ -58,9 +58,7 @@ " \n", " 2. Update `particles.dlon` by adding the change in longitude, if needed
\n", "\n", - " 5. Update `particles.time_nextloop += particles.dt`
\n", - "\n", - " 6. If `outputdt` is a multiple of `particle.time`, write `particle.lon` and `particle.time` to zarr output file
\n", + " 5. If `outputdt` is a multiple of `particle.time`, write `particle.lon` and `particle.time` to zarr output file
\n", "\n", "Besides having commutable Kernels, the main advantage of this implementation is that, when using Field Sampling with e.g. `particle.temp = fieldset.Temp[particle.time, particle.z, particle.lat, particle.lon]`, the particle location stays the same throughout the entire Kernel loop. Additionally, this implementation ensures that the particle location is the same as the location of the sampled field in the output file." ] @@ -265,7 +263,7 @@ "### 3. The last time is not written to file\n", "Because the location at the start of the loop is written at the end of the Kernel loop, the last `particle.time` of the particle is not written to file. This is similar behaviour to e.g. `np.arange(start, stop)`, which also doesn't include the `stop` value itself. \n", "\n", - "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause a TimeExtrapolationError if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time_nextloop)`. Note that in the latter case, the particle locations (longitude, latitude and z) will be updated, but other variables will _not_ be updated as the Kernels are not run again." + "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause a TimeExtrapolationError if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time+pset[0].dt)`. Note that in the latter case, the particle locations (longitude, latitude and z) will be updated, but other variables will _not_ be updated as the Kernels are not run again." ] }, { diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index f2222082f8..888bfe735a 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -75,7 +75,7 @@ def __init__( self._fieldset = fieldset self._ptype = ptype - self._positionupdate_kernels_added = False + self._positionupdate_kernel_added = False for f in pyfuncs: self.check_fieldsets_in_kernels(f) @@ -111,9 +111,9 @@ def remove_deleted(self, pset): if len(indices) > 0: pset.remove_indices(indices) - def add_positionupdate_kernels(self): + def add_positionupdate_kernel(self): # Adding kernels that set and update the coordinate changes - def Setcoords(particles, fieldset): # pragma: no cover + def PositionUpdate(particles, fieldset): # pragma: no cover particles.lon += particles.dlon particles.lat += particles.dlat particles.z += particles.dz @@ -123,10 +123,7 @@ def Setcoords(particles, fieldset): # pragma: no cover particles.dlat = 0 particles.dz = 0 - def UpdateTime(particles, fieldset): # pragma: no cover - particles.time_nextloop = particles.time + particles.dt # TODO remove - - self._pyfuncs = (Setcoords + self + UpdateTime)._pyfuncs + self._pyfuncs = (PositionUpdate + self)._pyfuncs def check_fieldsets_in_kernels(self, pyfunc): # TODO v4: this can go into another method? assert_is_compatible()? 
""" @@ -279,8 +276,8 @@ def execute(self, pset, endtime, dt): else: error_func(pset[inds].z, pset[inds].lat, pset[inds].lon) - if not self._positionupdate_kernels_added: - self.add_positionupdate_kernels() - self._positionupdate_kernels_added = True + if not self._positionupdate_kernel_added: + self.add_positionupdate_kernel() + self._positionupdate_kernel_added = True return pset diff --git a/src/parcels/_core/particle.py b/src/parcels/_core/particle.py index 64dd2a447c..6552030472 100644 --- a/src/parcels/_core/particle.py +++ b/src/parcels/_core/particle.py @@ -188,7 +188,6 @@ def get_default_particle(spatial_dtype: np.float32 | np.float64) -> ParticleClas dtype=_SAME_AS_FIELDSET_TIME_INTERVAL.VALUE, attrs={"standard_name": "time", "units": "seconds", "axis": "T"}, ), - Variable("time_nextloop", dtype=_SAME_AS_FIELDSET_TIME_INTERVAL.VALUE, to_write=False), Variable( "trajectory", dtype=np.int64, diff --git a/src/parcels/_core/particlefile.py b/src/parcels/_core/particlefile.py index 04e7b38fe3..76129425ed 100644 --- a/src/parcels/_core/particlefile.py +++ b/src/parcels/_core/particlefile.py @@ -252,11 +252,11 @@ def write_latest_locations(self, pset, time): pset : ParticleSet object to write time : - Time at which to write ParticleSet. Note that typically this would be pset.time_nextloop + Time at which to write ParticleSet. Note that typically this would be pset.time + pset.dt """ for var in ["lon", "lat", "z"]: pset._data[f"{var}"] += pset._data[f"d{var}"] - pset._data["time"] = pset._data["time_nextloop"] + pset._data["time"] += pset._data["dt"] self.write(pset, time) diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index c28e4bfcf9..8948f99cca 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -124,7 +124,6 @@ def __init__( lat=lat, z=z, time=time, - time_nextloop=time, trajectory=trajectory_ids, ), ) @@ -524,7 +523,6 @@ def execute( # Set the time of the particles if it hadn't been set on initialisation if np.isnat(self._data["time"]).any(): self._data["time"][:] = start_time - self._data["time_nextloop"][:] = start_time outputdt = output_file.outputdt if output_file else None diff --git a/tests/test_diffusion.py b/tests/test_diffusion.py index 3972bb520f..d0091c7d69 100644 --- a/tests/test_diffusion.py +++ b/tests/test_diffusion.py @@ -77,11 +77,12 @@ def test_fieldKh_SpatiallyVaryingDiffusion(mesh, kernel): np.random.seed(1636) pset = ParticleSet(fieldset=fieldset, lon=np.zeros(npart), lat=np.zeros(npart)) - pset.execute(pset.Kernel(kernel), runtime=np.timedelta64(4, "h"), dt=np.timedelta64(1, "h")) + pset.execute(pset.Kernel(kernel), runtime=np.timedelta64(3, "h"), dt=np.timedelta64(1, "h")) tol = 2000 * mesh_conversion # effectively 2000 m errors (because of low numbers of particles) assert np.allclose(np.mean(pset.lon), 0, atol=tol) assert np.allclose(np.mean(pset.lat), 0, atol=tol) + assert abs(stats.skew(pset.lon)) > abs(stats.skew(pset.lat)) @pytest.mark.parametrize("lambd", [1, 5]) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index aa02966702..8393bc6413 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -69,12 +69,10 @@ def test_pfile_array_remove_particles(fieldset, tmp_zarrfile): ) pfile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(1, "s")) pset._data["time"][:] = fieldset.time_interval.left - pset._data["time_nextloop"][:] = fieldset.time_interval.left pfile.write(pset, time=fieldset.time_interval.left) pset.remove_indices(3) new_time = 
fieldset.time_interval.left + np.timedelta64(1, "D") pset._data["time"][:] = new_time - pset._data["time_nextloop"][:] = new_time pfile.write(pset, new_time) ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) timearr = ds["time"][:] @@ -446,7 +444,6 @@ def test_particlefile_write_particle_data(tmp_store): time_interval=time_interval, initial={ "time": np.full(nparticles, fill_value=left), - "time_nextloop": np.full(nparticles, fill_value=left), "lon": initial_lon, "dt": np.full(nparticles, fill_value=1.0), "trajectory": np.arange(nparticles), From 8091c1f0e72c51503df3ad2148110037f310cf39 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 08:54:08 +0200 Subject: [PATCH 06/17] Removing write_latest_location As not necessary anymore now that the last time is written to file --- docs/examples_v3/tutorial_kernelloop.ipynb | 8 +------- src/parcels/_core/particlefile.py | 17 ----------------- 2 files changed, 1 insertion(+), 24 deletions(-) diff --git a/docs/examples_v3/tutorial_kernelloop.ipynb b/docs/examples_v3/tutorial_kernelloop.ipynb index df41ad8413..b59d51d366 100644 --- a/docs/examples_v3/tutorial_kernelloop.ipynb +++ b/docs/examples_v3/tutorial_kernelloop.ipynb @@ -257,13 +257,7 @@ "### 2. Be careful with updating particle variables that do not depend on Fields.\n", "While assigning the interpolated value of a `Field` to a Particle goes well in the loop above, this is not necessarily so for assigning other attributes. For example, a line like `particle.age += particle.dt` is executed directly so may result in the age being `dt` at `time = 0` in the output file. \n", "\n", - "A workaround is to either initialise the age to `-dt`, or to increase the `age` only when `particle.time > 0` (using an `np.where` statement).\n", - "\n", - "\n", - "### 3. The last time is not written to file\n", - "Because the location at the start of the loop is written at the end of the Kernel loop, the last `particle.time` of the particle is not written to file. This is similar behaviour to e.g. `np.arange(start, stop)`, which also doesn't include the `stop` value itself. \n", - "\n", - "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause a TimeExtrapolationError if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time+pset[0].dt)`. Note that in the latter case, the particle locations (longitude, latitude and z) will be updated, but other variables will _not_ be updated as the Kernels are not run again." + "A workaround is to either initialise the age to `-dt`, or to increase the `age` only when `particle.time > 0` (using an `np.where` statement)." ] }, { diff --git a/src/parcels/_core/particlefile.py b/src/parcels/_core/particlefile.py index 76129425ed..4ad0ab1554 100644 --- a/src/parcels/_core/particlefile.py +++ b/src/parcels/_core/particlefile.py @@ -242,23 +242,6 @@ def _write_particle_data(self, *, particle_data, pclass, time_interval, time, in particle_data["obs_written"][indices_to_write] = obs + 1 - def write_latest_locations(self, pset, time): - """Write the current (latest) particle locations to zarr file. - This can be useful at the end of a pset.execute(), when the last locations are not written yet. - Note that this only updates the locations, not any of the other Variables. Therefore, use with care. - - Parameters - ---------- - pset : - ParticleSet object to write - time : - Time at which to write ParticleSet. 
Note that typically this would be pset.time + pset.dt - """ - for var in ["lon", "lat", "z"]: - pset._data[f"{var}"] += pset._data[f"d{var}"] - pset._data["time"] += pset._data["dt"] - self.write(pset, time) - def _get_store_from_pathlike(path: Path | str) -> DirectoryStore: path = str(Path(path)) # Ensure valid path, and convert to string From b2ca55d7e87d3f29cd391bdff4b950b9176ebaba Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 08:56:54 +0200 Subject: [PATCH 07/17] reverting change to test_dont_run_particles_outside_starttime --- tests/test_particleset_execute.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index c94a84dce7..c62b90e246 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -234,13 +234,12 @@ def test_dont_run_particles_outside_starttime(fieldset): # Test forward in time (note third particle is outside endtime) start_times = [fieldset.time_interval.left + np.timedelta64(t, "s") for t in [0, 2, 10]] endtime = fieldset.time_interval.left + np.timedelta64(8, "s") - dt = np.timedelta64(1, "s") def AddLon(particles, fieldset): # pragma: no cover particles.lon += 1 pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) - pset.execute(AddLon, dt=dt, endtime=endtime) + pset.execute(AddLon, dt=np.timedelta64(1, "s"), endtime=endtime) np.testing.assert_array_equal(pset.lon, [9, 7, 0]) assert pset.time[0:1] == endtime @@ -251,7 +250,7 @@ def AddLon(particles, fieldset): # pragma: no cover endtime = fieldset.time_interval.right - np.timedelta64(8, "s") pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) - pset.execute(AddLon, dt=-dt, endtime=endtime) + pset.execute(AddLon, dt=-np.timedelta64(1, "s"), endtime=endtime) np.testing.assert_array_equal(pset.lon, [9, 7, 0]) assert pset.time[0:1] == endtime From 38b71cf85640d515164f1056c1f900764744d847 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 13:51:15 +0200 Subject: [PATCH 08/17] Fixing particlefile writing for negative dt --- src/parcels/_core/kernel.py | 1 + src/parcels/_core/particleset.py | 2 +- tests/test_particlefile.py | 10 ++-------- 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 888bfe735a..a916ea4e11 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -276,6 +276,7 @@ def execute(self, pset, endtime, dt): else: error_func(pset[inds].z, pset[inds].lat, pset[inds].lon) + # Only add PositionUpdate kernel at the end of the first execute call to avoid adding dt to time too early if not self._positionupdate_kernel_added: self.add_positionupdate_kernel() self._positionupdate_kernel_added = True diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 8948f99cca..521f78fb63 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -551,7 +551,7 @@ def execute( if output_file: output_file.write(self, next_output) if np.isfinite(outputdt): - next_output += outputdt + next_output += outputdt * sign_dt if verbose_progress: pbar.set_description("Integration time: " + str(time)) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 8393bc6413..926ab825e9 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -166,13 +166,7 @@ def 
test_variable_written_once(): ... -@pytest.mark.parametrize( - "dt", - [ - # pytest.param(-np.timedelta64(1, "s"), marks=pytest.mark.xfail(reason="need to fix backwards in time")), - np.timedelta64(1, "s"), - ], -) +@pytest.mark.parametrize("dt", [-np.timedelta64(1, "s"), np.timedelta64(1, "s")]) @pytest.mark.parametrize("maxvar", [2, 4, 10]) def test_pset_repeated_release_delayed_adding_deleting(fieldset, tmp_zarrfile, dt, maxvar): """Tests that if particles are released and deleted based on age that resulting output file is correct.""" @@ -190,7 +184,7 @@ def test_pset_repeated_release_delayed_adding_deleting(fieldset, tmp_zarrfile, d lon=np.zeros(npart), lat=np.zeros(npart), pclass=MyParticle, - time=fieldset.time_interval.left + np.array([np.timedelta64(i, "s") for i in range(npart)]), + time=fieldset.time_interval.left + np.array([np.timedelta64(i + 1, "s") for i in range(npart)]), ) pfile = ParticleFile(tmp_zarrfile, outputdt=abs(dt), chunks=(1, 1)) From 50f9603e76e2186341fd4d11e4e6edb42977cc99 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 16 Oct 2025 13:58:37 +0200 Subject: [PATCH 09/17] Testing for non-negative runtime and outputdt --- src/parcels/_core/particlefile.py | 3 +++ src/parcels/_core/particleset.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/src/parcels/_core/particlefile.py b/src/parcels/_core/particlefile.py index 4ad0ab1554..1c7ea59ecf 100644 --- a/src/parcels/_core/particlefile.py +++ b/src/parcels/_core/particlefile.py @@ -71,6 +71,9 @@ def __init__(self, store, outputdt, chunks=None, create_new_zarrfile=True): if not isinstance(outputdt, np.timedelta64): raise ValueError(f"Expected outputdt to be a np.timedelta64 or datetime.timedelta, got {type(outputdt)}") + if outputdt <= np.timedelta64(0, "s"): + raise ValueError(f"outputdt must be a positive non-zero timedelta. Got {outputdt=!r}") + self._outputdt = outputdt _assert_valid_chunks_tuple(chunks) diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 521f78fb63..477f3426ab 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -515,6 +515,8 @@ def execute( raise ValueError( f"The runtime must be a datetime.timedelta or np.timedelta64 object. Got {type(runtime)}" ) from e + if runtime < np.timedelta64(0, "s"): + raise ValueError(f"The runtime must be a non-negative timedelta. 
Got {runtime=!r}") start_time, end_time = _get_simulation_start_and_end_times( self.fieldset.time_interval, self._data["time"], runtime, endtime, sign_dt From 930d42347ae6800376d0b3fd6eeaa022a6b2ae6b Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 20 Oct 2025 09:19:43 +0200 Subject: [PATCH 10/17] Fixing writing of Kernel loop with delayed starts and sampling --- src/parcels/_core/kernel.py | 12 +++++++----- src/parcels/_core/statuscodes.py | 1 + tests/test_particlefile.py | 7 +------ 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index a916ea4e11..9e5551efd2 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -233,7 +233,10 @@ def execute(self, pset, endtime, dt): while (len(pset) > 0) and np.any(np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Repeat])): time_to_endtime = compute_time_direction * (endtime - pset.time) - if all(time_to_endtime <= 0): + evaluate_particles = (np.isin(pset.state, [StatusCode.Success, StatusCode.Evaluate])) & ( + time_to_endtime >= 0 + ) + if not np.any(evaluate_particles): return StatusCode.Success # adapt dt to end exactly on endtime @@ -243,7 +246,6 @@ def execute(self, pset, endtime, dt): pset.dt = np.minimum(np.maximum(pset.dt, -time_to_endtime), 0) # run kernels for all particles that need to be evaluated - evaluate_particles = (pset.state == StatusCode.Evaluate) & (pset.dt != 0) for f in self._pyfuncs: f(pset[evaluate_particles], self._fieldset) @@ -257,9 +259,9 @@ def execute(self, pset, endtime, dt): if not hasattr(self.fieldset, "RK45_tol"): pset._data["dt"][:] = dt - # Reset particle state for particles that signalled success and have not reached endtime yet - particles_to_evaluate = (pset.state == StatusCode.Success) & (time_to_endtime > 0) - pset[particles_to_evaluate].state = StatusCode.Evaluate + # Set particle state for particles that reached endtime + particles_endofloop = (pset.state == StatusCode.Evaluate) & (pset.time == endtime) + pset[particles_endofloop].state = StatusCode.EndofLoop # delete particles that signalled deletion self.remove_deleted(pset) diff --git a/src/parcels/_core/statuscodes.py b/src/parcels/_core/statuscodes.py index e754a08d08..515967a297 100644 --- a/src/parcels/_core/statuscodes.py +++ b/src/parcels/_core/statuscodes.py @@ -20,6 +20,7 @@ class StatusCode: """Class defining the status codes for particles.state.""" Success = 0 + EndofLoop = 1 Evaluate = 10 Repeat = 20 Delete = 30 diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 926ab825e9..e48bd08377 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -171,9 +171,7 @@ def test_variable_written_once(): def test_pset_repeated_release_delayed_adding_deleting(fieldset, tmp_zarrfile, dt, maxvar): """Tests that if particles are released and deleted based on age that resulting output file is correct.""" npart = 10 - runtime = np.timedelta64(npart, "s") fieldset.add_constant("maxvar", maxvar) - pset = None MyParticle = Particle.add_variable( [Variable("sample_var", initial=0.0), Variable("v_once", dtype=np.float64, initial=0.0, to_write="once")] @@ -200,11 +198,8 @@ def IncrLon(particles, fieldset): # pragma: no cover pset.execute(IncrLon, dt=dt, runtime=np.timedelta64(1, "s"), output_file=pfile) ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) - pytest.skip( - "TODO v4: Set decode_cf=True, which will mean that missing values get decoded to NaT rather than fill value" - ) samplevar = ds["sample_var"][:] - assert 
samplevar.shape == (runtime, min(maxvar + 1, runtime)) + assert samplevar.shape == (npart, min(maxvar, npart + 1)) # test whether samplevar[:, k] = k for k in range(samplevar.shape[1]): assert np.allclose([p for p in samplevar[:, k] if np.isfinite(p)], k + 1) From 48da5dd0c197d96faecfc4185b3eb6fcccf58d39 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 20 Oct 2025 09:51:59 +0200 Subject: [PATCH 11/17] Adding microsecond dt support And un-xfailing another particlefile unit test --- src/parcels/_core/particleset.py | 2 +- tests/test_particlefile.py | 12 ++++-------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 477f3426ab..e98d202d71 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -420,7 +420,7 @@ def update_dt_dtype(self, dt_dtype: np.dtype): dt_dtype : np.dtype New dtype for dt. """ - if dt_dtype not in [np.timedelta64, "timedelta64[ns]", "timedelta64[ms]", "timedelta64[s]"]: + if dt_dtype not in [np.timedelta64, "timedelta64[ns]", "timedelta64[μs]", "timedelta64[ms]", "timedelta64[s]"]: raise ValueError(f"dt_dtype must be a numpy timedelta64 dtype. Got {dt_dtype=!r}") self._data["dt"] = self._data["dt"].astype(dt_dtype) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index e48bd08377..98f5121899 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -112,20 +112,16 @@ def test_pfile_array_remove_all_particles(fieldset, chunks_obs, tmp_zarrfile): assert np.all(np.isnan(ds["time"][:, 1:])) -@pytest.mark.skip(reason="TODO v4: stuck in infinite loop") def test_variable_write_double(fieldset, tmp_zarrfile): def Update_lon(particles, fieldset): # pragma: no cover particles.dlon += 0.1 + dt = np.timedelta64(1, "us") particle = get_default_particle(np.float64) pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0]) - ofile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(10, "us")) - pset.execute( - pset.Kernel(Update_lon), - runtime=np.timedelta64(1, "ms"), - dt=np.timedelta64(10, "us"), - output_file=ofile, - ) + pset.update_dt_dtype(dt.dtype) + ofile = ParticleFile(tmp_zarrfile, outputdt=dt) + pset.execute(Update_lon, runtime=np.timedelta64(10, "us"), dt=dt, output_file=ofile) ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) # TODO v4: Fix metadata and re-enable decode_cf lons = ds["lon"][:] From 65a7bff0cf0c54a9daa9c0982a51afd66411ab3b Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 20 Oct 2025 12:38:31 +0200 Subject: [PATCH 12/17] Adding extra particlefile unit test --- tests/test_particlefile.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 98f5121899..0db92f0f6c 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -178,7 +178,7 @@ def test_pset_repeated_release_delayed_adding_deleting(fieldset, tmp_zarrfile, d lon=np.zeros(npart), lat=np.zeros(npart), pclass=MyParticle, - time=fieldset.time_interval.left + np.array([np.timedelta64(i + 1, "s") for i in range(npart)]), + time=fieldset.time_interval.left + [np.timedelta64(i + 1, "s") for i in range(npart)], ) pfile = ParticleFile(tmp_zarrfile, outputdt=abs(dt), chunks=(1, 1)) @@ -203,25 +203,23 @@ def IncrLon(particles, fieldset): # pragma: no cover assert filesize < 1024 * 65 # test that chunking leads to filesize less than 65KB -@pytest.mark.xfail(reason="need to fix backwards in time") def 
test_write_timebackward(fieldset, tmp_zarrfile): - def Update_lon(particles, fieldset): # pragma: no cover - dt = particles.dt / np.timedelta64(1, "s") - particles.dlon -= 0.1 * dt - - pset = ParticleSet( - fieldset, - pclass=Particle, - lat=np.linspace(0, 1, 3), - lon=[0, 0, 0], - time=np.array([np.datetime64("2000-01-01") for _ in range(3)]), - ) + release_time = fieldset.time_interval.left + [np.timedelta64(i + 1, "s") for i in range(3)] + pset = ParticleSet(fieldset, lat=[0, 1, 2], lon=[0, 0, 0], time=release_time) pfile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(1, "s")) - pset.execute(pset.Kernel(Update_lon), runtime=np.timedelta64(1, "s"), dt=-np.timedelta64(1, "s"), output_file=pfile) - ds = xr.open_zarr(tmp_zarrfile) + pset.execute(DoNothing, runtime=np.timedelta64(3, "s"), dt=-np.timedelta64(1, "s"), output_file=pfile) + + # TODO v4: Fix decode_cf and remove the following lines + ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) trajs = ds["trajectory"][:] + + output_time = ds["time"][:].values + output_time = np.where(output_time > 100.0, np.nan, output_time) # ignore fill values + assert trajs.values.dtype == "int64" assert np.all(np.diff(trajs.values) < 0) # all particles written in order of release + doutput_time = np.diff(output_time, axis=1) + assert np.all(doutput_time[~np.isnan(doutput_time)] < 0) # all times written in decreasing order @pytest.mark.xfail From c821248fd0b49f60f6115ecfb99b9873ebe04167 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 21 Oct 2025 08:51:57 +0200 Subject: [PATCH 13/17] Fixing another particlefile unit test --- tests/test_particlefile.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 0db92f0f6c..0a3ff6f924 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -275,20 +275,19 @@ def SampleP(particles, fieldset): # pragma: no cover assert fieldset.U.grid.lat[yi] <= lat < fieldset.U.grid.lat[yi + 1] -@pytest.mark.skip -@pytest.mark.v4alpha def test_reset_dt(fieldset, tmp_zarrfile): # Assert that p.dt gets reset when a write_time is not a multiple of dt # for p.dt=0.02 to reach outputdt=0.05 and endtime=0.1, the steps should be [0.2, 0.2, 0.1, 0.2, 0.2, 0.1], resulting in 6 kernel executions + dt = np.timedelta64(20, "ms") def Update_lon(particles, fieldset): # pragma: no cover particles.dlon += 0.1 particle = get_default_particle(np.float64) pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0]) + pset.update_dt_dtype(dt.dtype) ofile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(50, "ms")) - dt = np.timedelta64(20, "ms") - pset.execute(pset.Kernel(Update_lon), runtime=6 * dt, dt=dt, output_file=ofile) + pset.execute(pset.Kernel(Update_lon), runtime=5 * dt, dt=dt, output_file=ofile) assert np.allclose(pset.lon, 0.6) From 2899e375d3e4e77bb1e3aa280f50e9e6041e2cbe Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 21 Oct 2025 15:59:43 +0200 Subject: [PATCH 14/17] Add test on writing particle age --- tests/test_particlefile.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 0a3ff6f924..c06411c0bb 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -275,6 +275,31 @@ def SampleP(particles, fieldset): # pragma: no cover assert fieldset.U.grid.lat[yi] <= lat < fieldset.U.grid.lat[yi + 1] +@pytest.mark.parametrize("outputdt", [np.timedelta64(1, "s"), np.timedelta64(2, "s"), np.timedelta64(3, 
"s")]) +def test_time_is_age(fieldset, tmp_zarrfile, outputdt): + # Test that particle age is same as time - initial_time + npart = 10 + + AgeParticle = get_default_particle(np.float64).add_variable(Variable("age", initial=0.0)) + + def IncreaseAge(particles, fieldset): # pragma: no cover + particles.age += particles.dt / np.timedelta64(1, "s") + + time = fieldset.time_interval.left + np.arange(npart) * np.timedelta64(1, "s") + pset = ParticleSet(fieldset, pclass=AgeParticle, lon=npart * [0], lat=npart * [0], time=time) + ofile = ParticleFile(tmp_zarrfile, outputdt=outputdt) + + pset.execute(IncreaseAge, runtime=np.timedelta64(npart * 2, "s"), dt=np.timedelta64(1, "s"), output_file=ofile) + + # TODO v4: Fix metadata and re-enable decode_cf + ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) + age = ds["age"][:].values + ds_time = np.zeros_like(age) + for i in range(npart): + ds_time[i, :] = ds.time.values[i, :] - (time[i] - fieldset.time_interval.left) / np.timedelta64(1, "s") + assert np.allclose(age[~np.isnan(age)], ds_time[~np.isnan(age)]) + + def test_reset_dt(fieldset, tmp_zarrfile): # Assert that p.dt gets reset when a write_time is not a multiple of dt # for p.dt=0.02 to reach outputdt=0.05 and endtime=0.1, the steps should be [0.2, 0.2, 0.1, 0.2, 0.2, 0.1], resulting in 6 kernel executions From 268d294d4401a8c378e16925b7ec742b509c5259 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 21 Oct 2025 16:11:06 +0200 Subject: [PATCH 15/17] Remvoing age-related caveat from Kernel loop tutorial --- docs/examples_v3/tutorial_kernelloop.ipynb | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/docs/examples_v3/tutorial_kernelloop.ipynb b/docs/examples_v3/tutorial_kernelloop.ipynb index b59d51d366..e10d2c4101 100644 --- a/docs/examples_v3/tutorial_kernelloop.ipynb +++ b/docs/examples_v3/tutorial_kernelloop.ipynb @@ -240,24 +240,16 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Caveats" + "## Caveat: Avoid updating particle locations directly in Kernels" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "There are a few important considerations to take into account when writing Kernels\n", - "\n", - "### 1. Avoid updating particle locations directly in Kernels\n", "It is better not to update `particle.lon` directly in a Kernel, as it can interfere with the loop above. Assigning a value to `particle.lon` in a Kernel will throw a warning. \n", "\n", - "Instead, update the local variable `particle.dlon`.\n", - "\n", - "### 2. Be careful with updating particle variables that do not depend on Fields.\n", - "While assigning the interpolated value of a `Field` to a Particle goes well in the loop above, this is not necessarily so for assigning other attributes. For example, a line like `particle.age += particle.dt` is executed directly so may result in the age being `dt` at `time = 0` in the output file. \n", - "\n", - "A workaround is to either initialise the age to `-dt`, or to increase the `age` only when `particle.time > 0` (using an `np.where` statement)." + "Instead, update the local variable `particle.dlon`." 
] }, { From 0eea8b5ecf48af4f78ae7fe794487872d1ee4d63 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 21 Oct 2025 16:46:05 +0200 Subject: [PATCH 16/17] Fixing more particlefile unit tests --- tests/test_particlefile.py | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index c06411c0bb..503b0613e3 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -7,8 +7,18 @@ import xarray as xr from zarr.storage import MemoryStore -import parcels -from parcels import Field, FieldSet, Particle, ParticleFile, ParticleSet, StatusCode, Variable, VectorField, XGrid +from parcels import ( + Field, + FieldSet, + Particle, + ParticleFile, + ParticleSet, + StatusCode, + Variable, + VectorField, + XGrid, + download_example_dataset, +) from parcels._core.particle import Particle, create_particle_data, get_default_particle from parcels._core.utils.time import TimeInterval from parcels._datasets.structured.generic import datasets @@ -317,13 +327,11 @@ def Update_lon(particles, fieldset): # pragma: no cover assert np.allclose(pset.lon, 0.6) -@pytest.mark.v4alpha -@pytest.mark.xfail def test_correct_misaligned_outputdt_dt(fieldset, tmp_zarrfile): """Testing that outputdt does not need to be a multiple of dt.""" def Update_lon(particles, fieldset): # pragma: no cover - particles.dlon += particles.dt / np.timedelta64(1, "s") + particles.lon += particles.dt / np.timedelta64(1, "s") particle = get_default_particle(np.float64) pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0]) @@ -332,9 +340,7 @@ def Update_lon(particles, fieldset): # pragma: no cover ds = xr.open_zarr(tmp_zarrfile) assert np.allclose(ds.lon.values, [0, 3, 6, 9]) - assert np.allclose( - ds.time.values[0, :], [np.timedelta64(t, "s") for t in [0, 3, 6, 9]], atol=np.timedelta64(1, "ns") - ) + assert np.allclose((ds.time.values - ds.time.values[0, 0]) / np.timedelta64(1, "s"), [0, 3, 6, 9]) def setup_pset_execute(*, fieldset: FieldSet, outputdt: timedelta, execute_kwargs, particle_class=Particle): @@ -382,7 +388,6 @@ def test_pset_execute_output_time_forwards(fieldset): ) -@pytest.mark.skip(reason="backwards in time not yet working") def test_pset_execute_outputdt_backwards(fieldset): """Testing output data dt matches outputdt in backwards time.""" outputdt = timedelta(hours=1) @@ -394,7 +399,6 @@ def test_pset_execute_outputdt_backwards(fieldset): assert np.all(file_outputdt == np.timedelta64(-outputdt)) -@pytest.mark.xfail(reason="TODO v4: Update dataset loading") def test_pset_execute_outputdt_backwards_fieldset_timevarying(): """test_pset_execute_outputdt_backwards() still passed despite #1722 as it doesn't account for time-varying fields, which for some reason #1722 @@ -404,14 +408,9 @@ def test_pset_execute_outputdt_backwards_fieldset_timevarying(): dt = -timedelta(minutes=5) # TODO: Not ideal using the `download_example_dataset` here, but I'm struggling to recreate this error using the test suite fieldsets we have - example_dataset_folder = parcels.download_example_dataset("MovingEddies_data") - filenames = { - "U": str(example_dataset_folder / "moving_eddiesU.nc"), - "V": str(example_dataset_folder / "moving_eddiesV.nc"), - } - variables = {"U": "vozocrtx", "V": "vomecrty"} - dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"} - fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions) + example_dataset_folder = 
download_example_dataset("CopernicusMarine_data_for_Argo_tutorial") + ds_in = xr.open_mfdataset(f"{example_dataset_folder}/*.nc", combine="by_coords") + fieldset = FieldSet.from_copernicusmarine(ds_in) ds = setup_pset_execute(outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt), fieldset=fieldset) file_outputdt = ds.isel(trajectory=0).time.diff(dim="obs").values From c8c61d28c698860d80185ab0f9208e6628780bb2 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 24 Oct 2025 15:33:39 +0200 Subject: [PATCH 17/17] More unit tests fixed now that decode_cf works --- tests/test_particlefile.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 48d7328422..958e50d71c 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -107,11 +107,8 @@ def test_pfile_array_remove_all_particles(fieldset, chunks_obs, tmp_zarrfile): pfile.write(pset, fieldset.time_interval.left + np.timedelta64(1, "D")) pfile.write(pset, fieldset.time_interval.left + np.timedelta64(2, "D")) - ds = xr.open_zarr(tmp_zarrfile, decode_cf=False).load() - pytest.skip( - "TODO v4: Set decode_cf=True, which will mean that missing values get decoded to NaT rather than fill value" - ) - assert np.allclose(ds["time"][:, 0], np.timedelta64(0, "s"), atol=np.timedelta64(1, "ms")) + ds = xr.open_zarr(tmp_zarrfile) + np.testing.assert_allclose(ds["time"][:, 0] - fieldset.time_interval.left, np.timedelta64(0, "s")) if chunks_obs is not None: assert ds["time"][:].shape == chunks else: @@ -200,7 +197,7 @@ def IncrLon(particles, fieldset): # pragma: no cover for _ in range(npart): pset.execute(IncrLon, dt=dt, runtime=np.timedelta64(1, "s"), output_file=pfile) - ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) + ds = xr.open_zarr(tmp_zarrfile) samplevar = ds["sample_var"][:] assert samplevar.shape == (npart, min(maxvar, npart + 1)) # test whether samplevar[:, k] = k @@ -216,12 +213,10 @@ def test_write_timebackward(fieldset, tmp_zarrfile): pfile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(1, "s")) pset.execute(DoNothing, runtime=np.timedelta64(3, "s"), dt=-np.timedelta64(1, "s"), output_file=pfile) - # TODO v4: Fix decode_cf and remove the following lines - ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) + ds = xr.open_zarr(tmp_zarrfile) trajs = ds["trajectory"][:] output_time = ds["time"][:].values - output_time = np.where(output_time > 100.0, np.nan, output_time) # ignore fill values assert trajs.values.dtype == "int64" assert np.all(np.diff(trajs.values) < 0) # all particles written in order of release @@ -298,13 +293,12 @@ def IncreaseAge(particles, fieldset): # pragma: no cover pset.execute(IncreaseAge, runtime=np.timedelta64(npart * 2, "s"), dt=np.timedelta64(1, "s"), output_file=ofile) - # TODO v4: Fix metadata and re-enable decode_cf - ds = xr.open_zarr(tmp_zarrfile, decode_cf=False) + ds = xr.open_zarr(tmp_zarrfile) age = ds["age"][:].values - ds_time = np.zeros_like(age) + ds_timediff = np.zeros_like(age) for i in range(npart): - ds_time[i, :] = ds.time.values[i, :] - (time[i] - fieldset.time_interval.left) / np.timedelta64(1, "s") - assert np.allclose(age[~np.isnan(age)], ds_time[~np.isnan(age)]) + ds_timediff[i, :] = (ds.time.values[i, :] - time[i]) / np.timedelta64(1, "s") + np.testing.assert_equal(age, ds_timediff) def test_reset_dt(fieldset, tmp_zarrfile):
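
Taken together, this series changes the user-visible output-time semantics: the first written record is now at the release time and the last one at release time + runtime, and backward-in-time writing works again. A minimal sketch of the expected behaviour, assuming a `fieldset` built as in the test fixtures (construction elided here) and the standard `AdvectionEE` kernel:

```python
import numpy as np
import xarray as xr

from parcels import AdvectionEE, ParticleFile, ParticleSet

# fieldset: assumed to be set up as in the test fixtures (details elided)
outputdt = np.timedelta64(1, "h")
runtime = np.timedelta64(5, "h")

pset = ParticleSet(fieldset, lon=[0.0], lat=[0.0])
pfile = ParticleFile("out.zarr", outputdt=outputdt)
pset.execute(AdvectionEE, runtime=runtime, dt=np.timedelta64(5, "m"), output_file=pfile)

ds = xr.open_zarr("out.zarr")
# Mirrors test_pset_execute_output_time_forwards from patch 01:
assert ds.time[0, 0].values == fieldset.time_interval.left
assert ds.time[0, -1].values == fieldset.time_interval.left + runtime
```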