diff --git a/.github/workflows/codetools.yml b/.github/workflows/codetools.yml new file mode 100644 index 00000000..b53a9d5b --- /dev/null +++ b/.github/workflows/codetools.yml @@ -0,0 +1,33 @@ +name: ci + +on: [push, pull_request] + +env: + PACKAGE: virtual_ship + +jobs: + codetools: + runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5.1.0 + with: + python-version: ${{ matrix.python-version }} + - name: install + run: pip install ".[dev]" + - name: flake8 + run: flake8 ./$PACKAGE + - name: pydocstyle + run: pydocstyle ./$PACKAGE + - name: sort-all + run: | + find ./$PACKAGE -type f -name '__init__.py' -print0 | xargs -0 sort-all + [[ -z $(git status -s) ]] + git checkout -- . + - name: black + run: black --diff --check ./$PACKAGE + - name: isort + run: isort --check-only --diff ./$PACKAGE diff --git a/codetools.sh b/codetools.sh new file mode 100644 index 00000000..960a0eff --- /dev/null +++ b/codetools.sh @@ -0,0 +1,38 @@ +#!/bin/sh + +# Runs all codetools and attempts to apply fixes wherever possible. +# Not suitable for CI, which should not make any changes. + +set -e + +# Set working directory to the directory of this script. +cd "$(dirname "$0")" + +PACKAGE=virtual_ship + +echo "--------------" +echo "flake8" +echo "--------------" +flake8 ./$PACKAGE +# darglint is run as a plugin for flake8. + +echo "--------------" +echo "pydocstyle" +echo "--------------" +pydocstyle ./$PACKAGE + +echo "--------------" +echo "sort-all" +echo "--------------" +find ./$PACKAGE -type f -name '__init__.py' -print0 | xargs -0 sort-all + +echo "--------------" +echo "black" +echo "--------------" +black ./$PACKAGE + +echo "--------------" +echo "isort" +echo "--------------" +isort ./$PACKAGE + diff --git a/pyproject.toml b/pyproject.toml index 61083232..69ce0bf2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ "pyproj >= 3, < 4", "parcels >= 3, < 4", "scipy >= 1, < 2", - "xarray >= 2023, < 2024" + "xarray >= 2023, < 2024", ] [project.urls] @@ -44,8 +44,21 @@ packages = ["virtual_ship"] write_to = "virtual_ship/_version_setup.py" local_scheme = "no-local-version" +[project.optional-dependencies] +dev = [ + "black == 24.4.0", + "darglint == 1.8.1", + "flake8 == 7.0.0", + "Flake8-pyproject == 1.2.3", + "isort == 5.13.2", + "pydocstyle == 6.3.0", + "sort-all == 1.2.0", +] + [tool.isort] profile = "black" skip_gitignore = true -# Other tools will be added soon. +[tool.flake8] +extend-ignore = "E501" # Don't check line length. +docstring_style = "sphinx" # Use sphinx docstring style for darglint plugin.
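The `docstring_style = "sphinx"` setting above is what drives the darglint checks that run through flake8. For contributors new to these tools, a docstring in the expected style looks roughly like the sketch below; the function and its contents are hypothetical, not part of this PR:

```python
# Hypothetical example of the sphinx-style docstring that darglint
# validates: each :param: must match an argument name, and
# :returns:/:raises: must match what the body actually does.
def deploy_instruments(config, count):
    """
    Deploy a number of instruments along the cruise track.

    :param config: The cruise configuration.
    :param count: How many instruments to deploy.
    :returns: Deployment times in seconds since the cruise start.
    :raises ValueError: If count is negative.
    """
    if count < 0:
        raise ValueError("count must be non-negative")
    return [i * 3600 for i in range(count)]
```

Renaming an argument without updating its `:param:` line, or dropping the `:raises:` entry, would then surface as a darglint error in the flake8 step of the workflow.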
diff --git a/virtual_ship/__init__.py b/virtual_ship/__init__.py index e69de29b..d885347c 100644 --- a/virtual_ship/__init__.py +++ b/virtual_ship/__init__.py @@ -0,0 +1 @@ +"""Code for the Virtual Ship Classroom, where Marine Scientists can combine Copernicus Marine Data with an OceanParcels ship to go on a virtual expedition.""" diff --git a/virtual_ship/argo_deployments.py b/virtual_ship/argo_deployments.py index 65b3bc56..5a31870f 100644 --- a/virtual_ship/argo_deployments.py +++ b/virtual_ship/argo_deployments.py @@ -1,12 +1,30 @@ -import os -import math -import numpy as np +"""argo_deployments function.""" + import datetime +import math +import os from datetime import timedelta -from parcels import FieldSet, JITParticle, Variable, ParticleSet, AdvectionRK4, StatusCode + +import numpy as np +from parcels import ( + AdvectionRK4, + FieldSet, + JITParticle, + ParticleSet, + StatusCode, + Variable, +) + def argo_deployments(config, argo_time): - '''Deploys argo floats, returns results folder''' + """ + Deploy argo floats. + + :param config: The cruise configuration. + :param argo_time: TODO + """ + # particle_* such as particle_ddepth are local variables defined by parcels. + # See https://docs.oceanparcels.org/en/latest/examples/tutorial_kernelloop.html#Background if len(config.argo_deploylocations) > 0: @@ -17,7 +35,9 @@ def ArgoVerticalMovement(particle, fieldset, time): if particle.cycle_phase == 0: # Phase 0: Sinking with vertical_speed until depth is driftdepth - particle_ddepth += fieldset.vertical_speed * particle.dt + particle_ddepth += ( # noqa See comment above about particle_* variables. + fieldset.vertical_speed * particle.dt + ) if particle.depth + particle_ddepth <= fieldset.driftdepth: particle_ddepth = fieldset.driftdepth - particle.depth particle.cycle_phase = 1 @@ -39,15 +59,23 @@ def ArgoVerticalMovement(particle, fieldset, time): elif particle.cycle_phase == 3: # Phase 3: Rising with vertical_speed until at surface particle_ddepth -= fieldset.vertical_speed * particle.dt - particle.cycle_age += particle.dt # solve issue of not updating cycle_age during ascent + particle.cycle_age += ( + particle.dt + ) # solve issue of not updating cycle_age during ascent if particle.depth + particle_ddepth >= fieldset.mindepth: particle_ddepth = fieldset.mindepth - particle.depth - particle.temperature = math.nan # reset temperature to NaN at end of sampling cycle + particle.temperature = ( + math.nan + ) # reset temperature to NaN at end of sampling cycle particle.salinity = math.nan # idem particle.cycle_phase = 4 else: - particle.temperature = fieldset.T[time, particle.depth, particle.lat, particle.lon] - particle.salinity = fieldset.S[time, particle.depth, particle.lat, particle.lon] + particle.temperature = fieldset.T[ + time, particle.depth, particle.lat, particle.lon + ] + particle.salinity = fieldset.S[ + time, particle.depth, particle.lat, particle.lon + ] elif particle.cycle_phase == 4: # Phase 4: Transmitting at surface until cycletime is reached @@ -84,39 +112,78 @@ class ArgoParticle(JITParticle): time = argo_time # Create and execute argo particles - argoset = ParticleSet(fieldset=fieldset, pclass=ArgoParticle, lon=lon, lat=lat, depth=np.repeat(fieldset.mindepth,len(time)), time=time) - argo_output_file = argoset.ParticleFile(name=os.path.join("results","Argos.zarr"), outputdt=timedelta(minutes=5), chunks=(1,500)) + argoset = ParticleSet( + fieldset=fieldset, + pclass=ArgoParticle, + lon=lon, + lat=lat, + depth=np.repeat(fieldset.mindepth, len(time)), + time=time, + ) 
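The `ArgoVerticalMovement` kernel above is a five-phase state machine: sink to the drift depth, drift, sink to the profiling depth, rise while sampling temperature and salinity, and transmit at the surface before starting over. Stripped of the parcels kernel machinery, the cycle logic reduces to roughly the sketch below; the function name and all default values are illustrative, not the configured ones:

```python
# Parcels-free sketch of the five-phase Argo cycle encoded in
# ArgoVerticalMovement. Depths are negative metres, dt and cycle_age
# are in seconds; every default here is hypothetical.
def argo_cycle_step(depth, phase, cycle_age, dt, v_speed=0.10,
                    drift_depth=-1000.0, max_depth=-2000.0,
                    drift_days=9, cycle_days=10, surface=-0.5):
    cycle_age += dt
    if phase == 0:  # sink until the drift depth is reached
        depth = max(depth - v_speed * dt, drift_depth)
        if depth <= drift_depth:
            phase = 1
    elif phase == 1:  # drift at depth until drift_days have passed
        if cycle_age >= drift_days * 86400:
            phase = 2
    elif phase == 2:  # sink further, to the profiling depth
        depth = max(depth - v_speed * dt, max_depth)
        if depth <= max_depth:
            phase = 3
    elif phase == 3:  # rise, sampling T and S, until near the surface
        depth = min(depth + v_speed * dt, surface)
        if depth >= surface:
            phase = 4
    elif phase == 4:  # transmit at the surface, then restart the cycle
        if cycle_age >= cycle_days * 86400:
            cycle_age = 0.0
            phase = 0
    return depth, phase, cycle_age
```

The real kernel additionally resets temperature and salinity to NaN when a cast ends, and it reads its constants (driftdepth, maxdepth, vertical_speed, cycle_days, drift_days) from the fieldset built in create_argo_fieldset below.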
+ argo_output_file = argoset.ParticleFile( + name=os.path.join("results", "Argos.zarr"), + outputdt=timedelta(minutes=5), + chunks=(1, 500), + ) fieldset_endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) - argo_endtime = np.array((datetime.datetime.strptime(config.requested_ship_time["start"],"%Y-%m-%dT%H:%M:%S") + timedelta(weeks=6))).astype('datetime64[ms]') + argo_endtime = np.array( + ( + datetime.datetime.strptime( + config.requested_ship_time["start"], "%Y-%m-%dT%H:%M:%S" + ) + + timedelta(weeks=6) + ) + ).astype("datetime64[ms]") argoset.execute( - [ArgoVerticalMovement, AdvectionRK4, KeepAtSurface, CheckError], # list of kernels to be executed - endtime=min(fieldset_endtime, argo_endtime), dt=timedelta(minutes=5), - output_file=argo_output_file + [ + ArgoVerticalMovement, + AdvectionRK4, + KeepAtSurface, + CheckError, + ], # list of kernels to be executed + endtime=min(fieldset_endtime, argo_endtime), + dt=timedelta(minutes=5), + output_file=argo_output_file, ) + def create_argo_fieldset(config): - '''Creates fieldset from netcdf files for argo floats, returns fieldset with negative depth values''' + """ + Create a fieldset from netcdf files for argo floats, return fieldset with negative depth values. + :param config: The cruise configuration. + :returns: The fieldset. + """ datadirname = os.path.dirname(__file__) filenames = { "U": os.path.join(datadirname, "argodata_UV.nc"), "V": os.path.join(datadirname, "argodata_UV.nc"), "S": os.path.join(datadirname, "argodata_S.nc"), - "T": os.path.join(datadirname, "argodata_T.nc")} - variables = {'U': 'uo', 'V': 'vo', 'S': 'so', 'T': 'thetao'} - dimensions = {'lon': 'longitude', 'lat': 'latitude', 'time': 'time', 'depth': 'depth'} + "T": os.path.join(datadirname, "argodata_T.nc"), + } + variables = {"U": "uo", "V": "vo", "S": "so", "T": "thetao"} + dimensions = { + "lon": "longitude", + "lat": "latitude", + "time": "time", + "depth": "depth", + } # create the fieldset and set interpolation methods - fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, allow_time_extrapolation=False) + fieldset = FieldSet.from_netcdf( + filenames, variables, dimensions, allow_time_extrapolation=False + ) fieldset.T.interp_method = "linear_invdist_land_tracer" for g in fieldset.gridset.grids: if max(g.depth) > 0: g.depth = -g.depth # make depth negative fieldset.mindepth = -fieldset.U.depth[0] # uppermost layer in the hydrodynamic data - fieldset.add_constant('driftdepth', config.argo_characteristics["driftdepth"]) - fieldset.add_constant('maxdepth', config.argo_characteristics["maxdepth"]) - fieldset.add_constant('vertical_speed', config.argo_characteristics["vertical_speed"]) - fieldset.add_constant('cycle_days', config.argo_characteristics["cycle_days"]) - fieldset.add_constant('drift_days', config.argo_characteristics["drift_days"]) + fieldset.add_constant("driftdepth", config.argo_characteristics["driftdepth"]) + fieldset.add_constant("maxdepth", config.argo_characteristics["maxdepth"]) + fieldset.add_constant( + "vertical_speed", config.argo_characteristics["vertical_speed"] + ) + fieldset.add_constant("cycle_days", config.argo_characteristics["cycle_days"]) + fieldset.add_constant("drift_days", config.argo_characteristics["drift_days"]) return fieldset diff --git a/virtual_ship/costs.py b/virtual_ship/costs.py index 7ca62de6..6c27f81f 100644 --- a/virtual_ship/costs.py +++ b/virtual_ship/costs.py @@ -1,13 +1,21 @@ +"""costs function.""" + + def costs(config, total_time): - '''Calculates cost of the virtual ship (in 
US$)''' + """ + Calculate the cost of the virtual ship (in US$). + :param config: The cruise configuration. + :param total_time: Time cruised in seconds. + :returns: The calculated cost of the cruise. + """ ship_cost_per_day = 30000 drifter_deploy_cost = 2500 argo_deploy_cost = 15000 - ship_cost = ship_cost_per_day/24 * total_time//3600 + ship_cost = ship_cost_per_day / 24 * total_time // 3600 argo_cost = len(config.argo_deploylocations) * argo_deploy_cost drifter_cost = len(config.drifter_deploylocations) * drifter_deploy_cost - + cost = ship_cost + argo_cost + drifter_cost return cost diff --git a/virtual_ship/drifter_deployments.py b/virtual_ship/drifter_deployments.py index dda6a62f..89e19039 100644 --- a/virtual_ship/drifter_deployments.py +++ b/virtual_ship/drifter_deployments.py @@ -1,23 +1,35 @@ -import os -import numpy as np +"""drifter_deployments function.""" + import datetime +import os from datetime import timedelta -from parcels import FieldSet, JITParticle, Variable, ParticleSet, AdvectionRK4 + +import numpy as np +from parcels import AdvectionRK4, FieldSet, JITParticle, ParticleSet, Variable + def drifter_deployments(config, drifter_time): + """ + Deploy drifters. + :param config: The cruise configuration. + :param drifter_time: TODO + """ if len(config.drifter_deploylocations) > 0: fieldset = create_drifter_fieldset(config) # Create particle to sample water underway class DrifterParticle(JITParticle): - """Define a new particle class that samples Temperature as a surface drifter""" + """Define a new particle class that samples Temperature as a surface drifter.""" + temperature = Variable("temperature", initial=np.nan) # define function sampling Temperature def SampleT(particle, fieldset, time): - particle.temperature = fieldset.T[time, particle.depth, particle.lat, particle.lon] + particle.temperature = fieldset.T[ + time, particle.depth, particle.lat, particle.lon + ] def CheckError(particle, fieldset, time): if particle.state >= 50: # This captures all Errors @@ -32,31 +44,61 @@ def CheckError(particle, fieldset, time): time = drifter_time # Create and execute drifter particles - pset = ParticleSet(fieldset=fieldset, pclass=DrifterParticle, lon=lon, lat=lat, depth=np.repeat(fieldset.mindepth,len(time)), time=time) - output_file = pset.ParticleFile(name=os.path.join("results","Drifters.zarr"), outputdt=timedelta(hours=1)) + pset = ParticleSet( + fieldset=fieldset, + pclass=DrifterParticle, + lon=lon, + lat=lat, + depth=np.repeat(fieldset.mindepth, len(time)), + time=time, + ) + output_file = pset.ParticleFile( + name=os.path.join("results", "Drifters.zarr"), outputdt=timedelta(hours=1) + ) fieldset_endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) - drifter_endtime = np.array((datetime.datetime.strptime(config.requested_ship_time["start"],"%Y-%m-%dT%H:%M:%S") + timedelta(weeks=6))).astype('datetime64[ms]') + drifter_endtime = np.array( + ( + datetime.datetime.strptime( + config.requested_ship_time["start"], "%Y-%m-%dT%H:%M:%S" + ) + + timedelta(weeks=6) + ) + ).astype("datetime64[ms]") pset.execute( [AdvectionRK4, SampleT, CheckError], - endtime=min(fieldset_endtime, drifter_endtime), dt=timedelta(minutes=5), - output_file=output_file + endtime=min(fieldset_endtime, drifter_endtime), + dt=timedelta(minutes=5), + output_file=output_file, ) - + + def create_drifter_fieldset(config): - '''Creates fieldset from netcdf files for drifters, returns fieldset with negative depth values''' + """ + Create a fieldset from netcdf files for drifters, returns 
fieldset with negative depth values. + :param config: The cruise configuration. + :returns: The fieldset. + """ datadirname = os.path.dirname(__file__) filenames = { "U": os.path.join(datadirname, "drifterdata_UV.nc"), "V": os.path.join(datadirname, "drifterdata_UV.nc"), - "T": os.path.join(datadirname, "drifterdata_T.nc")} - variables = {'U': 'uo', 'V': 'vo', 'T': 'thetao'} - dimensions = {'lon': 'longitude', 'lat': 'latitude', 'time': 'time', 'depth': 'depth'} + "T": os.path.join(datadirname, "drifterdata_T.nc"), + } + variables = {"U": "uo", "V": "vo", "T": "thetao"} + dimensions = { + "lon": "longitude", + "lat": "latitude", + "time": "time", + "depth": "depth", + } # create the fieldset and set interpolation methods - fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, allow_time_extrapolation=False) + fieldset = FieldSet.from_netcdf( + filenames, variables, dimensions, allow_time_extrapolation=False + ) fieldset.T.interp_method = "linear_invdist_land_tracer" for g in fieldset.gridset.grids: if max(g.depth) > 0: diff --git a/virtual_ship/postprocess.py b/virtual_ship/postprocess.py index 8ec6f225..a234dab9 100644 --- a/virtual_ship/postprocess.py +++ b/virtual_ship/postprocess.py @@ -1,21 +1,24 @@ +"""postprocess function.""" + import os +import shutil + import numpy as np import xarray as xr -import shutil from scipy.ndimage import uniform_filter1d -def postprocess(): - '''Postprocesses CTD data and writes to csv files''' - if os.path.isdir(os.path.join("results","CTDs")): +def postprocess(): + """Postprocess CTD data and write to csv files.""" + if os.path.isdir(os.path.join("results", "CTDs")): i = 0 - filenames = os.listdir(os.path.join("results","CTDs")) + filenames = os.listdir(os.path.join("results", "CTDs")) for filename in sorted(filenames): if filename.endswith(".zarr"): - try: #too many errors, just skip the faulty zarr files + try: # too many errors, just skip the faulty zarr files i += 1 # Open output and read to x, y, z - ds = xr.open_zarr(os.path.join("results","CTDs",filename)) + ds = xr.open_zarr(os.path.join("results", "CTDs", filename)) x = ds["lon"][:].squeeze() y = ds["lat"][:].squeeze() z = ds["z"][:].squeeze() @@ -24,27 +27,44 @@ def postprocess(): S = ds["salinity"][:].squeeze() ds.close() - random_walk = np.random.random()/10 - z_norm = (z-np.min(z))/(np.max(z)-np.min(z)) + random_walk = np.random.random() / 10 + z_norm = (z - np.min(z)) / (np.max(z) - np.min(z)) t_norm = np.linspace(0, 1, num=len(time)) # add smoothed random noise scaled with depth # and random (reversed) diversion from initial through time scaled with depth S = S + uniform_filter1d( - np.random.random(S.shape)/5*(1-z_norm) + - random_walk*(np.max(S).values - np.min(S).values)*(1-z_norm)*t_norm/10, - max(int(len(time)/40), 1)) + np.random.random(S.shape) / 5 * (1 - z_norm) + + random_walk + * (np.max(S).values - np.min(S).values) + * (1 - z_norm) + * t_norm + / 10, + max(int(len(time) / 40), 1), + ) T = T + uniform_filter1d( - np.random.random(T.shape)*5*(1-z_norm) - - random_walk/2*(np.max(T).values - np.min(T).values)*(1-z_norm)*t_norm/10, - max(int(len(time)/20), 1)) + np.random.random(T.shape) * 5 * (1 - z_norm) - - random_walk + / 2 + * (np.max(T).values - np.min(T).values) + * (1 - z_norm) + * t_norm + / 10, + max(int(len(time) / 20), 1), + ) # reshaping data to export to csv - header = f"pressure [dbar],temperature [degC],salinity [g kg-1]" + header = "pressure [dbar],temperature [degC],salinity [g kg-1]" data = np.column_stack([-z, T, S]) - new_line = '\n' - 
np.savetxt(f"{os.path.join('results','CTDs','CTD_station_')}{i}.csv", data, fmt="%.4f", header=header, delimiter=',', - comments=f'longitude,{x[0].values},"{x.attrs}"{new_line}latitude,{y[0].values},"{y.attrs}"{new_line}start time,{time[0].values}{new_line}end time,{time[-1].values}{new_line}') - shutil.rmtree(os.path.join("results","CTDs",filename)) + new_line = "\n" + np.savetxt( + f"{os.path.join('results', 'CTDs', 'CTD_station_')}{i}.csv", + data, + fmt="%.4f", + header=header, + delimiter=",", + comments=f'longitude,{x[0].values},"{x.attrs}"{new_line}latitude,{y[0].values},"{y.attrs}"{new_line}start time,{time[0].values}{new_line}end time,{time[-1].values}{new_line}', + ) + shutil.rmtree(os.path.join("results", "CTDs", filename)) except TypeError: print(f"CTD file {filename} seems faulty, skipping.") continue diff --git a/virtual_ship/sailship.py b/virtual_ship/sailship.py index 1b1c5113..f753677d 100644 --- a/virtual_ship/sailship.py +++ b/virtual_ship/sailship.py @@ -1,13 +1,21 @@ +"""sailship function.""" + import os -from parcels import Field, FieldSet, JITParticle, Variable, ParticleSet, AdvectionRK4, StatusCode +from datetime import timedelta + import numpy as np -from shapely.geometry import Point, Polygon import pyproj -from datetime import timedelta +from parcels import Field, FieldSet, JITParticle, ParticleSet, Variable +from shapely.geometry import Point, Polygon + def sailship(config): - '''Uses parcels to simulate the ship, take CTDs and measure ADCP and underwaydata, returns results folder''' + """ + Use parcels to simulate the ship, take CTDs and measure ADCP and underwaydata. + :param config: The cruise configuration. + :returns: drifter_time, argo_time, total_time + """ # Create fieldset and retreive final schip route as sample_lons and sample_lats fieldset = create_fieldset(config) @@ -16,32 +24,43 @@ def sailship(config): # Create Vessel Mounted ADCP like particles to sample the ocean class VM_ADCPParticle(JITParticle): - """Define a new particle class that does Vessel Mounted ADCP like measurements""" - U = Variable('U', dtype=np.float32, initial=0.0) - V = Variable('V', dtype=np.float32, initial=0.0) + """Define a new particle class that does Vessel Mounted ADCP like measurements.""" + + U = Variable("U", dtype=np.float32, initial=0.0) + V = Variable("V", dtype=np.float32, initial=0.0) # Create particle to sample water underway class UnderwayDataParticle(JITParticle): - """Define a new particle class that samples water directly under the hull""" + """Define a new particle class that samples water directly under the hull.""" + salinity = Variable("salinity", initial=np.nan) temperature = Variable("temperature", initial=np.nan) # Create CTD like particles to sample the ocean class CTDParticle(JITParticle): - """Define a new particle class that does CTD like measurements""" + """Define a new particle class that does CTD like measurements.""" + salinity = Variable("salinity", initial=np.nan) temperature = Variable("temperature", initial=np.nan) raising = Variable("raising", dtype=np.int32, initial=0.0) # define ADCP sampling function without conversion (because of A grid) def SampleVel(particle, fieldset, time): - particle.U, particle.V = fieldset.UV.eval(time, particle.depth, particle.lat, particle.lon, applyConversion=False) + particle.U, particle.V = fieldset.UV.eval( + time, particle.depth, particle.lat, particle.lon, applyConversion=False + ) # define function lowering and raising CTD def CTDcast(particle, fieldset, time): # TODO question: if is executed every 
time... move outside function? Not if "drifting" now possible - if -fieldset.bathymetry[time, particle.depth, particle.lat, particle.lon] > fieldset.max_depth: - maxdepth = -fieldset.bathymetry[time, particle.depth, particle.lat, particle.lon] + 20 + if ( + -fieldset.bathymetry[time, particle.depth, particle.lat, particle.lon] + > fieldset.max_depth + ): + maxdepth = ( + -fieldset.bathymetry[time, particle.depth, particle.lat, particle.lon] + + 20 + ) else: maxdepth = fieldset.max_depth winch_speed = -1.0 # sink and rise speed in m/s @@ -67,29 +86,47 @@ def SampleS(particle, fieldset, time): # define function sampling Temperature def SampleT(particle, fieldset, time): - particle.temperature = fieldset.T[time, particle.depth, particle.lat, particle.lon] + particle.temperature = fieldset.T[ + time, particle.depth, particle.lat, particle.lon + ] # Create ADCP like particleset and output file - ADCP_bins = np.arange(config.ADCP_settings["max_depth"], -5, config.ADCP_settings["bin_size_m"]) + ADCP_bins = np.arange( + config.ADCP_settings["max_depth"], -5, config.ADCP_settings["bin_size_m"] + ) vert_particles = len(ADCP_bins) pset_ADCP = ParticleSet.from_list( - fieldset=fieldset, pclass=VM_ADCPParticle, lon=np.full(vert_particles,sample_lons[0]), - lat=np.full(vert_particles,sample_lats[0]), depth=ADCP_bins, time=0 + fieldset=fieldset, + pclass=VM_ADCPParticle, + lon=np.full(vert_particles, sample_lons[0]), + lat=np.full(vert_particles, sample_lats[0]), + depth=ADCP_bins, + time=0, ) - adcp_output_file = pset_ADCP.ParticleFile(name=os.path.join("results","ADCP.zarr")) - adcp_dt = timedelta(minutes=5).total_seconds() # timestep of ADCP output, every 5 min + adcp_output_file = pset_ADCP.ParticleFile(name=os.path.join("results", "ADCP.zarr")) + adcp_dt = timedelta( + minutes=5 + ).total_seconds() # timestep of ADCP output, every 5 min # Create underway particle pset_UnderwayData = ParticleSet.from_list( - fieldset=fieldset, pclass=UnderwayDataParticle, lon=sample_lons[0], - lat=sample_lats[0], depth=-2, time=0 + fieldset=fieldset, + pclass=UnderwayDataParticle, + lon=sample_lons[0], + lat=sample_lats[0], + depth=-2, + time=0, + ) + UnderwayData_output_file = pset_UnderwayData.ParticleFile( + name=os.path.join("results", "UnderwayData.zarr") ) - UnderwayData_output_file = pset_UnderwayData.ParticleFile(name=os.path.join("results","UnderwayData.zarr")) # initialize CTD station number and time total_time = timedelta(hours=0).total_seconds() ctd = 0 - ctd_dt = timedelta(seconds=10) # timestep of CTD output reflecting post-process binning into 10m bins + ctd_dt = timedelta( + seconds=10 + ) # timestep of CTD output reflecting post-process binning into 10m bins # initialize drifters and argo floats drifter = 0 @@ -98,49 +135,85 @@ def SampleT(particle, fieldset, time): argo_time = [] # run the model for the length of the sample_lons list - for i in range(len(sample_lons)-1): + for i in range(len(sample_lons) - 1): if i % 96 == 0: print(f"Gathered data {timedelta(seconds=total_time)} hours since start.") # execute the ADCP kernels to sample U and V and underway T and S if config.ADCP_data: - pset_ADCP.execute([SampleVel], dt=adcp_dt, runtime=1, verbose_progress=False) + pset_ADCP.execute( + [SampleVel], dt=adcp_dt, runtime=1, verbose_progress=False + ) adcp_output_file.write(pset_ADCP, time=pset_ADCP[0].time) if config.underway_data: - pset_UnderwayData.execute([SampleS, SampleT], dt=adcp_dt, runtime=1, verbose_progress=False) + pset_UnderwayData.execute( + [SampleS, SampleT], dt=adcp_dt, runtime=1, 
verbose_progress=False + ) UnderwayData_output_file.write(pset_UnderwayData, time=pset_ADCP[0].time) if pset_ADCP[0].time > fieldset.maxtime: - print("Ship time is over, waiting for drifters and/or Argo floats to finish.") + print( + "Ship time is over, waiting for drifters and/or Argo floats to finish." + ) return drifter_time, argo_time # check if virtual ship is at a CTD station if ctd < len(config.CTD_locations): - if abs(sample_lons[i] - config.CTD_locations[ctd][0]) < 0.001 and abs(sample_lats[i] - config.CTD_locations[ctd][1]) < 0.001: + if ( + abs(sample_lons[i] - config.CTD_locations[ctd][0]) < 0.001 + and abs(sample_lats[i] - config.CTD_locations[ctd][1]) < 0.001 + ): ctd += 1 # release CTD particle - pset_CTD = ParticleSet(fieldset=fieldset, pclass=CTDParticle, lon=sample_lons[i], lat=sample_lats[i], depth=fieldset.mindepth, time=total_time) + pset_CTD = ParticleSet( + fieldset=fieldset, + pclass=CTDParticle, + lon=sample_lons[i], + lat=sample_lats[i], + depth=fieldset.mindepth, + time=total_time, + ) # create a ParticleFile to store the CTD output - ctd_output_file = pset_CTD.ParticleFile(name=f"{os.path.join('results','CTDs','CTD_')}{ctd:03d}.zarr", outputdt=ctd_dt) + ctd_output_file = pset_CTD.ParticleFile( + name=f"{os.path.join('results', 'CTDs', 'CTD_')}{ctd:03d}.zarr", + outputdt=ctd_dt, + ) # record the temperature and salinity of the particle - pset_CTD.execute([SampleS, SampleT, CTDcast], runtime=timedelta(hours=8), dt=ctd_dt, output_file=ctd_output_file, verbose_progress=False) - total_time = pset_CTD.time[0] + timedelta(minutes=20).total_seconds() # add CTD time and 20 minutes for deployment + pset_CTD.execute( + [SampleS, SampleT, CTDcast], + runtime=timedelta(hours=8), + dt=ctd_dt, + output_file=ctd_output_file, + verbose_progress=False, + ) + total_time = ( + pset_CTD.time[0] + timedelta(minutes=20).total_seconds() + ) # add CTD time and 20 minutes for deployment # check if we are at a drifter deployment location if drifter < len(config.drifter_deploylocations): - while abs(sample_lons[i] - config.drifter_deploylocations[drifter][0]) < 0.01 and abs(sample_lats[i] - config.drifter_deploylocations[drifter][1]) < 0.01: + while ( + abs(sample_lons[i] - config.drifter_deploylocations[drifter][0]) < 0.01 + and abs(sample_lats[i] - config.drifter_deploylocations[drifter][1]) + < 0.01 + ): drifter_time.append(total_time) drifter += 1 - print(f"Drifter {drifter} deployed at {sample_lons[i]}, {sample_lats[i]}") + print( + f"Drifter {drifter} deployed at {sample_lons[i]}, {sample_lats[i]}" + ) if drifter == len(config.drifter_deploylocations): break # check if we are at an argo deployment location if argo < len(config.argo_deploylocations): - while abs(sample_lons[i] - config.argo_deploylocations[argo][0]) < 0.01 and abs(sample_lats[i] - config.argo_deploylocations[argo][1]) < 0.01: + while ( + abs(sample_lons[i] - config.argo_deploylocations[argo][0]) < 0.01 + and abs(sample_lats[i] - config.argo_deploylocations[argo][1]) < 0.01 + ): argo_time.append(total_time) argo += 1 print(f"Argo {argo} deployed at {sample_lons[i]}, {sample_lats[i]}") @@ -148,10 +221,10 @@ def SampleT(particle, fieldset, time): break # update the particle time and location - pset_ADCP.lon_nextloop[:] = sample_lons[i+1] - pset_ADCP.lat_nextloop[:] = sample_lats[i+1] - pset_UnderwayData.lon_nextloop[:] = sample_lons[i+1] - pset_UnderwayData.lat_nextloop[:] = sample_lats[i+1] + pset_ADCP.lon_nextloop[:] = sample_lons[i + 1] + pset_ADCP.lat_nextloop[:] = sample_lats[i + 1] + 
pset_UnderwayData.lon_nextloop[:] = sample_lons[i + 1] + pset_UnderwayData.lat_nextloop[:] = sample_lats[i + 1] total_time += adcp_dt pset_ADCP.time_nextloop[:] = total_time @@ -162,27 +235,44 @@ def SampleT(particle, fieldset, time): pset_ADCP.execute(SampleVel, dt=adcp_dt, runtime=1, verbose_progress=False) adcp_output_file.write_latest_locations(pset_ADCP, time=total_time) if config.underway_data: - pset_UnderwayData.execute([SampleS, SampleT], dt=adcp_dt, runtime=1, verbose_progress=False) - UnderwayData_output_file.write_latest_locations(pset_UnderwayData, time=total_time) + pset_UnderwayData.execute( + [SampleS, SampleT], dt=adcp_dt, runtime=1, verbose_progress=False + ) + UnderwayData_output_file.write_latest_locations( + pset_UnderwayData, time=total_time + ) print("Cruise has ended. Please wait for drifters and/or Argo floats to finish.") return drifter_time, argo_time, total_time def create_fieldset(config): - '''Creates fieldset from netcdf files and adds bathymetry data for CTD cast, returns fieldset with negative depth values''' + """ + Create fieldset from netcdf files and add bathymetry data for CTD cast, return fieldset with negative depth values. + :param config: The cruise configuration. + :returns: The fieldset. + :raises ValueError: If downloaded data is not as expected. + """ datadirname = os.path.dirname(__file__) filenames = { "U": os.path.join(datadirname, "studentdata_UV.nc"), "V": os.path.join(datadirname, "studentdata_UV.nc"), "S": os.path.join(datadirname, "studentdata_S.nc"), - "T": os.path.join(datadirname, "studentdata_T.nc")} - variables = {'U': 'uo', 'V': 'vo', 'S': 'so', 'T': 'thetao'} - dimensions = {'lon': 'longitude', 'lat': 'latitude', 'time': 'time', 'depth': 'depth'} + "T": os.path.join(datadirname, "studentdata_T.nc"), + } + variables = {"U": "uo", "V": "vo", "S": "so", "T": "thetao"} + dimensions = { + "lon": "longitude", + "lat": "latitude", + "time": "time", + "depth": "depth", + } # create the fieldset and set interpolation methods - fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, allow_time_extrapolation=True) + fieldset = FieldSet.from_netcdf( + filenames, variables, dimensions, allow_time_extrapolation=True + ) fieldset.T.interp_method = "linear_invdist_land_tracer" fieldset.S.interp_method = "linear_invdist_land_tracer" for g in fieldset.gridset.grids: @@ -190,53 +280,76 @@ def create_fieldset(config): g.depth = -g.depth # make depth negative fieldset.mindepth = -fieldset.U.depth[0] # uppermost layer in the hydrodynamic data if config.CTD_settings["max_depth"] == "max": - fieldset.add_constant('max_depth', -fieldset.U.depth[-1]) + fieldset.add_constant("max_depth", -fieldset.U.depth[-1]) else: - fieldset.add_constant('max_depth', config.CTD_settings["max_depth"]) + fieldset.add_constant("max_depth", config.CTD_settings["max_depth"]) fieldset.add_constant("maxtime", fieldset.U.grid.time_full[-1]) # add bathymetry data to the fieldset for CTD cast bathymetry_file = os.path.join(datadirname, "GLO-MFC_001_024_mask_bathy.nc") - bathymetry_variables = ('bathymetry', 'deptho') - bathymetry_dimensions = {'lon': 'longitude', 'lat': 'latitude'} - bathymetry_field = Field.from_netcdf(bathymetry_file, bathymetry_variables, bathymetry_dimensions) + bathymetry_variables = ("bathymetry", "deptho") + bathymetry_dimensions = {"lon": "longitude", "lat": "latitude"} + bathymetry_field = Field.from_netcdf( + bathymetry_file, bathymetry_variables, bathymetry_dimensions + ) fieldset.add_field(bathymetry_field) # read in data already - 
fieldset.computeTimeChunk(0,1) + fieldset.computeTimeChunk(0, 1) if fieldset.U.lon.min() > config.region_of_interest["West"]: - raise ValueError("FieldSet western boundary is outside region of interest. Please run download_data.py again.") + raise ValueError( + "FieldSet western boundary is outside region of interest. Please run download_data.py again." + ) if fieldset.U.lon.max() < config.region_of_interest["East"]: - raise ValueError("FieldSet eastern boundary is outside region of interest. Please run download_data.py again.") + raise ValueError( + "FieldSet eastern boundary is outside region of interest. Please run download_data.py again." + ) if fieldset.U.lat.min() > config.region_of_interest["South"]: - raise ValueError("FieldSet southern boundary is outside region of interest. Please run download_data.py again.") + raise ValueError( + "FieldSet southern boundary is outside region of interest. Please run download_data.py again." + ) if fieldset.U.lat.max() < config.region_of_interest["North"]: - raise ValueError("FieldSet northern boundary is outside region of interest. Please run download_data.py again.") + raise ValueError( + "FieldSet northern boundary is outside region of interest. Please run download_data.py again." + ) return fieldset + def shiproute(config): - '''Takes in route coordinates and returns lat and lon points within region of interest to sample''' + """ + Take in route coordinates and return lat and lon points within region of interest to sample. + :param config: The cruise configuration. + :returns: lat and lon points within region of interest to sample. + """ # Initialize lists to store intermediate points lons = [] lats = [] # Loop over station coordinates and calculate intermediate points along great circle path - for i in (range(len(config.route_coordinates)-1)): + for i in range(len(config.route_coordinates) - 1): startlong = config.route_coordinates[i][0] startlat = config.route_coordinates[i][1] - endlong = config.route_coordinates[i+1][0] - endlat = config.route_coordinates[i+1][1] + endlong = config.route_coordinates[i + 1][0] + endlat = config.route_coordinates[i + 1][1] # calculate line string along path with segments every 5 min for ADCP measurements # current cruise speed is 10knots = 5.14 m/s * 60*5 = 1542 m every 5 min # Realistic time between measurements is 2 min on Pelagia according to Floran cruise_speed = 5.14 - geod = pyproj.Geod(ellps='WGS84') + geod = pyproj.Geod(ellps="WGS84") azimuth1, azimuth2, distance = geod.inv(startlong, startlat, endlong, endlat) - if distance > (cruise_speed*60*5): - r = geod.inv_intermediate(startlong, startlat, endlong, endlat, del_s=1545, initial_idx=0, return_back_azimuth=False) - lons = np.append(lons, r.lons) # stored as a list of arrays + if distance > (cruise_speed * 60 * 5): + r = geod.inv_intermediate( + startlong, + startlat, + endlong, + endlat, + del_s=1545, + initial_idx=0, + return_back_azimuth=False, + ) + lons = np.append(lons, r.lons) # stored as a list of arrays lats = np.append(lats, r.lats) else: lons = np.append(lons, endlong) diff --git a/virtual_ship/virtual_ship_configuration.py b/virtual_ship/virtual_ship_configuration.py index d90a1397..fb796dc8 100644 --- a/virtual_ship/virtual_ship_configuration.py +++ b/virtual_ship/virtual_ship_configuration.py @@ -1,12 +1,23 @@ -from shapely.geometry import Point, Polygon -import os -import json +"""VirtualShipConfiguration class.""" + import datetime +import json +import os from datetime import timedelta +from shapely.geometry import Point, Polygon + 
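For readers new to pyproj, the route sampling in `shiproute` above comes down to two calls: `Geod.inv` for the geodesic distance of a leg and `Geod.inv_intermediate` for evenly spaced waypoints along it. A standalone sketch with hypothetical endpoints (the `del_s=1545` spacing corresponds to roughly five minutes of steaming at 10 knots, about 5.14 m/s):

```python
import pyproj

# Hypothetical leg endpoints; a ~1.5 km spacing mirrors one ADCP
# sample every 5 minutes at cruise speed.
geod = pyproj.Geod(ellps="WGS84")
start_lon, start_lat, end_lon, end_lat = -5.0, 45.0, -4.0, 46.0
_, _, distance = geod.inv(start_lon, start_lat, end_lon, end_lat)
if distance > 5.14 * 60 * 5:
    r = geod.inv_intermediate(
        start_lon, start_lat, end_lon, end_lat,
        del_s=1545, initial_idx=0, return_back_azimuth=False,
    )
    waypoints = list(zip(r.lons, r.lats))
else:
    waypoints = [(end_lon, end_lat)]
```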
class VirtualShipConfiguration: + """Configuration of the virtual ship, initialized from a json file.""" + def __init__(self, json_file): + """ + Initialize this object. + + :param json_file: Path to the JSON file to init from. + :raises ValueError: If JSON file not valid. + """ with open(os.path.join(os.path.dirname(__file__), json_file), "r") as file: json_input = json.loads(file.read()) for key in json_input: @@ -59,12 +70,12 @@ def __init__(self, json_file): raise ValueError( "CTD coordinates need to be within the region of interest" ) - if type(self.CTD_settings["max_depth"]) != int: + if type(self.CTD_settings["max_depth"]) is not int: if self.CTD_settings["max_depth"] != "max": raise ValueError( 'Specify "max" for maximum depth or a negative integer for max_depth in CTD_settings' ) - if type(self.CTD_settings["max_depth"]) == int: + if type(self.CTD_settings["max_depth"]) is int: if self.CTD_settings["max_depth"] > 0: raise ValueError("Invalid depth for CTD") if len(self.drifter_deploylocations) > 30: @@ -116,7 +127,7 @@ def __init__(self, json_file): raise ValueError( "Specify negative depth. Max depth for argo is -5727 m due to data availability" ) - if type(self.argo_characteristics["vertical_speed"]) != float: + if type(self.argo_characteristics["vertical_speed"]) is not float: raise ValueError("Specify vertical speed for argo with decimals in m/s") if self.argo_characteristics["vertical_speed"] > 0: self.argo_characteristics["vertical_speed"] = -self.argo_characteristics[
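A note on the `type(...) != int` to `type(...) is not int` changes in this file: flake8's E721 rule asks for identity comparisons when comparing types. The exact check used here rejects subclasses, including `bool`, whereas `isinstance` would accept them; a small sketch with a stand-in dict rather than the real configuration object:

```python
# Stand-in for self.CTD_settings; not the real configuration object.
ctd_settings = {"max_depth": "max"}

max_depth = ctd_settings["max_depth"]
if type(max_depth) is not int:  # exact type check, as in this PR
    if max_depth != "max":
        raise ValueError(
            'Specify "max" for maximum depth or a negative integer for max_depth'
        )
elif max_depth > 0:
    raise ValueError("Invalid depth for CTD")

# isinstance(max_depth, int) would also accept bool (True > 0 passes),
# which is usually not wanted for a depth setting.
```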