From 65baa325a9c7c336ccc1d55b0410088686c388a0 Mon Sep 17 00:00:00 2001 From: Nick Hodgskin <36369090+VeckoTheGecko@users.noreply.github.com> Date: Fri, 3 Oct 2025 13:58:10 +0200 Subject: [PATCH 01/15] Apply needs-triage label to incoming issues (#212) --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/ISSUE_TEMPLATE/feature_request.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 138cbb37..b5373470 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,7 +1,7 @@ --- name: Bug report about: Create a report to help us improve title: "" -labels: bug +labels: ["bug", "needs-triage"] assignees: "" --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 8e47557f..1adc441f 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea for this project title: "" -labels: enhancement +labels: ["enhancement", "needs-triage"] assignees: "" --- From 99213fcc14fd8cc8f570d687242649d181fd9e9b Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:44:49 +0200 Subject: [PATCH 02/15] unify schedule.yaml and ship_config.yaml to singular expedition.yaml --- docs/user-guide/quickstart.md | 8 +- src/virtualship/cli/commands.py | 33 +- src/virtualship/expedition/do_expedition.py | 8 +- src/virtualship/models/__init__.py | 9 +- src/virtualship/models/expedition.py | 451 ++++++++++++++++++++ src/virtualship/models/schedule.py | 236 ---------- src/virtualship/models/ship_config.py | 320 -------------- src/virtualship/static/expedition.yaml | 74 ++++ src/virtualship/static/schedule.yaml | 42 -- src/virtualship/static/ship_config.yaml | 30 -- src/virtualship/utils.py | 56 ++- 11 files changed, 576 insertions(+), 691 deletions(-) create mode 100644 
src/virtualship/models/expedition.py delete mode 100644 src/virtualship/models/schedule.py delete mode 100644 src/virtualship/models/ship_config.py create mode 100644 src/virtualship/static/expedition.yaml delete mode 100644 src/virtualship/static/schedule.yaml delete mode 100644 src/virtualship/static/ship_config.yaml diff --git a/docs/user-guide/quickstart.md b/docs/user-guide/quickstart.md index 59a514c7..04dbea90 100644 --- a/docs/user-guide/quickstart.md +++ b/docs/user-guide/quickstart.md @@ -46,10 +46,10 @@ virtualship init EXPEDITION_NAME --from-mfp CoordinatesExport.xlsx The `CoordinatesExport.xlsx` in the `virtualship init` command refers to the .xlsx file exported from MFP. Replace the filename with the name of your exported .xlsx file (and make sure to move it from the Downloads to the folder/directory in which you are running the expedition). ``` -This will create a folder/directory called `EXPEDITION_NAME` with two files: `schedule.yaml` and `ship_config.yaml` based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indictates that the exported coordinates will be used. +This will create a folder/directory called `EXPEDITION_NAME` with a single file: `expedition.yaml` containing details on both the expedition schedule and ship configuration, based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indicates that the exported coordinates will be used. ```{note} -For advanced users: it is also possible to run the expedition initialisation step without an MFP .xlsx export file. In this case you should simply run `virtualship init EXPEDITION_NAME` in the CLI. This will write example `schedule.yaml` and `ship_config.yaml` files in the `EXPEDITION_NAME` folder/directory. These files contain example waypoints, timings and instrument selections, but can be edited or propagated through the rest of the workflow unedited to run a sample expedition. 
+For advanced users: it is also possible to run the expedition initialisation step without an MFP .xlsx export file. In this case you should simply run `virtualship init EXPEDITION_NAME` in the CLI. This will write an example `expedition.yaml` file in the `EXPEDITION_NAME` folder/directory. This file contains example waypoints, timings, instrument selections, and ship configuration, but can be edited or propagated through the rest of the workflow unedited to run a sample expedition. ``` ## Expedition scheduling & ship configuration @@ -61,7 +61,7 @@ virtualship plan EXPEDITION_NAME ``` ```{tip} -Using the `virtualship plan` tool is optional. Advanced users can also edit the `schedule.yaml` and `ship_config.yaml` files directly if preferred. +Using the `virtualship plan` tool is optional. Advanced users can also edit the `expedition.yaml` file directly if preferred. ``` The planning tool should look something like this and offers an intuitive way to make your selections: @@ -111,7 +111,7 @@ For advanced users: you can also make further customisations to behaviours of al When you are happy with your ship configuration and schedule plan, press _Save Changes_. ```{note} -On pressing _Save Changes_ the tool will check the selections are valid (for example that the ship will be able to reach each waypoint in time). If they are, the changes will be saved to the `ship_config.yaml` and `schedule.yaml` files, ready for the next steps. If your selections are invalid you should be provided with information on how to fix them. +On pressing _Save Changes_ the tool will check the selections are valid (for example that the ship will be able to reach each waypoint in time). If they are, the changes will be saved to the `expedition.yaml` file, ready for the next steps. If your selections are invalid you should be provided with information on how to fix them. 
``` ## Fetch the data diff --git a/src/virtualship/cli/commands.py b/src/virtualship/cli/commands.py index 72d37866..666536eb 100644 --- a/src/virtualship/cli/commands.py +++ b/src/virtualship/cli/commands.py @@ -7,8 +7,7 @@ from virtualship.cli._plan import _plan from virtualship.expedition.do_expedition import do_expedition from virtualship.utils import ( - SCHEDULE, - SHIP_CONFIG, + EXPEDITION, mfp_to_yaml, ) @@ -28,47 +27,39 @@ ) def init(path, from_mfp): """ - Initialize a directory for a new expedition, with an example schedule and ship config files. + Initialize a directory for a new expedition, with an expedition.yaml file. - If --mfp-file is provided, it will generate the schedule from the MPF file instead. + If --mfp-file is provided, it will generate the expedition.yaml from the MPF file instead. """ path = Path(path) path.mkdir(exist_ok=True) - config = path / SHIP_CONFIG - schedule = path / SCHEDULE + expedition = path / EXPEDITION - if config.exists(): + if expedition.exists(): raise FileExistsError( - f"File '{config}' already exist. Please remove it or choose another directory." + f"File '{expedition}' already exist. Please remove it or choose another directory." ) - if schedule.exists(): - raise FileExistsError( - f"File '{schedule}' already exist. Please remove it or choose another directory." - ) - - config.write_text(utils.get_example_config()) if from_mfp: mfp_file = Path(from_mfp) # Generate schedule.yaml from the MPF file click.echo(f"Generating schedule from {mfp_file}...") - mfp_to_yaml(mfp_file, schedule) + mfp_to_yaml(mfp_file, expedition) click.echo( "\nāš ļø The generated schedule does not contain TIME values or INSTRUMENT selections. āš ļø" "\n\nNow please either use the `\033[4mvirtualship plan\033[0m` app to complete the schedule configuration, " - "\nOR edit 'schedule.yaml' and manually add the necessary time values and instrument selections." 
- "\n\nIf editing 'schedule.yaml' manually:" + "\nOR edit 'expedition.yaml' and manually add the necessary time values and instrument selections under the 'schedule' heading." + "\n\nIf editing 'expedition.yaml' manually:" "\n\nšŸ•’ Expected time format: 'YYYY-MM-DD HH:MM:SS' (e.g., '2023-10-20 01:00:00')." "\n\nšŸŒ”ļø Expected instrument(s) format: one line per instrument e.g." f"\n\n{' ' * 15}waypoints:\n{' ' * 15}- instrument:\n{' ' * 19}- CTD\n{' ' * 19}- ARGO_FLOAT\n" ) else: - # Create a default example schedule - # schedule_body = utils.get_example_schedule() - schedule.write_text(utils.get_example_schedule()) + # Create a default example expedition YAML + expedition.write_text(utils.get_example_expedition()) - click.echo(f"Created '{config.name}' and '{schedule.name}' at {path}.") + click.echo(f"Created '{expedition.name}' at {path}.") @click.command() diff --git a/src/virtualship/expedition/do_expedition.py b/src/virtualship/expedition/do_expedition.py index 56ee79fa..e4e7dc08 100644 --- a/src/virtualship/expedition/do_expedition.py +++ b/src/virtualship/expedition/do_expedition.py @@ -10,8 +10,7 @@ from virtualship.models import Schedule, ShipConfig from virtualship.utils import ( CHECKPOINT, - _get_schedule, - _get_ship_config, + _get_expedition, ) from .checkpoint import Checkpoint @@ -38,8 +37,9 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> if isinstance(expedition_dir, str): expedition_dir = Path(expedition_dir) - ship_config = _get_ship_config(expedition_dir) - schedule = _get_schedule(expedition_dir) + expedition_config = _get_expedition(expedition_dir) + ship_config = expedition_config.ship_config + schedule = expedition_config.schedule # Verify ship_config file is consistent with schedule ship_config.verify(schedule) diff --git a/src/virtualship/models/__init__.py b/src/virtualship/models/__init__.py index 48106056..d7d70cd6 100644 --- a/src/virtualship/models/__init__.py +++ 
b/src/virtualship/models/__init__.py @@ -1,18 +1,20 @@ """Pydantic models and data classes used to configure virtualship (i.e., in the configuration files or settings).""" -from .location import Location -from .schedule import Schedule, Waypoint -from .ship_config import ( +from .expedition import ( ADCPConfig, ArgoFloatConfig, CTD_BGCConfig, CTDConfig, DrifterConfig, + Expedition, InstrumentType, + Schedule, ShipConfig, ShipUnderwaterSTConfig, + Waypoint, XBTConfig, ) +from .location import Location from .space_time_region import ( SpaceTimeRegion, SpatialRange, @@ -39,4 +41,5 @@ "TimeRange", "SpaceTimeRegion", "Spacetime", + "Expedition", ] diff --git a/src/virtualship/models/expedition.py b/src/virtualship/models/expedition.py new file mode 100644 index 00000000..1f91d6de --- /dev/null +++ b/src/virtualship/models/expedition.py @@ -0,0 +1,451 @@ +from __future__ import annotations + +import itertools +from datetime import datetime, timedelta +from enum import Enum +from typing import TYPE_CHECKING + +import pydantic +import pyproj +import yaml + +from virtualship.errors import ConfigError, ScheduleError +from virtualship.utils import _validate_numeric_mins_to_timedelta + +from .location import Location +from .space_time_region import SpaceTimeRegion + +if TYPE_CHECKING: + from parcels import FieldSet + + from virtualship.expedition.input_data import InputData + + +projection: pyproj.Geod = pyproj.Geod(ellps="WGS84") + + +class Expedition(pydantic.BaseModel): + """Expedition class, including schedule and ship config.""" + + schedule: Schedule + ship_config: ShipConfig + + model_config = pydantic.ConfigDict(extra="forbid") + + def to_yaml(self, file_path: str) -> None: + """Write expedition object to yaml file.""" + with open(file_path, "w") as file: + yaml.dump(self.model_dump(by_alias=True), file) + + @classmethod + def from_yaml(cls, file_path: str) -> Expedition: + """Load config from yaml file.""" + with open(file_path) as file: + data = yaml.safe_load(file) 
+ + return Expedition(**data) + + +class Schedule(pydantic.BaseModel): + """Schedule of the virtual ship.""" + + waypoints: list[Waypoint] + space_time_region: SpaceTimeRegion | None = None + + model_config = pydantic.ConfigDict(extra="forbid") + + def get_instruments(self) -> set[InstrumentType]: + """Return a set of unique InstrumentType enums used in the schedule.""" + instruments_in_schedule = [] + for waypoint in self.waypoints: + if waypoint.instrument: + for instrument in waypoint.instrument: + if instrument: + instruments_in_schedule.append(instrument) + return set(instruments_in_schedule) + + def verify( + self, + ship_speed: float, + input_data: InputData | None, + *, + check_space_time_region: bool = False, + ignore_missing_fieldsets: bool = False, + ) -> None: + """ + Verify the feasibility and correctness of the schedule's waypoints. + + This method checks various conditions to ensure the schedule is valid: + 1. At least one waypoint is provided. + 2. The first waypoint has a specified time. + 3. Waypoint times are in ascending order. + 4. All waypoints are in water (not on land). + 5. The ship can arrive on time at each waypoint given its speed. + + :param ship_speed: The ship's speed in knots. + :param input_data: An InputData object containing fieldsets used to check if waypoints are on water. + :param check_space_time_region: whether to check for missing space_time_region. + :param ignore_missing_fieldsets: whether to ignore warning for missing field sets. + :raises PlanningError: If any of the verification checks fail, indicating infeasible or incorrect waypoints. + :raises NotImplementedError: If an instrument in the schedule is not implemented. + :return: None. The method doesn't return a value but raises exceptions if verification fails. + """ + print("\nVerifying route... ") + + if check_space_time_region and self.space_time_region is None: + raise ScheduleError( + "space_time_region not found in schedule, please define it to fetch the data." 
+ ) + + if len(self.waypoints) == 0: + raise ScheduleError("At least one waypoint must be provided.") + + # check first waypoint has a time + if self.waypoints[0].time is None: + raise ScheduleError("First waypoint must have a specified time.") + + # check waypoint times are in ascending order + timed_waypoints = [wp for wp in self.waypoints if wp.time is not None] + checks = [ + next.time >= cur.time for cur, next in itertools.pairwise(timed_waypoints) + ] + if not all(checks): + invalid_i = [i for i, c in enumerate(checks) if c] + raise ScheduleError( + f"Waypoint(s) {', '.join(f'#{i + 1}' for i in invalid_i)}: each waypoint should be timed after all previous waypoints", + ) + + # check if all waypoints are in water + # this is done by picking an arbitrary provided fieldset and checking if UV is not zero + + # get all available fieldsets + available_fieldsets = [] + if input_data is not None: + fieldsets = [ + input_data.adcp_fieldset, + input_data.argo_float_fieldset, + input_data.ctd_fieldset, + input_data.drifter_fieldset, + input_data.ship_underwater_st_fieldset, + ] + for fs in fieldsets: + if fs is not None: + available_fieldsets.append(fs) + + # check if there are any fieldsets, else it's an error + if len(available_fieldsets) == 0: + if not ignore_missing_fieldsets: + print( + "Cannot verify because no fieldsets have been loaded. This is probably " + "because you are not using any instruments in your schedule. This is not a problem, " + "but carefully check your waypoint locations manually." 
+ ) + + else: + # pick any + fieldset = available_fieldsets[0] + # get waypoints with 0 UV + land_waypoints = [ + (wp_i, wp) + for wp_i, wp in enumerate(self.waypoints) + if _is_on_land_zero_uv(fieldset, wp) + ] + # raise an error if there are any + if len(land_waypoints) > 0: + raise ScheduleError( + f"The following waypoints are on land: {['#' + str(wp_i) + ' ' + str(wp) for (wp_i, wp) in land_waypoints]}" + ) + + # check that ship will arrive on time at each waypoint (in case no unexpected event happen) + time = self.waypoints[0].time + for wp_i, (wp, wp_next) in enumerate( + zip(self.waypoints, self.waypoints[1:], strict=False) + ): + if wp.instrument is InstrumentType.CTD: + time += timedelta(minutes=20) + + geodinv: tuple[float, float, float] = projection.inv( + wp.location.lon, + wp.location.lat, + wp_next.location.lon, + wp_next.location.lat, + ) + distance = geodinv[2] + + time_to_reach = timedelta(seconds=distance / ship_speed * 3600 / 1852) + arrival_time = time + time_to_reach + + if wp_next.time is None: + time = arrival_time + elif arrival_time > wp_next.time: + raise ScheduleError( + f"Waypoint planning is not valid: would arrive too late at waypoint number {wp_i + 2}. " + f"location: {wp_next.location} time: {wp_next.time} instrument: {wp_next.instrument}" + ) + else: + time = wp_next.time + + print("... 
All good to go!") + + +class Waypoint(pydantic.BaseModel): + """A Waypoint to sail to with an optional time and an optional instrument.""" + + location: Location + time: datetime | None = None + instrument: InstrumentType | list[InstrumentType] | None = None + + @pydantic.field_serializer("instrument") + def serialize_instrument(self, instrument): + """Ensure InstrumentType is serialized as a string (or list of strings).""" + if isinstance(instrument, list): + return [inst.value for inst in instrument] + return instrument.value if instrument else None + + +class InstrumentType(Enum): + """Types of the instruments.""" + + CTD = "CTD" + CTD_BGC = "CTD_BGC" + DRIFTER = "DRIFTER" + ARGO_FLOAT = "ARGO_FLOAT" + XBT = "XBT" + + +class ArgoFloatConfig(pydantic.BaseModel): + """Configuration for argos floats.""" + + min_depth_meter: float = pydantic.Field(le=0.0) + max_depth_meter: float = pydantic.Field(le=0.0) + drift_depth_meter: float = pydantic.Field(le=0.0) + vertical_speed_meter_per_second: float = pydantic.Field(lt=0.0) + cycle_days: float = pydantic.Field(gt=0.0) + drift_days: float = pydantic.Field(gt=0.0) + + +class ADCPConfig(pydantic.BaseModel): + """Configuration for ADCP instrument.""" + + max_depth_meter: float = pydantic.Field(le=0.0) + num_bins: int = pydantic.Field(gt=0.0) + period: timedelta = pydantic.Field( + serialization_alias="period_minutes", + validation_alias="period_minutes", + gt=timedelta(), + ) + + model_config = pydantic.ConfigDict(populate_by_name=True) + + @pydantic.field_serializer("period") + def _serialize_period(self, value: timedelta, _info): + return value.total_seconds() / 60.0 + + @pydantic.field_validator("period", mode="before") + def _validate_period(cls, value: int | float | timedelta) -> timedelta: + return _validate_numeric_mins_to_timedelta(value) + + +class CTDConfig(pydantic.BaseModel): + """Configuration for CTD instrument.""" + + stationkeeping_time: timedelta = pydantic.Field( + 
serialization_alias="stationkeeping_time_minutes", + validation_alias="stationkeeping_time_minutes", + gt=timedelta(), + ) + min_depth_meter: float = pydantic.Field(le=0.0) + max_depth_meter: float = pydantic.Field(le=0.0) + + model_config = pydantic.ConfigDict(populate_by_name=True) + + @pydantic.field_serializer("stationkeeping_time") + def _serialize_stationkeeping_time(self, value: timedelta, _info): + return value.total_seconds() / 60.0 + + @pydantic.field_validator("stationkeeping_time", mode="before") + def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: + return _validate_numeric_mins_to_timedelta(value) + + +class CTD_BGCConfig(pydantic.BaseModel): + """Configuration for CTD_BGC instrument.""" + + stationkeeping_time: timedelta = pydantic.Field( + serialization_alias="stationkeeping_time_minutes", + validation_alias="stationkeeping_time_minutes", + gt=timedelta(), + ) + min_depth_meter: float = pydantic.Field(le=0.0) + max_depth_meter: float = pydantic.Field(le=0.0) + + model_config = pydantic.ConfigDict(populate_by_name=True) + + @pydantic.field_serializer("stationkeeping_time") + def _serialize_stationkeeping_time(self, value: timedelta, _info): + return value.total_seconds() / 60.0 + + @pydantic.field_validator("stationkeeping_time", mode="before") + def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: + return _validate_numeric_mins_to_timedelta(value) + + +class ShipUnderwaterSTConfig(pydantic.BaseModel): + """Configuration for underwater ST.""" + + period: timedelta = pydantic.Field( + serialization_alias="period_minutes", + validation_alias="period_minutes", + gt=timedelta(), + ) + + model_config = pydantic.ConfigDict(populate_by_name=True) + + @pydantic.field_serializer("period") + def _serialize_period(self, value: timedelta, _info): + return value.total_seconds() / 60.0 + + @pydantic.field_validator("period", mode="before") + def _validate_period(cls, value: int | float | timedelta) 
-> timedelta: + return _validate_numeric_mins_to_timedelta(value) + + +class DrifterConfig(pydantic.BaseModel): + """Configuration for drifters.""" + + depth_meter: float = pydantic.Field(le=0.0) + lifetime: timedelta = pydantic.Field( + serialization_alias="lifetime_minutes", + validation_alias="lifetime_minutes", + gt=timedelta(), + ) + + model_config = pydantic.ConfigDict(populate_by_name=True) + + @pydantic.field_serializer("lifetime") + def _serialize_lifetime(self, value: timedelta, _info): + return value.total_seconds() / 60.0 + + @pydantic.field_validator("lifetime", mode="before") + def _validate_lifetime(cls, value: int | float | timedelta) -> timedelta: + return _validate_numeric_mins_to_timedelta(value) + + +class XBTConfig(pydantic.BaseModel): + """Configuration for xbt instrument.""" + + min_depth_meter: float = pydantic.Field(le=0.0) + max_depth_meter: float = pydantic.Field(le=0.0) + fall_speed_meter_per_second: float = pydantic.Field(gt=0.0) + deceleration_coefficient: float = pydantic.Field(gt=0.0) + + +class ShipConfig(pydantic.BaseModel): + """Configuration of the virtual ship.""" + + ship_speed_knots: float = pydantic.Field(gt=0.0) + """ + Velocity of the ship in knots. + """ + + argo_float_config: ArgoFloatConfig | None = None + """ + Argo float configuration. + + If None, no argo floats can be deployed. + """ + + adcp_config: ADCPConfig | None = None + """ + ADCP configuration. + + If None, no ADCP measurements will be performed. + """ + + ctd_config: CTDConfig | None = None + """ + CTD configuration. + + If None, no CTDs can be cast. + """ + + ctd_bgc_config: CTD_BGCConfig | None = None + """ + CTD_BGC configuration. + + If None, no BGC CTDs can be cast. + """ + + ship_underwater_st_config: ShipUnderwaterSTConfig | None = None + """ + Ship underwater salinity temperature measurementconfiguration. + + If None, no ST measurements will be performed. + """ + + drifter_config: DrifterConfig | None = None + """ + Drifter configuration. 
+ + If None, no drifters can be deployed. + """ + + xbt_config: XBTConfig | None = None + """ + XBT configuration. + + If None, no XBTs can be cast. + """ + + model_config = pydantic.ConfigDict(extra="forbid") + + def verify(self, schedule: Schedule) -> None: + """ + Verify ship configuration against the schedule. + + Removes instrument configs not present in the schedule and checks that all scheduled instruments are configured. + Raises ConfigError if any scheduled instrument is missing a config. + """ + instruments_in_schedule = schedule.get_instruments() + instrument_config_map = { + InstrumentType.ARGO_FLOAT: "argo_float_config", + InstrumentType.DRIFTER: "drifter_config", + InstrumentType.XBT: "xbt_config", + InstrumentType.CTD: "ctd_config", + InstrumentType.CTD_BGC: "ctd_bgc_config", + } + # Remove configs for unused instruments + for inst_type, config_attr in instrument_config_map.items(): + if hasattr(self, config_attr) and inst_type not in instruments_in_schedule: + print( + f"{inst_type.value} configuration provided but not in schedule. Removing config." + ) + setattr(self, config_attr, None) + # Check all scheduled instruments are configured + for inst_type in instruments_in_schedule: + config_attr = instrument_config_map.get(inst_type) + if ( + not config_attr + or not hasattr(self, config_attr) + or getattr(self, config_attr) is None + ): + raise ConfigError( + f"Schedule includes instrument '{inst_type.value}', but ship_config does not provide configuration for it." + ) + + +def _is_on_land_zero_uv(fieldset: FieldSet, waypoint: Waypoint) -> bool: + """ + Check if waypoint is on land by assuming zero velocity means land. + + :param fieldset: The fieldset to sample the velocity from. + :param waypoint: The waypoint to check. + :returns: If the waypoint is on land. 
+ """ + return fieldset.UV.eval( + 0, + fieldset.gridset.grids[0].depth[0], + waypoint.location.lat, + waypoint.location.lon, + applyConversion=False, + ) == (0.0, 0.0) diff --git a/src/virtualship/models/schedule.py b/src/virtualship/models/schedule.py deleted file mode 100644 index 3de44f09..00000000 --- a/src/virtualship/models/schedule.py +++ /dev/null @@ -1,236 +0,0 @@ -"""Schedule class.""" - -from __future__ import annotations - -import itertools -from datetime import datetime, timedelta -from pathlib import Path -from typing import TYPE_CHECKING - -import pydantic -import pyproj -import yaml - -from virtualship.errors import ScheduleError - -from .location import Location -from .ship_config import InstrumentType -from .space_time_region import SpaceTimeRegion - -if TYPE_CHECKING: - from parcels import FieldSet - - from virtualship.expedition.input_data import InputData - -projection: pyproj.Geod = pyproj.Geod(ellps="WGS84") - - -class Waypoint(pydantic.BaseModel): - """A Waypoint to sail to with an optional time and an optional instrument.""" - - location: Location - time: datetime | None = None - instrument: InstrumentType | list[InstrumentType] | None = None - - @pydantic.field_serializer("instrument") - def serialize_instrument(self, instrument): - """Ensure InstrumentType is serialized as a string (or list of strings).""" - if isinstance(instrument, list): - return [inst.value for inst in instrument] - return instrument.value if instrument else None - - -class Schedule(pydantic.BaseModel): - """Schedule of the virtual ship.""" - - waypoints: list[Waypoint] - space_time_region: SpaceTimeRegion | None = None - - model_config = pydantic.ConfigDict(extra="forbid") - - def to_yaml(self, file_path: str | Path) -> None: - """ - Write schedule to yaml file. - - :param file_path: Path to the file to write to. 
- """ - with open(file_path, "w") as file: - yaml.dump( - self.model_dump( - by_alias=True, - ), - file, - ) - - @classmethod - def from_yaml(cls, file_path: str | Path) -> Schedule: - """ - Load schedule from yaml file. - - :param file_path: Path to the file to load from. - :returns: The schedule. - """ - with open(file_path) as file: - data = yaml.safe_load(file) - return Schedule(**data) - - def get_instruments(self) -> set[InstrumentType]: - """ - Retrieve a set of unique instruments used in the schedule. - - This method iterates through all waypoints in the schedule and collects - the instruments associated with each waypoint. It returns a set of unique - instruments, either as objects or as names. - - :raises CheckpointError: If the past waypoints in the given schedule - have been changed compared to the checkpoint. - :return: set: A set of unique instruments used in the schedule. - - """ - instruments_in_schedule = [] - for waypoint in self.waypoints: - if waypoint.instrument: - for instrument in waypoint.instrument: - if instrument: - instruments_in_schedule.append(instrument) - return set(instruments_in_schedule) - - def verify( - self, - ship_speed: float, - input_data: InputData | None, - *, - check_space_time_region: bool = False, - ignore_missing_fieldsets: bool = False, - ) -> None: - """ - Verify the feasibility and correctness of the schedule's waypoints. - - This method checks various conditions to ensure the schedule is valid: - 1. At least one waypoint is provided. - 2. The first waypoint has a specified time. - 3. Waypoint times are in ascending order. - 4. All waypoints are in water (not on land). - 5. The ship can arrive on time at each waypoint given its speed. - - :param ship_speed: The ship's speed in knots. - :param input_data: An InputData object containing fieldsets used to check if waypoints are on water. - :param check_space_time_region: whether to check for missing space_time_region. 
- :param ignore_missing_fieldsets: whether to ignore warning for missing field sets. - :raises PlanningError: If any of the verification checks fail, indicating infeasible or incorrect waypoints. - :raises NotImplementedError: If an instrument in the schedule is not implemented. - :return: None. The method doesn't return a value but raises exceptions if verification fails. - """ - print("\nVerifying route... ") - - if check_space_time_region and self.space_time_region is None: - raise ScheduleError( - "space_time_region not found in schedule, please define it to fetch the data." - ) - - if len(self.waypoints) == 0: - raise ScheduleError("At least one waypoint must be provided.") - - # check first waypoint has a time - if self.waypoints[0].time is None: - raise ScheduleError("First waypoint must have a specified time.") - - # check waypoint times are in ascending order - timed_waypoints = [wp for wp in self.waypoints if wp.time is not None] - checks = [ - next.time >= cur.time for cur, next in itertools.pairwise(timed_waypoints) - ] - if not all(checks): - invalid_i = [i for i, c in enumerate(checks) if c] - raise ScheduleError( - f"Waypoint(s) {', '.join(f'#{i + 1}' for i in invalid_i)}: each waypoint should be timed after all previous waypoints", - ) - - # check if all waypoints are in water - # this is done by picking an arbitrary provided fieldset and checking if UV is not zero - - # get all available fieldsets - available_fieldsets = [] - if input_data is not None: - fieldsets = [ - input_data.adcp_fieldset, - input_data.argo_float_fieldset, - input_data.ctd_fieldset, - input_data.drifter_fieldset, - input_data.ship_underwater_st_fieldset, - ] - for fs in fieldsets: - if fs is not None: - available_fieldsets.append(fs) - - # check if there are any fieldsets, else it's an error - if len(available_fieldsets) == 0: - if not ignore_missing_fieldsets: - print( - "Cannot verify because no fieldsets have been loaded. 
This is probably " - "because you are not using any instruments in your schedule. This is not a problem, " - "but carefully check your waypoint locations manually." - ) - - else: - # pick any - fieldset = available_fieldsets[0] - # get waypoints with 0 UV - land_waypoints = [ - (wp_i, wp) - for wp_i, wp in enumerate(self.waypoints) - if _is_on_land_zero_uv(fieldset, wp) - ] - # raise an error if there are any - if len(land_waypoints) > 0: - raise ScheduleError( - f"The following waypoints are on land: {['#' + str(wp_i) + ' ' + str(wp) for (wp_i, wp) in land_waypoints]}" - ) - - # check that ship will arrive on time at each waypoint (in case no unexpected event happen) - time = self.waypoints[0].time - for wp_i, (wp, wp_next) in enumerate( - zip(self.waypoints, self.waypoints[1:], strict=False) - ): - if wp.instrument is InstrumentType.CTD: - time += timedelta(minutes=20) - - geodinv: tuple[float, float, float] = projection.inv( - wp.location.lon, - wp.location.lat, - wp_next.location.lon, - wp_next.location.lat, - ) - distance = geodinv[2] - - time_to_reach = timedelta(seconds=distance / ship_speed * 3600 / 1852) - arrival_time = time + time_to_reach - - if wp_next.time is None: - time = arrival_time - elif arrival_time > wp_next.time: - raise ScheduleError( - f"Waypoint planning is not valid: would arrive too late at waypoint number {wp_i + 2}. " - f"location: {wp_next.location} time: {wp_next.time} instrument: {wp_next.instrument}" - ) - else: - time = wp_next.time - - print("... All good to go!") - - -def _is_on_land_zero_uv(fieldset: FieldSet, waypoint: Waypoint) -> bool: - """ - Check if waypoint is on land by assuming zero velocity means land. - - :param fieldset: The fieldset to sample the velocity from. - :param waypoint: The waypoint to check. - :returns: If the waypoint is on land. 
- """ - return fieldset.UV.eval( - 0, - fieldset.gridset.grids[0].depth[0], - waypoint.location.lat, - waypoint.location.lon, - applyConversion=False, - ) == (0.0, 0.0) diff --git a/src/virtualship/models/ship_config.py b/src/virtualship/models/ship_config.py deleted file mode 100644 index be3ee30d..00000000 --- a/src/virtualship/models/ship_config.py +++ /dev/null @@ -1,320 +0,0 @@ -"""ShipConfig and supporting classes.""" - -from __future__ import annotations - -from datetime import timedelta -from enum import Enum -from pathlib import Path -from typing import TYPE_CHECKING - -import pydantic -import yaml - -from virtualship.errors import ConfigError -from virtualship.utils import _validate_numeric_mins_to_timedelta - -if TYPE_CHECKING: - from .schedule import Schedule - - -class InstrumentType(Enum): - """Types of the instruments.""" - - CTD = "CTD" - CTD_BGC = "CTD_BGC" - DRIFTER = "DRIFTER" - ARGO_FLOAT = "ARGO_FLOAT" - XBT = "XBT" - - -class ArgoFloatConfig(pydantic.BaseModel): - """Configuration for argos floats.""" - - min_depth_meter: float = pydantic.Field(le=0.0) - max_depth_meter: float = pydantic.Field(le=0.0) - drift_depth_meter: float = pydantic.Field(le=0.0) - vertical_speed_meter_per_second: float = pydantic.Field(lt=0.0) - cycle_days: float = pydantic.Field(gt=0.0) - drift_days: float = pydantic.Field(gt=0.0) - - -class ADCPConfig(pydantic.BaseModel): - """Configuration for ADCP instrument.""" - - max_depth_meter: float = pydantic.Field(le=0.0) - num_bins: int = pydantic.Field(gt=0.0) - period: timedelta = pydantic.Field( - serialization_alias="period_minutes", - validation_alias="period_minutes", - gt=timedelta(), - ) - - model_config = pydantic.ConfigDict(populate_by_name=True) - - @pydantic.field_serializer("period") - def _serialize_period(self, value: timedelta, _info): - return value.total_seconds() / 60.0 - - @pydantic.field_validator("period", mode="before") - def _validate_period(cls, value: int | float | timedelta) -> timedelta: - return 
_validate_numeric_mins_to_timedelta(value) - - -class CTDConfig(pydantic.BaseModel): - """Configuration for CTD instrument.""" - - stationkeeping_time: timedelta = pydantic.Field( - serialization_alias="stationkeeping_time_minutes", - validation_alias="stationkeeping_time_minutes", - gt=timedelta(), - ) - min_depth_meter: float = pydantic.Field(le=0.0) - max_depth_meter: float = pydantic.Field(le=0.0) - - model_config = pydantic.ConfigDict(populate_by_name=True) - - @pydantic.field_serializer("stationkeeping_time") - def _serialize_stationkeeping_time(self, value: timedelta, _info): - return value.total_seconds() / 60.0 - - @pydantic.field_validator("stationkeeping_time", mode="before") - def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) - - -class CTD_BGCConfig(pydantic.BaseModel): - """Configuration for CTD_BGC instrument.""" - - stationkeeping_time: timedelta = pydantic.Field( - serialization_alias="stationkeeping_time_minutes", - validation_alias="stationkeeping_time_minutes", - gt=timedelta(), - ) - min_depth_meter: float = pydantic.Field(le=0.0) - max_depth_meter: float = pydantic.Field(le=0.0) - - model_config = pydantic.ConfigDict(populate_by_name=True) - - @pydantic.field_serializer("stationkeeping_time") - def _serialize_stationkeeping_time(self, value: timedelta, _info): - return value.total_seconds() / 60.0 - - @pydantic.field_validator("stationkeeping_time", mode="before") - def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) - - -class ShipUnderwaterSTConfig(pydantic.BaseModel): - """Configuration for underwater ST.""" - - period: timedelta = pydantic.Field( - serialization_alias="period_minutes", - validation_alias="period_minutes", - gt=timedelta(), - ) - - model_config = pydantic.ConfigDict(populate_by_name=True) - - @pydantic.field_serializer("period") - def 
_serialize_period(self, value: timedelta, _info): - return value.total_seconds() / 60.0 - - @pydantic.field_validator("period", mode="before") - def _validate_period(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) - - -class DrifterConfig(pydantic.BaseModel): - """Configuration for drifters.""" - - depth_meter: float = pydantic.Field(le=0.0) - lifetime: timedelta = pydantic.Field( - serialization_alias="lifetime_minutes", - validation_alias="lifetime_minutes", - gt=timedelta(), - ) - - model_config = pydantic.ConfigDict(populate_by_name=True) - - @pydantic.field_serializer("lifetime") - def _serialize_lifetime(self, value: timedelta, _info): - return value.total_seconds() / 60.0 - - @pydantic.field_validator("lifetime", mode="before") - def _validate_lifetime(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) - - -class XBTConfig(pydantic.BaseModel): - """Configuration for xbt instrument.""" - - min_depth_meter: float = pydantic.Field(le=0.0) - max_depth_meter: float = pydantic.Field(le=0.0) - fall_speed_meter_per_second: float = pydantic.Field(gt=0.0) - deceleration_coefficient: float = pydantic.Field(gt=0.0) - - -class ShipConfig(pydantic.BaseModel): - """Configuration of the virtual ship.""" - - ship_speed_knots: float = pydantic.Field(gt=0.0) - """ - Velocity of the ship in knots. - """ - - argo_float_config: ArgoFloatConfig | None = None - """ - Argo float configuration. - - If None, no argo floats can be deployed. - """ - - adcp_config: ADCPConfig | None = None - """ - ADCP configuration. - - If None, no ADCP measurements will be performed. - """ - - ctd_config: CTDConfig | None = None - """ - CTD configuration. - - If None, no CTDs can be cast. - """ - - ctd_bgc_config: CTD_BGCConfig | None = None - """ - CTD_BGC configuration. - - If None, no BGC CTDs can be cast. 
- """ - - ship_underwater_st_config: ShipUnderwaterSTConfig | None = None - """ - Ship underwater salinity temperature measurementconfiguration. - - If None, no ST measurements will be performed. - """ - - drifter_config: DrifterConfig | None = None - """ - Drifter configuration. - - If None, no drifters can be deployed. - """ - - xbt_config: XBTConfig | None = None - """ - XBT configuration. - - If None, no XBTs can be cast. - """ - - model_config = pydantic.ConfigDict(extra="forbid") - - def to_yaml(self, file_path: str | Path) -> None: - """ - Write config to yaml file. - - :param file_path: Path to the file to write to. - """ - with open(file_path, "w") as file: - yaml.dump(self.model_dump(by_alias=True), file) - - @classmethod - def from_yaml(cls, file_path: str | Path) -> ShipConfig: - """ - Load config from yaml file. - - :param file_path: Path to the file to load from. - :returns: The config. - """ - with open(file_path) as file: - data = yaml.safe_load(file) - return ShipConfig(**data) - - def verify(self, schedule: Schedule) -> None: - """ - Verify the ship configuration against the provided schedule. - - This function performs two main tasks: - 1. Removes instrument configurations that are not present in the schedule. - 2. Verifies that all instruments in the schedule have corresponding configurations. - - Parameters - ---------- - schedule : Schedule - The schedule object containing the planned instruments and waypoints. - - Returns - ------- - None - - Raises - ------ - ConfigError - If an instrument in the schedule does not have a corresponding configuration. - - Notes - ----- - - Prints a message if a configuration is provided for an instrument not in the schedule. - - Sets the configuration to None for instruments not in the schedule. - - Raises a ConfigError for each instrument in the schedule that lacks a configuration. 
- - """ - instruments_in_schedule = schedule.get_instruments() - - for instrument in [ - "ARGO_FLOAT", - "DRIFTER", - "XBT", - "CTD", - "CTD_BGC", - ]: # TODO make instrument names consistent capitals or lowercase throughout codebase - if hasattr(self, instrument.lower() + "_config") and not any( - instrument == schedule_instrument.name - for schedule_instrument in instruments_in_schedule - ): - print(f"{instrument} configuration provided but not in schedule.") - setattr(self, instrument.lower() + "_config", None) - - # verify instruments in schedule have configuration - # TODO: the ConfigError message could be improved to explain that the **schedule** file has X instrument but the **ship_config** file does not - for instrument in instruments_in_schedule: - try: - InstrumentType(instrument) - except ValueError as e: - raise NotImplementedError("Instrument not supported.") from e - - if instrument == InstrumentType.ARGO_FLOAT and ( - not hasattr(self, "argo_float_config") or self.argo_float_config is None - ): - raise ConfigError( - "Planning has a waypoint with Argo float instrument, but configuration does not configure Argo floats." - ) - if instrument == InstrumentType.CTD and ( - not hasattr(self, "ctd_config") or self.ctd_config is None - ): - raise ConfigError( - "Planning has a waypoint with CTD instrument, but configuration does not configure CTDs." - ) - if instrument == InstrumentType.CTD_BGC and ( - not hasattr(self, "ctd_bgc_config") or self.ctd_bgc_config is None - ): - raise ConfigError( - "Planning has a waypoint with CTD_BGC instrument, but configuration does not configure CTD_BGCs." - ) - if instrument == InstrumentType.DRIFTER and ( - not hasattr(self, "drifter_config") or self.drifter_config is None - ): - raise ConfigError( - "Planning has a waypoint with drifter instrument, but configuration does not configure drifters." 
- ) - - if instrument == InstrumentType.XBT and ( - not hasattr(self, "xbt_config") or self.xbt_config is None - ): - raise ConfigError( - "Planning has a waypoint with XBT instrument, but configuration does not configure XBT." - ) diff --git a/src/virtualship/static/expedition.yaml b/src/virtualship/static/expedition.yaml new file mode 100644 index 00000000..ec4ec7c5 --- /dev/null +++ b/src/virtualship/static/expedition.yaml @@ -0,0 +1,74 @@ +ship_config: + ship_speed_knots: 10.0 + adcp_config: + num_bins: 40 + max_depth_meter: -1000.0 + period_minutes: 5.0 + argo_float_config: + cycle_days: 10.0 + drift_days: 9.0 + drift_depth_meter: -1000.0 + max_depth_meter: -2000.0 + min_depth_meter: 0.0 + vertical_speed_meter_per_second: -0.1 + ctd_config: + max_depth_meter: -2000.0 + min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + ctd_bgc_config: + max_depth_meter: -2000.0 + min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + drifter_config: + depth_meter: 0.0 + lifetime_minutes: 60480.0 + xbt_config: + max_depth_meter: -285.0 + min_depth_meter: -2.0 + fall_speed_meter_per_second: 6.7 + deceleration_coefficient: 0.00225 + ship_underwater_st_config: + period_minutes: 5.0 +schedule: + space_time_region: + spatial_range: + minimum_longitude: -5 + maximum_longitude: 5 + minimum_latitude: -5 + maximum_latitude: 5 + minimum_depth: 0 + maximum_depth: 2000 + time_range: + start_time: 2023-01-01 00:00:00 + end_time: 2023-02-01 00:00:00 + waypoints: + - instrument: + - CTD + - CTD_BGC + location: + latitude: 0 + longitude: 0 + time: 2023-01-01 00:00:00 + - instrument: + - DRIFTER + - CTD + location: + latitude: 0.01 + longitude: 0.01 + time: 2023-01-01 01:00:00 + - instrument: + - ARGO_FLOAT + location: + latitude: 0.02 + longitude: 0.02 + time: 2023-01-01 02:00:00 + - instrument: + - XBT + location: + latitude: 0.03 + longitude: 0.03 + time: 2023-01-01 03:00:00 + - location: + latitude: 0.03 + longitude: 0.03 + time: 2023-01-01 03:00:00 diff --git 
a/src/virtualship/static/schedule.yaml b/src/virtualship/static/schedule.yaml deleted file mode 100644 index 7cb39423..00000000 --- a/src/virtualship/static/schedule.yaml +++ /dev/null @@ -1,42 +0,0 @@ -space_time_region: - spatial_range: - minimum_longitude: -5 - maximum_longitude: 5 - minimum_latitude: -5 - maximum_latitude: 5 - minimum_depth: 0 - maximum_depth: 2000 - time_range: - start_time: 2023-01-01 00:00:00 - end_time: 2023-02-01 00:00:00 -waypoints: - - instrument: - - CTD - - CTD_BGC - location: - latitude: 0 - longitude: 0 - time: 2023-01-01 00:00:00 - - instrument: - - DRIFTER - - CTD - location: - latitude: 0.01 - longitude: 0.01 - time: 2023-01-01 01:00:00 - - instrument: - - ARGO_FLOAT - location: - latitude: 0.02 - longitude: 0.02 - time: 2023-01-01 02:00:00 - - instrument: - - XBT - location: - latitude: 0.03 - longitude: 0.03 - time: 2023-01-01 03:00:00 - - location: - latitude: 0.03 - longitude: 0.03 - time: 2023-01-01 03:00:00 diff --git a/src/virtualship/static/ship_config.yaml b/src/virtualship/static/ship_config.yaml deleted file mode 100644 index 34d6c6ea..00000000 --- a/src/virtualship/static/ship_config.yaml +++ /dev/null @@ -1,30 +0,0 @@ -ship_speed_knots: 10.0 -adcp_config: - num_bins: 40 - max_depth_meter: -1000.0 - period_minutes: 5.0 -argo_float_config: - cycle_days: 10.0 - drift_days: 9.0 - drift_depth_meter: -1000.0 - max_depth_meter: -2000.0 - min_depth_meter: 0.0 - vertical_speed_meter_per_second: -0.1 -ctd_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 -ctd_bgc_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 -drifter_config: - depth_meter: 0.0 - lifetime_minutes: 60480.0 -xbt_config: - max_depth_meter: -285.0 - min_depth_meter: -2.0 - fall_speed_meter_per_second: 6.7 - deceleration_coefficient: 0.00225 -ship_underwater_st_config: - period_minutes: 5.0 diff --git a/src/virtualship/utils.py b/src/virtualship/utils.py index 
1f334f06..911d1d42 100644 --- a/src/virtualship/utils.py +++ b/src/virtualship/utils.py @@ -8,17 +8,15 @@ from pathlib import Path from typing import TYPE_CHECKING, TextIO -from yaspin import Spinner - if TYPE_CHECKING: - from virtualship.models import Schedule, ShipConfig + from virtualship.models import Expedition import pandas as pd import yaml from pydantic import BaseModel +from yaspin import Spinner -SCHEDULE = "schedule.yaml" -SHIP_CONFIG = "ship_config.yaml" +EXPEDITION = "expedition.yaml" CHECKPOINT = "checkpoint.yaml" @@ -28,15 +26,10 @@ def load_static_file(name: str) -> str: @lru_cache(None) -def get_example_config() -> str: - """Get the example configuration file.""" - return load_static_file(SHIP_CONFIG) - - @lru_cache(None) -def get_example_schedule() -> str: - """Get the example schedule file.""" - return load_static_file(SCHEDULE) +def get_example_expedition() -> str: + """Get the example unified expedition configuration file.""" + return load_static_file(EXPEDITION) def _dump_yaml(model: BaseModel, stream: TextIO) -> str | None: @@ -121,7 +114,7 @@ def validate_coordinates(coordinates_data): def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D417 """ - Generates a YAML file with spatial and temporal information based on instrument data from MFP excel file. + Generates an expedition.yaml file with spatial and temporal information based on instrument data from MFP excel file. Ship config portion of the YAML file is sourced from static version. Parameters ---------- @@ -134,9 +127,12 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 4. returns the yaml information. 
""" + # avoid circular imports from virtualship.models import ( + Expedition, Location, Schedule, + ShipConfig, SpaceTimeRegion, SpatialRange, TimeRange, @@ -188,8 +184,16 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 space_time_region=space_time_region, ) + # extract ship config object from static + config = ShipConfig.model_validate( + yaml.safe_load(get_example_expedition()).get("ship_config") + ) + + # combine to Expedition object + expedition = Expedition(schedule=schedule, ship_config=config) + # Save to YAML file - schedule.to_yaml(yaml_output_path) + expedition.to_yaml(yaml_output_path) def _validate_numeric_mins_to_timedelta(value: int | float | timedelta) -> timedelta: @@ -199,26 +203,16 @@ def _validate_numeric_mins_to_timedelta(value: int | float | timedelta) -> timed return timedelta(minutes=value) -def _get_schedule(expedition_dir: Path) -> Schedule: - """Load Schedule object from yaml config file in `expedition_dir`.""" - from virtualship.models import Schedule - - file_path = expedition_dir.joinpath(SCHEDULE) - try: - return Schedule.from_yaml(file_path) - except FileNotFoundError as e: - raise FileNotFoundError(f'Schedule not found. Save it to "{file_path}".') from e - - -def _get_ship_config(expedition_dir: Path) -> ShipConfig: - from virtualship.models import ShipConfig +def _get_expedition(expedition_dir: Path) -> Expedition: + """Load Expedition object from yaml config file in `expedition_dir`.""" + from virtualship.models import Expedition - file_path = expedition_dir.joinpath(SHIP_CONFIG) + file_path = expedition_dir.joinpath(EXPEDITION) try: - return ShipConfig.from_yaml(file_path) + return Expedition.from_yaml(file_path) except FileNotFoundError as e: raise FileNotFoundError( - f'Ship config not found. Save it to "{file_path}".' + f'{EXPEDITION} not found. Save it to "{file_path}".' 
) from e From d24f2db432d8551fa09b75579274bd98355d10ae Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:03:51 +0200 Subject: [PATCH 03/15] refactor fetch, plan and expedition functions to use unified expedition model --- src/virtualship/cli/_fetch.py | 28 +++++++-------- src/virtualship/cli/_plan.py | 38 +++++++++++++++------ src/virtualship/expedition/do_expedition.py | 30 +++++++++------- 3 files changed, 58 insertions(+), 38 deletions(-) diff --git a/src/virtualship/cli/_fetch.py b/src/virtualship/cli/_fetch.py index ac039d76..871503ee 100644 --- a/src/virtualship/cli/_fetch.py +++ b/src/virtualship/cli/_fetch.py @@ -12,8 +12,7 @@ from virtualship.utils import ( _dump_yaml, _generic_load_yaml, - _get_schedule, - _get_ship_config, + _get_expedition, ) if TYPE_CHECKING: @@ -24,7 +23,7 @@ from copernicusmarine.core_functions.credentials_utils import InvalidUsernameOrPassword import virtualship.cli._creds as creds -from virtualship.utils import SCHEDULE +from virtualship.utils import EXPEDITION DOWNLOAD_METADATA = "download_metadata.yaml" @@ -49,17 +48,18 @@ def _fetch(path: str | Path, username: str | None, password: str | None) -> None data_folder = path / "data" data_folder.mkdir(exist_ok=True) - schedule = _get_schedule(path) - ship_config = _get_ship_config(path) + expedition = _get_expedition(path) - schedule.verify( - ship_config.ship_speed_knots, + expedition.schedule.verify( + expedition.ship_config.ship_speed_knots, input_data=None, check_space_time_region=True, ignore_missing_fieldsets=True, ) - space_time_region_hash = get_space_time_region_hash(schedule.space_time_region) + space_time_region_hash = get_space_time_region_hash( + expedition.schedule.space_time_region + ) existing_download = get_existing_download(data_folder, space_time_region_hash) if existing_download is not None: @@ -72,11 +72,11 @@ def _fetch(path: str | Path, username: str | None, password: str | None) -> None username, 
password = creds.get_credentials_flow(username, password, creds_path) # Extract space_time_region details from the schedule - spatial_range = schedule.space_time_region.spatial_range - time_range = schedule.space_time_region.time_range + spatial_range = expedition.schedule.space_time_region.spatial_range + time_range = expedition.schedule.space_time_region.time_range start_datetime = time_range.start_time end_datetime = time_range.end_time - instruments_in_schedule = schedule.get_instruments() + instruments_in_schedule = expedition.schedule.get_instruments() # Create download folder and set download metadata download_folder = data_folder / hash_to_filename(space_time_region_hash) @@ -84,15 +84,15 @@ def _fetch(path: str | Path, username: str | None, password: str | None) -> None DownloadMetadata(download_complete=False).to_yaml( download_folder / DOWNLOAD_METADATA ) - shutil.copyfile(path / SCHEDULE, download_folder / SCHEDULE) + shutil.copyfile(path / EXPEDITION, download_folder / EXPEDITION) if ( ( {"XBT", "CTD", "CDT_BGC", "SHIP_UNDERWATER_ST"} & set(instrument.name for instrument in instruments_in_schedule) ) - or ship_config.ship_underwater_st_config is not None - or ship_config.adcp_config is not None + or expedition.ship_config.ship_underwater_st_config is not None + or expedition.ship_config.adcp_config is not None ): print("Ship data will be downloaded. 
Please wait...") diff --git a/src/virtualship/cli/_plan.py b/src/virtualship/cli/_plan.py index 85539e3f..9363bc71 100644 --- a/src/virtualship/cli/_plan.py +++ b/src/virtualship/cli/_plan.py @@ -30,9 +30,7 @@ type_to_textual, ) from virtualship.errors import UnexpectedError, UserError -from virtualship.models.location import Location -from virtualship.models.schedule import Schedule, Waypoint -from virtualship.models.ship_config import ( +from virtualship.models import ( ADCPConfig, ArgoFloatConfig, CTD_BGCConfig, @@ -41,13 +39,21 @@ InstrumentType, ShipConfig, ShipUnderwaterSTConfig, + Waypoint, XBTConfig, ) +from virtualship.models.expedition import Expedition +from virtualship.models.location import Location from virtualship.models.space_time_region import ( SpatialRange, TimeRange, ) +#! WORK IN PROGRESS +# TODO: this whole file can be more aggressively refactored if consolidating schedule and ship_config YAMLs! +# TODO: for example, ScheduleEditor and ConfigEditor classes could be consolidated? Expedition.yaml only needs to be read in (and written to) once. 
+ + UNEXPECTED_MSG_ONSAVE = ( "Please ensure that:\n" "\n1) All typed entries are valid (all boxes in all sections must have green borders and no warnings).\n" @@ -296,9 +302,12 @@ def __init__(self, path: str): def compose(self) -> ComposeResult: try: - self.schedule = Schedule.from_yaml(f"{self.path}/schedule.yaml") + expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") + self.schedule = expedition.schedule except Exception as e: - raise UserError(f"There is an issue in schedule.yaml:\n\n{e}") from None + raise UserError( + f"There is an issue in expedition.yaml (schedule section):\n\n{e}" + ) from None try: yield Label("[b]Schedule Editor[/b]", id="title", markup=True) @@ -584,7 +593,7 @@ def remove_waypoint(self) -> None: raise UnexpectedError(unexpected_msg_compose(e)) from None def save_changes(self) -> bool: - """Save changes to schedule.yaml.""" + """Save changes to expedition.yaml (schedule section).""" try: ## spacetime region spatial_range = SpatialRange( @@ -652,7 +661,9 @@ def save_changes(self) -> bool: wp.instrument.append(instrument) # save - self.schedule.to_yaml(f"{self.path}/schedule.yaml") + expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") + expedition.schedule = self.schedule + expedition.to_yaml(f"{self.path}/expedition.yaml") return True except Exception as e: @@ -747,9 +758,12 @@ def __init__(self, path: str): def compose(self) -> ComposeResult: try: - self.config = ShipConfig.from_yaml(f"{self.path}/ship_config.yaml") + expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") + self.config = expedition.ship_config except Exception as e: - raise UserError(f"There is an issue in ship_config.yaml:\n\n{e}") from None + raise UserError( + f"There is an issue in expedition.yaml (ship_config section):\n\n{e}" + ) from None try: ## SECTION: "Ship Speed & Onboard Measurements" @@ -958,7 +972,7 @@ def shallow_changed(self, event: Switch.Changed) -> None: deep.value = False def save_changes(self) -> bool: - 
"""Save changes to ship_config.yaml.""" + """Save changes to expedition.yaml (ship_config section).""" try: # ship speed attr = "ship_speed_knots" @@ -1010,7 +1024,9 @@ def save_changes(self) -> bool: setattr(self.config, instrument_name, config_class(**kwargs)) # save - self.config.to_yaml(f"{self.path}/ship_config.yaml") + expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") + expedition.ship_config = self.config + expedition.to_yaml(f"{self.path}/expedition.yaml") return True except Exception as e: diff --git a/src/virtualship/expedition/do_expedition.py b/src/virtualship/expedition/do_expedition.py index e4e7dc08..55820486 100644 --- a/src/virtualship/expedition/do_expedition.py +++ b/src/virtualship/expedition/do_expedition.py @@ -37,12 +37,10 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> if isinstance(expedition_dir, str): expedition_dir = Path(expedition_dir) - expedition_config = _get_expedition(expedition_dir) - ship_config = expedition_config.ship_config - schedule = expedition_config.schedule + expedition = _get_expedition(expedition_dir) # Verify ship_config file is consistent with schedule - ship_config.verify(schedule) + expedition.ship_config.verify(expedition.schedule) # load last checkpoint checkpoint = _load_checkpoint(expedition_dir) @@ -50,24 +48,28 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> checkpoint = Checkpoint(past_schedule=Schedule(waypoints=[])) # verify that schedule and checkpoint match - checkpoint.verify(schedule) + checkpoint.verify(expedition.schedule) # load fieldsets loaded_input_data = _load_input_data( expedition_dir=expedition_dir, - schedule=schedule, - ship_config=ship_config, + schedule=expedition.schedule, + ship_config=expedition.ship_config, input_data=input_data, ) print("\n---- WAYPOINT VERIFICATION ----") # verify schedule is valid - schedule.verify(ship_config.ship_speed_knots, loaded_input_data) + expedition.schedule.verify( + 
expedition.ship_config.ship_speed_knots, loaded_input_data + ) # simulate the schedule schedule_results = simulate_schedule( - projection=projection, ship_config=ship_config, schedule=schedule + projection=projection, + ship_config=expedition.ship_config, + schedule=expedition.schedule, ) if isinstance(schedule_results, ScheduleProblem): print( @@ -76,7 +78,9 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> _save_checkpoint( Checkpoint( past_schedule=Schedule( - waypoints=schedule.waypoints[: schedule_results.failed_waypoint_i] + waypoints=expedition.schedule.waypoints[ + : schedule_results.failed_waypoint_i + ] ) ), expedition_dir, @@ -91,10 +95,10 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> print("\n----- EXPEDITION SUMMARY ------") # calculate expedition cost in US$ - assert schedule.waypoints[0].time is not None, ( + assert expedition.schedule.waypoints[0].time is not None, ( "First waypoint has no time. This should not be possible as it should have been verified before." ) - time_past = schedule_results.time - schedule.waypoints[0].time + time_past = schedule_results.time - expedition.schedule.waypoints[0].time cost = expedition_cost(schedule_results, time_past) with open(expedition_dir.joinpath("results", "cost.txt"), "w") as file: file.writelines(f"cost: {cost} US$") @@ -106,7 +110,7 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> print("\nSimulating measurements. 
This may take a while...\n") simulate_measurements( expedition_dir, - ship_config, + expedition.ship_config, loaded_input_data, schedule_results.measurements_to_simulate, ) From 6d95a793b439ceeba2070e131addea50c9d338a6 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:08:02 +0200 Subject: [PATCH 04/15] reorder ship_config and schedule sections --- src/virtualship/static/expedition.yaml | 62 +++++++++++++------------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/src/virtualship/static/expedition.yaml b/src/virtualship/static/expedition.yaml index ec4ec7c5..a7328510 100644 --- a/src/virtualship/static/expedition.yaml +++ b/src/virtualship/static/expedition.yaml @@ -1,34 +1,3 @@ -ship_config: - ship_speed_knots: 10.0 - adcp_config: - num_bins: 40 - max_depth_meter: -1000.0 - period_minutes: 5.0 - argo_float_config: - cycle_days: 10.0 - drift_days: 9.0 - drift_depth_meter: -1000.0 - max_depth_meter: -2000.0 - min_depth_meter: 0.0 - vertical_speed_meter_per_second: -0.1 - ctd_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 - ctd_bgc_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 - drifter_config: - depth_meter: 0.0 - lifetime_minutes: 60480.0 - xbt_config: - max_depth_meter: -285.0 - min_depth_meter: -2.0 - fall_speed_meter_per_second: 6.7 - deceleration_coefficient: 0.00225 - ship_underwater_st_config: - period_minutes: 5.0 schedule: space_time_region: spatial_range: @@ -72,3 +41,34 @@ schedule: latitude: 0.03 longitude: 0.03 time: 2023-01-01 03:00:00 +ship_config: + ship_speed_knots: 10.0 + adcp_config: + num_bins: 40 + max_depth_meter: -1000.0 + period_minutes: 5.0 + argo_float_config: + cycle_days: 10.0 + drift_days: 9.0 + drift_depth_meter: -1000.0 + max_depth_meter: -2000.0 + min_depth_meter: 0.0 + vertical_speed_meter_per_second: -0.1 + ctd_config: + max_depth_meter: -2000.0 + 
min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + ctd_bgc_config: + max_depth_meter: -2000.0 + min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + drifter_config: + depth_meter: 0.0 + lifetime_minutes: 60480.0 + xbt_config: + max_depth_meter: -285.0 + min_depth_meter: -2.0 + fall_speed_meter_per_second: 6.7 + deceleration_coefficient: 0.00225 + ship_underwater_st_config: + period_minutes: 5.0 From 62e355ca24fe0753130246da872084199c15670c Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:03:41 +0200 Subject: [PATCH 05/15] refactor `plan` UI to interact with unified expedition.yaml file --- src/virtualship/cli/_plan.py | 1253 ++++++++++++++++------------------ 1 file changed, 592 insertions(+), 661 deletions(-) diff --git a/src/virtualship/cli/_plan.py b/src/virtualship/cli/_plan.py index 9363bc71..7c3e6646 100644 --- a/src/virtualship/cli/_plan.py +++ b/src/virtualship/cli/_plan.py @@ -1,7 +1,6 @@ import datetime import os import traceback -from typing import ClassVar from textual import on from textual.app import App, ComposeResult @@ -36,23 +35,17 @@ CTD_BGCConfig, CTDConfig, DrifterConfig, + Expedition, InstrumentType, + Location, ShipConfig, ShipUnderwaterSTConfig, - Waypoint, - XBTConfig, -) -from virtualship.models.expedition import Expedition -from virtualship.models.location import Location -from virtualship.models.space_time_region import ( SpatialRange, TimeRange, + Waypoint, + XBTConfig, ) - -#! WORK IN PROGRESS -# TODO: this whole file can be more aggressively refactored if consolidating schedule and ship_config YAMLs! -# TODO: for example, ScheduleEditor and ConfigEditor classes could be consolidated? Expedition.yaml only needs to be read in (and written to) once. 
- +from virtualship.utils import EXPEDITION UNEXPECTED_MSG_ONSAVE = ( "Please ensure that:\n" @@ -87,230 +80,231 @@ def log_exception_to_file( f.write("\n") -class WaypointWidget(Static): - def __init__(self, waypoint: Waypoint, index: int): +DEFAULT_TS_CONFIG = {"period_minutes": 5.0} + +DEFAULT_ADCP_CONFIG = { + "num_bins": 40, + "period_minutes": 5.0, +} + +INSTRUMENT_FIELDS = { + "adcp_config": { + "class": ADCPConfig, + "title": "Onboard ADCP", + "attributes": [ + {"name": "num_bins"}, + {"name": "period", "minutes": True}, + ], + }, + "ship_underwater_st_config": { + "class": ShipUnderwaterSTConfig, + "title": "Onboard Temperature/Salinity", + "attributes": [ + {"name": "period", "minutes": True}, + ], + }, + "ctd_config": { + "class": CTDConfig, + "title": "CTD", + "attributes": [ + {"name": "max_depth_meter"}, + {"name": "min_depth_meter"}, + {"name": "stationkeeping_time", "minutes": True}, + ], + }, + "ctd_bgc_config": { + "class": CTD_BGCConfig, + "title": "CTD-BGC", + "attributes": [ + {"name": "max_depth_meter"}, + {"name": "min_depth_meter"}, + {"name": "stationkeeping_time", "minutes": True}, + ], + }, + "xbt_config": { + "class": XBTConfig, + "title": "XBT", + "attributes": [ + {"name": "min_depth_meter"}, + {"name": "max_depth_meter"}, + {"name": "fall_speed_meter_per_second"}, + {"name": "deceleration_coefficient"}, + ], + }, + "argo_float_config": { + "class": ArgoFloatConfig, + "title": "Argo Float", + "attributes": [ + {"name": "min_depth_meter"}, + {"name": "max_depth_meter"}, + {"name": "drift_depth_meter"}, + {"name": "vertical_speed_meter_per_second"}, + {"name": "cycle_days"}, + {"name": "drift_days"}, + ], + }, + "drifter_config": { + "class": DrifterConfig, + "title": "Drifter", + "attributes": [ + {"name": "depth_meter"}, + {"name": "lifetime", "minutes": True}, + ], + }, +} + + +class ExpeditionEditor(Static): + def __init__(self, path: str): super().__init__() - self.waypoint = waypoint - self.index = index + self.path = path + 
self.expedition = None def compose(self) -> ComposeResult: try: - with Collapsible( - title=f"[b]Waypoint {self.index + 1}[/b]", - collapsed=True, - id=f"wp{self.index + 1}", - ): - if self.index > 0: - yield Button( - "Copy Time & Instruments from Previous", - id=f"wp{self.index}_copy", - variant="warning", - ) - yield Label("Location:") - yield Label(" Latitude:") - yield Input( - id=f"wp{self.index}_lat", - value=str(self.waypoint.location.lat) - if self.waypoint.location.lat - is not None # is not None to handle if lat is 0.0 - else "", - validators=[ - Function( - is_valid_lat, - f"INVALID: value must be {is_valid_lat.__doc__.lower()}", - ) - ], - type="number", - placeholder="°N", - classes="latitude-input", - ) - yield Label( - "", - id=f"validation-failure-label-wp{self.index}_lat", - classes="-hidden validation-failure", - ) + self.expedition = Expedition.from_yaml(self.path.joinpath(EXPEDITION)) + except Exception as e: + raise UserError( + f"There is an issue in {self.path.joinpath(EXPEDITION)}:\n\n{e}" + ) from None - yield Label(" Longitude:") - yield Input( - id=f"wp{self.index}_lon", - value=str(self.waypoint.location.lon) - if self.waypoint.location.lon - is not None # is not None to handle if lon is 0.0 - else "", - validators=[ - Function( - is_valid_lon, - f"INVALID: value must be {is_valid_lon.__doc__.lower()}", - ) - ], - type="number", - placeholder="°E", - classes="longitude-input", - ) - yield Label( - "", - id=f"validation-failure-label-wp{self.index}_lon", - classes="-hidden validation-failure", - ) + try: + ## 1) SHIP CONFIG EDITOR - yield Label("Time:") - with Horizontal(): - yield Label("Year:") - yield Select( - [ - (str(year), year) - # TODO: change from hard coding? ...flexibility for different datasets... 
- for year in range( - 2022, - datetime.datetime.now().year + 1, - ) - ], - id=f"wp{self.index}_year", - value=int(self.waypoint.time.year) - if self.waypoint.time - else Select.BLANK, - prompt="YYYY", - classes="year-select", - ) - yield Label("Month:") - yield Select( - [(f"{m:02d}", m) for m in range(1, 13)], - id=f"wp{self.index}_month", - value=int(self.waypoint.time.month) - if self.waypoint.time - else Select.BLANK, - prompt="MM", - classes="month-select", - ) - yield Label("Day:") - yield Select( - [(f"{d:02d}", d) for d in range(1, 32)], - id=f"wp{self.index}_day", - value=int(self.waypoint.time.day) - if self.waypoint.time - else Select.BLANK, - prompt="DD", - classes="day-select", - ) - yield Label("Hour:") - yield Select( - [(f"{h:02d}", h) for h in range(24)], - id=f"wp{self.index}_hour", - value=int(self.waypoint.time.hour) - if self.waypoint.time - else Select.BLANK, - prompt="hh", - classes="hour-select", - ) - yield Label("Min:") - yield Select( - [(f"{m:02d}", m) for m in range(0, 60, 5)], - id=f"wp{self.index}_minute", - value=int(self.waypoint.time.minute) - if self.waypoint.time - else Select.BLANK, - prompt="mm", - classes="minute-select", - ) + yield Label( + "[b]Ship Config Editor[/b]", id="title_ship_config", markup=True + ) + yield Rule(line_style="heavy") - yield Label("Instruments:") - for instrument in InstrumentType: - is_selected = instrument in (self.waypoint.instrument or []) - with Horizontal(): - yield Label(instrument.value) - yield Switch( - value=is_selected, id=f"wp{self.index}_{instrument.value}" - ) + # SECTION: "Ship Speed & Onboard Measurements" - if instrument.value == "DRIFTER": - yield Label("Count") - yield Input( - id=f"wp{self.index}_drifter_count", - value=str( - self.get_drifter_count() if is_selected else "" - ), - type="integer", - placeholder="# of drifters", - validators=Integer( - minimum=1, - failure_description="INVALID: value must be > 0", - ), - classes="drifter-count-input", - ) - yield Label( - "", - 
id=f"validation-failure-label-wp{self.index}_drifter_count", - classes="-hidden validation-failure", + with Collapsible( + title="[b]Ship Speed & Onboard Measurements[/b]", id="speed_collapsible" + ): + attr = "ship_speed_knots" + validators = group_validators(ShipConfig, attr) + with Horizontal(classes="ship_speed"): + yield Label("[b]Ship Speed (knots):[/b]") + yield Input( + id="speed", + type=type_to_textual(get_field_type(ShipConfig, attr)), + validators=[ + Function( + validator, + f"INVALID: value must be {validator.__doc__.lower()}", ) + for validator in validators + ], + classes="ship_speed_input", + placeholder="knots", + value=str( + self.expedition.ship_config.ship_speed_knots + if self.expedition.ship_config.ship_speed_knots + else "" + ), + ) + yield Label("", id="validation-failure-label-speed", classes="-hidden") - except Exception as e: - raise UnexpectedError(unexpected_msg_compose(e)) from None - - def get_drifter_count(self) -> int: - return sum( - 1 for inst in self.waypoint.instrument if inst == InstrumentType.DRIFTER - ) - - def copy_from_previous(self) -> None: - """Copy inputs from previous waypoint widget (time and instruments only, not lat/lon).""" - try: - if self.index > 0: - schedule_editor = self.parent - if schedule_editor: - time_components = ["year", "month", "day", "hour", "minute"] - for comp in time_components: - prev = schedule_editor.query_one(f"#wp{self.index - 1}_{comp}") - curr = self.query_one(f"#wp{self.index}_{comp}") - if prev and curr: - curr.value = prev.value - - for instrument in InstrumentType: - prev_switch = schedule_editor.query_one( - f"#wp{self.index - 1}_{instrument.value}" - ) - curr_switch = self.query_one( - f"#wp{self.index}_{instrument.value}" - ) - if prev_switch and curr_switch: - curr_switch.value = prev_switch.value - except Exception as e: - raise UnexpectedError(unexpected_msg_compose(e)) from None + with Horizontal(classes="ts-section"): + yield Label("[b]Onboard Temperature/Salinity:[/b]") + 
yield Switch( + value=bool( + self.expedition.ship_config.ship_underwater_st_config + ), + id="has_onboard_ts", + ) - @on(Button.Pressed, "Button") - def button_pressed(self, event: Button.Pressed) -> None: - if event.button.id == f"wp{self.index}_copy": - self.copy_from_previous() + with Horizontal(classes="adcp-section"): + yield Label("[b]Onboard ADCP:[/b]") + yield Switch( + value=bool(self.expedition.ship_config.adcp_config), + id="has_adcp", + ) - @on(Switch.Changed) - def on_switch_changed(self, event: Switch.Changed) -> None: - if event.switch.id == f"wp{self.index}_DRIFTER": - drifter_count_input = self.query_one( - f"#wp{self.index}_drifter_count", Input - ) - if not event.value: - drifter_count_input.value = "" - else: - if not drifter_count_input.value: - drifter_count_input.value = "1" + # adcp type selection + with Horizontal(id="adcp_type_container", classes="-hidden"): + is_deep = ( + self.expedition.ship_config.adcp_config + and self.expedition.ship_config.adcp_config.max_depth_meter + == -1000.0 + ) + yield Label(" OceanObserver:") + yield Switch(value=is_deep, id="adcp_deep") + yield Label(" SeaSeven:") + yield Switch(value=not is_deep, id="adcp_shallow") + yield Button("?", id="info_button", variant="warning") + ## SECTION: "Instrument Configurations"" -class ScheduleEditor(Static): - def __init__(self, path: str): - super().__init__() - self.path = path - self.schedule = None + with Collapsible( + title="[b]Instrument Configurations[/b] (advanced users only)", + collapsed=True, + ): + for instrument_name, info in INSTRUMENT_FIELDS.items(): + config_class = info["class"] + attributes = info["attributes"] + config_instance = getattr( + self.expedition.ship_config, instrument_name, None + ) + title = info.get("title", instrument_name.replace("_", " ").title()) + with Collapsible( + title=f"[b]{title}[/b]", + collapsed=True, + ): + if instrument_name in ( + "adcp_config", + "ship_underwater_st_config", + ): + yield Label( + f"NOTE: entries will be 
ignored here if {info['title']} is OFF in Ship Speed & Onboard Measurements." + ) + with Container(classes="instrument-config"): + for attr_meta in attributes: + attr = attr_meta["name"] + is_minutes = attr_meta.get("minutes", False) + validators = group_validators(config_class, attr) + if config_instance: + raw_value = getattr(config_instance, attr, "") + if is_minutes and raw_value != "": + try: + value = str( + raw_value.total_seconds() / 60.0 + ) + except AttributeError: + value = str(raw_value) + else: + value = str(raw_value) + else: + value = "" + label = f"{attr.replace('_', ' ').title()}:" + yield Label( + label + if not is_minutes + else label.replace(":", " Minutes:") + ) + yield Input( + id=f"{instrument_name}_{attr}", + type=type_to_textual( + get_field_type(config_class, attr) + ), + validators=[ + Function( + validator, + f"INVALID: value must be {validator.__doc__.lower()}", + ) + for validator in validators + ], + value=value, + ) + yield Label( + "", + id=f"validation-failure-label-{instrument_name}_{attr}", + classes="-hidden validation-failure", + ) - def compose(self) -> ComposeResult: - try: - expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") - self.schedule = expedition.schedule - except Exception as e: - raise UserError( - f"There is an issue in expedition.yaml (schedule section):\n\n{e}" - ) from None + ## 2) SCHEDULE EDITOR - try: - yield Label("[b]Schedule Editor[/b]", id="title", markup=True) + yield Label("[b]Schedule Editor[/b]", id="title_schedule", markup=True) yield Rule(line_style="heavy") # SECTION: "Waypoints & Instrument Selection" @@ -336,8 +330,8 @@ def compose(self) -> ComposeResult: title="[b]Space-Time Region[/b] (advanced users only)", collapsed=True, ): - if self.schedule.space_time_region: - str_data = self.schedule.space_time_region + if self.expedition.schedule.space_time_region: + str_data = self.expedition.schedule.space_time_region yield Label("Minimum Latitude:") yield Input( @@ -510,13 +504,135 @@ 
def compose(self) -> ComposeResult: def on_mount(self) -> None: self.refresh_waypoint_widgets() + adcp_present = ( + getattr(self.expedition.ship_config, "adcp_config", None) + if self.expedition.ship_config + else False + ) + self.show_hide_adcp_type(bool(adcp_present)) def refresh_waypoint_widgets(self): waypoint_list = self.query_one("#waypoint_list", VerticalScroll) waypoint_list.remove_children() - for i, waypoint in enumerate(self.schedule.waypoints): + for i, waypoint in enumerate(self.expedition.schedule.waypoints): waypoint_list.mount(WaypointWidget(waypoint, i)) + def save_changes(self) -> bool: + """Save changes to expedition.yaml.""" + try: + self._update_ship_config() + self._update_instrument_configs() + self._update_schedule() + self.expedition.to_yaml(self.path.joinpath(EXPEDITION)) + return True + except Exception as e: + log_exception_to_file( + e, + self.path, + context_message=f"Error saving {self.path.joinpath(EXPEDITION)}:", + ) + raise UnexpectedError( + UNEXPECTED_MSG_ONSAVE + + f"\n\nTraceback will be logged in {self.path}/virtualship_error.txt. Please attach this/copy the contents to any issue submitted." 
+ ) from None + + def _update_ship_config(self): + attr = "ship_speed_knots" + field_type = get_field_type(type(self.expedition.ship_config), attr) + value = field_type(self.query_one("#speed").value) + ShipConfig.model_validate( + {**self.expedition.ship_config.model_dump(), attr: value} + ) + self.expedition.ship_config.ship_speed_knots = value + + def _update_instrument_configs(self): + for instrument_name, info in INSTRUMENT_FIELDS.items(): + config_class = info["class"] + attributes = info["attributes"] + kwargs = {} + # special handling for onboard ADCP and T/S + if instrument_name == "adcp_config": + has_adcp = self.query_one("#has_adcp", Switch).value + if not has_adcp: + setattr(self.expedition.ship_config, instrument_name, None) + continue + if instrument_name == "ship_underwater_st_config": + has_ts = self.query_one("#has_onboard_ts", Switch).value + if not has_ts: + setattr(self.expedition.ship_config, instrument_name, None) + continue + for attr_meta in attributes: + attr = attr_meta["name"] + is_minutes = attr_meta.get("minutes", False) + input_id = f"{instrument_name}_{attr}" + value = self.query_one(f"#{input_id}").value + field_type = get_field_type(config_class, attr) + if is_minutes and field_type is datetime.timedelta: + value = datetime.timedelta(minutes=float(value)) + else: + value = field_type(value) + kwargs[attr] = value + # ADCP max_depth_meter based on deep/shallow switch + if instrument_name == "adcp_config": + if self.query_one("#adcp_deep", Switch).value: + kwargs["max_depth_meter"] = -1000.0 + else: + kwargs["max_depth_meter"] = -150.0 + setattr( + self.expedition.ship_config, instrument_name, config_class(**kwargs) + ) + + def _update_schedule(self): + spatial_range = SpatialRange( + minimum_longitude=self.query_one("#min_lon").value, + maximum_longitude=self.query_one("#max_lon").value, + minimum_latitude=self.query_one("#min_lat").value, + maximum_latitude=self.query_one("#max_lat").value, + 
minimum_depth=self.query_one("#min_depth").value, + maximum_depth=self.query_one("#max_depth").value, + ) + start_time_input = self.query_one("#start_time").value + end_time_input = self.query_one("#end_time").value + waypoint_times = [ + wp.time + for wp in self.expedition.schedule.waypoints + if hasattr(wp, "time") and wp.time + ] + if not start_time_input and waypoint_times: + start_time = min(waypoint_times) + else: + start_time = start_time_input + if not end_time_input and waypoint_times: + end_time = max(waypoint_times) + datetime.timedelta(minutes=60480.0) + else: + end_time = end_time_input + time_range = TimeRange(start_time=start_time, end_time=end_time) + self.expedition.schedule.space_time_region.spatial_range = spatial_range + self.expedition.schedule.space_time_region.time_range = time_range + for i, wp in enumerate(self.expedition.schedule.waypoints): + wp.location = Location( + latitude=float(self.query_one(f"#wp{i}_lat").value), + longitude=float(self.query_one(f"#wp{i}_lon").value), + ) + wp.time = datetime.datetime( + int(self.query_one(f"#wp{i}_year").value), + int(self.query_one(f"#wp{i}_month").value), + int(self.query_one(f"#wp{i}_day").value), + int(self.query_one(f"#wp{i}_hour").value), + int(self.query_one(f"#wp{i}_minute").value), + 0, + ) + wp.instrument = [] + for instrument in InstrumentType: + switch_on = self.query_one(f"#wp{i}_{instrument.value}").value + if instrument.value == "DRIFTER" and switch_on: + count_str = self.query_one(f"#wp{i}_drifter_count").value + count = int(count_str) + assert count > 0 + wp.instrument.extend([InstrumentType.DRIFTER] * count) + elif switch_on: + wp.instrument.append(instrument) + @on(Input.Changed) def show_invalid_reasons(self, event: Input.Changed) -> None: input_id = event.input.id @@ -556,8 +672,8 @@ def show_invalid_reasons(self, event: Input.Changed) -> None: def add_waypoint(self) -> None: """Add a new waypoint to the schedule. 
Copies time from last waypoint if possible (Lat/lon and instruments blank).""" try: - if self.schedule.waypoints: - last_wp = self.schedule.waypoints[-1] + if self.expedition.schedule.waypoints: + last_wp = self.expedition.schedule.waypoints[-1] new_time = last_wp.time if last_wp.time else None new_wp = Waypoint( location=Location( @@ -567,325 +683,27 @@ def add_waypoint(self) -> None: time=new_time, instrument=[], ) - else: - new_wp = Waypoint( - location=Location(latitude=0.0, longitude=0.0), - time=None, - instrument=[], - ) - self.schedule.waypoints.append(new_wp) - self.refresh_waypoint_widgets() - - except Exception as e: - raise UnexpectedError(unexpected_msg_compose(e)) from None - - @on(Button.Pressed, "#remove_waypoint") - def remove_waypoint(self) -> None: - """Remove the last waypoint from the schedule.""" - try: - if self.schedule.waypoints: - self.schedule.waypoints.pop() - self.refresh_waypoint_widgets() - else: - self.notify("No waypoints to remove.", severity="error", timeout=5) - - except Exception as e: - raise UnexpectedError(unexpected_msg_compose(e)) from None - - def save_changes(self) -> bool: - """Save changes to expedition.yaml (schedule section).""" - try: - ## spacetime region - spatial_range = SpatialRange( - minimum_longitude=self.query_one("#min_lon").value, - maximum_longitude=self.query_one("#max_lon").value, - minimum_latitude=self.query_one("#min_lat").value, - maximum_latitude=self.query_one("#max_lat").value, - minimum_depth=self.query_one("#min_depth").value, - maximum_depth=self.query_one("#max_depth").value, - ) - - # auto fill start and end times if input is blank - start_time_input = self.query_one("#start_time").value - end_time_input = self.query_one("#end_time").value - waypoint_times = [ - wp.time - for wp in self.schedule.waypoints - if hasattr(wp, "time") and wp.time - ] - - if not start_time_input and waypoint_times: - start_time = min(waypoint_times) - else: - start_time = start_time_input - - if not end_time_input 
and waypoint_times: - end_time = max(waypoint_times) + datetime.timedelta( - minutes=60480.0 - ) # with buffer (corresponds to default drifter lifetime) - else: - end_time = end_time_input - - time_range = TimeRange( - start_time=start_time, - end_time=end_time, - ) - - self.schedule.space_time_region.spatial_range = spatial_range - self.schedule.space_time_region.time_range = time_range - - ## waypoints - for i, wp in enumerate(self.schedule.waypoints): - wp.location = Location( - latitude=float(self.query_one(f"#wp{i}_lat").value), - longitude=float(self.query_one(f"#wp{i}_lon").value), - ) - wp.time = datetime.datetime( - int(self.query_one(f"#wp{i}_year").value), - int(self.query_one(f"#wp{i}_month").value), - int(self.query_one(f"#wp{i}_day").value), - int(self.query_one(f"#wp{i}_hour").value), - int(self.query_one(f"#wp{i}_minute").value), - 0, - ) - - wp.instrument = [] - for instrument in InstrumentType: - switch_on = self.query_one(f"#wp{i}_{instrument.value}").value - if instrument.value == "DRIFTER" and switch_on: - count_str = self.query_one(f"#wp{i}_drifter_count").value - count = int(count_str) - assert count > 0 - wp.instrument.extend([InstrumentType.DRIFTER] * count) - elif switch_on: - wp.instrument.append(instrument) - - # save - expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") - expedition.schedule = self.schedule - expedition.to_yaml(f"{self.path}/expedition.yaml") - return True - - except Exception as e: - log_exception_to_file( - e, self.path, context_message="Error saving schedule:" - ) - - raise UnexpectedError( - UNEXPECTED_MSG_ONSAVE - + f"\n\nTraceback will be logged in {self.path}/virtualship_error.txt. Please attach this/copy the contents to any issue submitted." 
- ) from None - - -class ConfigEditor(Container): - DEFAULT_ADCP_CONFIG: ClassVar[dict[str, float]] = { - "num_bins": 40, - "period_minutes": 5.0, - } - - DEFAULT_TS_CONFIG: ClassVar[dict[str, float]] = {"period_minutes": 5.0} - - INSTRUMENT_FIELDS: ClassVar[dict[str, dict]] = { - "adcp_config": { - "class": ADCPConfig, - "title": "Onboard ADCP", - "attributes": [ - {"name": "num_bins"}, - {"name": "period", "minutes": True}, - ], - }, - "ship_underwater_st_config": { - "class": ShipUnderwaterSTConfig, - "title": "Onboard Temperature/Salinity", - "attributes": [ - {"name": "period", "minutes": True}, - ], - }, - "ctd_config": { - "class": CTDConfig, - "title": "CTD", - "attributes": [ - {"name": "max_depth_meter"}, - {"name": "min_depth_meter"}, - {"name": "stationkeeping_time", "minutes": True}, - ], - }, - "ctd_bgc_config": { - "class": CTD_BGCConfig, - "title": "CTD-BGC", - "attributes": [ - {"name": "max_depth_meter"}, - {"name": "min_depth_meter"}, - {"name": "stationkeeping_time", "minutes": True}, - ], - }, - "xbt_config": { - "class": XBTConfig, - "title": "XBT", - "attributes": [ - {"name": "min_depth_meter"}, - {"name": "max_depth_meter"}, - {"name": "fall_speed_meter_per_second"}, - {"name": "deceleration_coefficient"}, - ], - }, - "argo_float_config": { - "class": ArgoFloatConfig, - "title": "Argo Float", - "attributes": [ - {"name": "min_depth_meter"}, - {"name": "max_depth_meter"}, - {"name": "drift_depth_meter"}, - {"name": "vertical_speed_meter_per_second"}, - {"name": "cycle_days"}, - {"name": "drift_days"}, - ], - }, - "drifter_config": { - "class": DrifterConfig, - "title": "Drifter", - "attributes": [ - {"name": "depth_meter"}, - {"name": "lifetime", "minutes": True}, - ], - }, - } - - def __init__(self, path: str): - super().__init__() - self.path = path - self.config = None - - def compose(self) -> ComposeResult: - try: - expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") - self.config = expedition.ship_config - except Exception 
as e: - raise UserError( - f"There is an issue in expedition.yaml (ship_config section):\n\n{e}" - ) from None - - try: - ## SECTION: "Ship Speed & Onboard Measurements" - - yield Label("[b]Ship Config Editor[/b]", id="title", markup=True) - yield Rule(line_style="heavy") - - with Collapsible( - title="[b]Ship Speed & Onboard Measurements[/b]", id="speed_collapsible" - ): - attr = "ship_speed_knots" - validators = group_validators(ShipConfig, attr) - with Horizontal(classes="ship_speed"): - yield Label("[b]Ship Speed (knots):[/b]") - yield Input( - id="speed", - type=type_to_textual(get_field_type(ShipConfig, attr)), - validators=[ - Function( - validator, - f"INVALID: value must be {validator.__doc__.lower()}", - ) - for validator in validators - ], - classes="ship_speed_input", - placeholder="knots", - value=str( - self.config.ship_speed_knots - if self.config.ship_speed_knots - else "" - ), - ) - yield Label("", id="validation-failure-label-speed", classes="-hidden") - - with Horizontal(classes="ts-section"): - yield Label("[b]Onboard Temperature/Salinity:[/b]") - yield Switch( - value=bool(self.config.ship_underwater_st_config), - id="has_onboard_ts", - ) - - with Horizontal(classes="adcp-section"): - yield Label("[b]Onboard ADCP:[/b]") - yield Switch(value=bool(self.config.adcp_config), id="has_adcp") - - # adcp type selection - with Horizontal(id="adcp_type_container", classes="-hidden"): - is_deep = ( - self.config.adcp_config - and self.config.adcp_config.max_depth_meter == -1000.0 - ) - yield Label(" OceanObserver:") - yield Switch(value=is_deep, id="adcp_deep") - yield Label(" SeaSeven:") - yield Switch(value=not is_deep, id="adcp_shallow") - yield Button("?", id="info_button", variant="warning") - - ## SECTION: "Instrument Configurations"" - - with Collapsible( - title="[b]Instrument Configurations[/b] (advanced users only)", - collapsed=True, - ): - for instrument_name, info in self.INSTRUMENT_FIELDS.items(): - config_class = info["class"] - attributes 
= info["attributes"] - config_instance = getattr(self.config, instrument_name, None) - title = info.get("title", instrument_name.replace("_", " ").title()) - with Collapsible( - title=f"[b]{title}[/b]", - collapsed=True, - ): - if instrument_name in ( - "adcp_config", - "ship_underwater_st_config", - ): - yield Label( - f"NOTE: entries will be ignored here if {info['title']} is OFF in Ship Speed & Onboard Measurements." - ) - with Container(classes="instrument-config"): - for attr_meta in attributes: - attr = attr_meta["name"] - is_minutes = attr_meta.get("minutes", False) - validators = group_validators(config_class, attr) - if config_instance: - raw_value = getattr(config_instance, attr, "") - if is_minutes and raw_value != "": - try: - value = str( - raw_value.total_seconds() / 60.0 - ) - except AttributeError: - value = str(raw_value) - else: - value = str(raw_value) - else: - value = "" - label = f"{attr.replace('_', ' ').title()}:" - yield Label( - label - if not is_minutes - else label.replace(":", " Minutes:") - ) - yield Input( - id=f"{instrument_name}_{attr}", - type=type_to_textual( - get_field_type(config_class, attr) - ), - validators=[ - Function( - validator, - f"INVALID: value must be {validator.__doc__.lower()}", - ) - for validator in validators - ], - value=value, - ) - yield Label( - "", - id=f"validation-failure-label-{instrument_name}_{attr}", - classes="-hidden validation-failure", - ) + else: + new_wp = Waypoint( + location=Location(latitude=0.0, longitude=0.0), + time=None, + instrument=[], + ) + self.expedition.schedule.waypoints.append(new_wp) + self.refresh_waypoint_widgets() + + except Exception as e: + raise UnexpectedError(unexpected_msg_compose(e)) from None + + @on(Button.Pressed, "#remove_waypoint") + def remove_waypoint(self) -> None: + """Remove the last waypoint from the schedule.""" + try: + if self.expedition.schedule.waypoints: + self.expedition.schedule.waypoints.pop() + self.refresh_waypoint_widgets() + else: + 
self.notify("No waypoints to remove.", severity="error", timeout=5) except Exception as e: raise UnexpectedError(unexpected_msg_compose(e)) from None @@ -899,31 +717,6 @@ def info_pressed(self) -> None: timeout=20, ) - @on(Input.Changed) - def show_invalid_reasons(self, event: Input.Changed) -> None: - input_id = event.input.id - label_id = f"validation-failure-label-{input_id}" - label = self.query_one(f"#{label_id}", Label) - if not event.validation_result.is_valid: - message = ( - "\n".join(event.validation_result.failure_descriptions) - if isinstance(event.validation_result.failure_descriptions, list) - else str(event.validation_result.failure_descriptions) - ) - label.update(message) - label.remove_class("-hidden") - label.add_class("validation-failure") - else: - label.update("") - label.add_class("-hidden") - label.remove_class("validation-failure") - - def on_mount(self) -> None: - adcp_present = ( - getattr(self.config, "adcp_config", None) if self.config else False - ) - self.show_hide_adcp_type(bool(adcp_present)) - def show_hide_adcp_type(self, show: bool) -> None: container = self.query_one("#adcp_type_container") if show: @@ -933,29 +726,29 @@ def show_hide_adcp_type(self, show: bool) -> None: def _set_adcp_default_values(self): self.query_one("#adcp_config_num_bins").value = str( - self.DEFAULT_ADCP_CONFIG["num_bins"] + DEFAULT_ADCP_CONFIG["num_bins"] ) self.query_one("#adcp_config_period").value = str( - self.DEFAULT_ADCP_CONFIG["period_minutes"] + DEFAULT_ADCP_CONFIG["period_minutes"] ) self.query_one("#adcp_shallow").value = False self.query_one("#adcp_deep").value = True def _set_ts_default_values(self): self.query_one("#ship_underwater_st_config_period").value = str( - self.DEFAULT_TS_CONFIG["period_minutes"] + DEFAULT_TS_CONFIG["period_minutes"] ) @on(Switch.Changed, "#has_adcp") def on_adcp_toggle(self, event: Switch.Changed) -> None: self.show_hide_adcp_type(event.value) - if event.value and not self.config.adcp_config: + if event.value and 
not self.expedition.ship_config.adcp_config: # ADCP was turned on and was previously null self._set_adcp_default_values() @on(Switch.Changed, "#has_onboard_ts") def on_ts_toggle(self, event: Switch.Changed) -> None: - if event.value and not self.config.ship_underwater_st_config: + if event.value and not self.expedition.ship_config.ship_underwater_st_config: # T/S was turned on and was previously null self._set_ts_default_values() @@ -971,70 +764,212 @@ def shallow_changed(self, event: Switch.Changed) -> None: deep = self.query_one("#adcp_deep", Switch) deep.value = False - def save_changes(self) -> bool: - """Save changes to expedition.yaml (ship_config section).""" + +class WaypointWidget(Static): + def __init__(self, waypoint: Waypoint, index: int): + super().__init__() + self.waypoint = waypoint + self.index = index + + def compose(self) -> ComposeResult: try: - # ship speed - attr = "ship_speed_knots" - field_type = get_field_type(type(self.config), attr) - value = field_type(self.query_one("#speed").value) - ShipConfig.model_validate( - {**self.config.model_dump(), attr: value} - ) # validate using a temporary model (raises if invalid) - self.config.ship_speed_knots = value - - # individual instrument configurations - for instrument_name, info in self.INSTRUMENT_FIELDS.items(): - config_class = info["class"] - attributes = info["attributes"] - kwargs = {} - - # special handling for onboard ADCP and T/S - # will skip to next instrument if toggle is off - if instrument_name == "adcp_config": - has_adcp = self.query_one("#has_adcp", Switch).value - if not has_adcp: - setattr(self.config, instrument_name, None) - continue - if instrument_name == "ship_underwater_st_config": - has_ts = self.query_one("#has_onboard_ts", Switch).value - if not has_ts: - setattr(self.config, instrument_name, None) - continue - - for attr_meta in attributes: - attr = attr_meta["name"] - is_minutes = attr_meta.get("minutes", False) - input_id = f"{instrument_name}_{attr}" - value = 
self.query_one(f"#{input_id}").value - field_type = get_field_type(config_class, attr) - if is_minutes and field_type is datetime.timedelta: - value = datetime.timedelta(minutes=float(value)) - else: - value = field_type(value) - kwargs[attr] = value - - # ADCP max_depth_meter based on deep/shallow switch - if instrument_name == "adcp_config": - if self.query_one("#adcp_deep", Switch).value: - kwargs["max_depth_meter"] = -1000.0 - else: - kwargs["max_depth_meter"] = -150.0 - - setattr(self.config, instrument_name, config_class(**kwargs)) - - # save - expedition = Expedition.from_yaml(f"{self.path}/expedition.yaml") - expedition.ship_config = self.config - expedition.to_yaml(f"{self.path}/expedition.yaml") - return True + with Collapsible( + title=f"[b]Waypoint {self.index + 1}[/b]", + collapsed=True, + id=f"wp{self.index + 1}", + ): + if self.index > 0: + yield Button( + "Copy Time & Instruments from Previous", + id=f"wp{self.index}_copy", + variant="warning", + ) + yield Label("Location:") + yield Label(" Latitude:") + yield Input( + id=f"wp{self.index}_lat", + value=str(self.waypoint.location.lat) + if self.waypoint.location.lat + is not None # is not None to handle if lat is 0.0 + else "", + validators=[ + Function( + is_valid_lat, + f"INVALID: value must be {is_valid_lat.__doc__.lower()}", + ) + ], + type="number", + placeholder="°N", + classes="latitude-input", + ) + yield Label( + "", + id=f"validation-failure-label-wp{self.index}_lat", + classes="-hidden validation-failure", + ) + + yield Label(" Longitude:") + yield Input( + id=f"wp{self.index}_lon", + value=str(self.waypoint.location.lon) + if self.waypoint.location.lon + is not None # is not None to handle if lon is 0.0 + else "", + validators=[ + Function( + is_valid_lon, + f"INVALID: value must be {is_valid_lon.__doc__.lower()}", + ) + ], + type="number", + placeholder="°E", + classes="longitude-input", + ) + yield Label( + "", + id=f"validation-failure-label-wp{self.index}_lon", + classes="-hidden 
validation-failure", + ) + + yield Label("Time:") + with Horizontal(): + yield Label("Year:") + yield Select( + [ + (str(year), year) + # TODO: change from hard coding? ...flexibility for different datasets... + for year in range( + 2022, + datetime.datetime.now().year + 1, + ) + ], + id=f"wp{self.index}_year", + value=int(self.waypoint.time.year) + if self.waypoint.time + else Select.BLANK, + prompt="YYYY", + classes="year-select", + ) + yield Label("Month:") + yield Select( + [(f"{m:02d}", m) for m in range(1, 13)], + id=f"wp{self.index}_month", + value=int(self.waypoint.time.month) + if self.waypoint.time + else Select.BLANK, + prompt="MM", + classes="month-select", + ) + yield Label("Day:") + yield Select( + [(f"{d:02d}", d) for d in range(1, 32)], + id=f"wp{self.index}_day", + value=int(self.waypoint.time.day) + if self.waypoint.time + else Select.BLANK, + prompt="DD", + classes="day-select", + ) + yield Label("Hour:") + yield Select( + [(f"{h:02d}", h) for h in range(24)], + id=f"wp{self.index}_hour", + value=int(self.waypoint.time.hour) + if self.waypoint.time + else Select.BLANK, + prompt="hh", + classes="hour-select", + ) + yield Label("Min:") + yield Select( + [(f"{m:02d}", m) for m in range(0, 60, 5)], + id=f"wp{self.index}_minute", + value=int(self.waypoint.time.minute) + if self.waypoint.time + else Select.BLANK, + prompt="mm", + classes="minute-select", + ) + + yield Label("Instruments:") + for instrument in InstrumentType: + is_selected = instrument in (self.waypoint.instrument or []) + with Horizontal(): + yield Label(instrument.value) + yield Switch( + value=is_selected, id=f"wp{self.index}_{instrument.value}" + ) + + if instrument.value == "DRIFTER": + yield Label("Count") + yield Input( + id=f"wp{self.index}_drifter_count", + value=str( + self.get_drifter_count() if is_selected else "" + ), + type="integer", + placeholder="# of drifters", + validators=Integer( + minimum=1, + failure_description="INVALID: value must be > 0", + ), + 
classes="drifter-count-input", + ) + yield Label( + "", + id=f"validation-failure-label-wp{self.index}_drifter_count", + classes="-hidden validation-failure", + ) except Exception as e: - log_exception_to_file( - e, self.path, context_message="Error saving ship config:" - ) + raise UnexpectedError(unexpected_msg_compose(e)) from None + + def get_drifter_count(self) -> int: + return sum( + 1 for inst in self.waypoint.instrument if inst == InstrumentType.DRIFTER + ) + + def copy_from_previous(self) -> None: + """Copy inputs from previous waypoint widget (time and instruments only, not lat/lon).""" + try: + if self.index > 0: + schedule_editor = self.parent + if schedule_editor: + time_components = ["year", "month", "day", "hour", "minute"] + for comp in time_components: + prev = schedule_editor.query_one(f"#wp{self.index - 1}_{comp}") + curr = self.query_one(f"#wp{self.index}_{comp}") + if prev and curr: + curr.value = prev.value + + for instrument in InstrumentType: + prev_switch = schedule_editor.query_one( + f"#wp{self.index - 1}_{instrument.value}" + ) + curr_switch = self.query_one( + f"#wp{self.index}_{instrument.value}" + ) + if prev_switch and curr_switch: + curr_switch.value = prev_switch.value + except Exception as e: + raise UnexpectedError(unexpected_msg_compose(e)) from None + + @on(Button.Pressed, "Button") + def button_pressed(self, event: Button.Pressed) -> None: + if event.button.id == f"wp{self.index}_copy": + self.copy_from_previous() - raise UnexpectedError(UNEXPECTED_MSG_ONSAVE) from None + @on(Switch.Changed) + def on_switch_changed(self, event: Switch.Changed) -> None: + if event.switch.id == f"wp{self.index}_DRIFTER": + drifter_count_input = self.query_one( + f"#wp{self.index}_drifter_count", Input + ) + if not event.value: + drifter_count_input.value = "" + else: + if not drifter_count_input.value: + drifter_count_input.value = "1" class PlanScreen(Screen): @@ -1045,8 +980,7 @@ def __init__(self, path: str): def compose(self) -> 
ComposeResult: try: with VerticalScroll(): - yield ConfigEditor(self.path) - yield ScheduleEditor(self.path) + yield ExpeditionEditor(self.path) with Horizontal(): yield Button("Save Changes", id="save_button", variant="success") yield Button("Exit", id="exit_button", variant="error") @@ -1055,20 +989,20 @@ def compose(self) -> ComposeResult: def sync_ui_waypoints(self): """Update the waypoints models with current UI values (spacetime only) from the live UI inputs.""" - schedule_editor = self.query_one(ScheduleEditor) + expedition_editor = self.query_one(ExpeditionEditor) errors = [] - for i, wp in enumerate(schedule_editor.schedule.waypoints): + for i, wp in enumerate(expedition_editor.expedition.schedule.waypoints): try: wp.location = Location( - latitude=float(schedule_editor.query_one(f"#wp{i}_lat").value), - longitude=float(schedule_editor.query_one(f"#wp{i}_lon").value), + latitude=float(expedition_editor.query_one(f"#wp{i}_lat").value), + longitude=float(expedition_editor.query_one(f"#wp{i}_lon").value), ) wp.time = datetime.datetime( - int(schedule_editor.query_one(f"#wp{i}_year").value), - int(schedule_editor.query_one(f"#wp{i}_month").value), - int(schedule_editor.query_one(f"#wp{i}_day").value), - int(schedule_editor.query_one(f"#wp{i}_hour").value), - int(schedule_editor.query_one(f"#wp{i}_minute").value), + int(expedition_editor.query_one(f"#wp{i}_year").value), + int(expedition_editor.query_one(f"#wp{i}_month").value), + int(expedition_editor.query_one(f"#wp{i}_day").value), + int(expedition_editor.query_one(f"#wp{i}_hour").value), + int(expedition_editor.query_one(f"#wp{i}_minute").value), 0, ) except Exception as e: @@ -1091,26 +1025,24 @@ def exit_pressed(self) -> None: @on(Button.Pressed, "#save_button") def save_pressed(self) -> None: """Save button press.""" - config_editor = self.query_one(ConfigEditor) - schedule_editor = self.query_one(ScheduleEditor) + expedition_editor = self.query_one(ExpeditionEditor) try: - ship_speed_value = 
self.get_ship_speed(config_editor) + ship_speed_value = self.get_ship_speed(expedition_editor) self.sync_ui_waypoints() # call to ensure waypoint inputs are synced # verify schedule - schedule_editor.schedule.verify( + expedition_editor.expedition.schedule.verify( ship_speed_value, input_data=None, check_space_time_region=True, ignore_missing_fieldsets=True, ) - config_saved = config_editor.save_changes() - schedule_saved = schedule_editor.save_changes() + expedition_saved = expedition_editor.save_changes() - if config_saved and schedule_saved: + if expedition_saved: self.notify( "Changes saved successfully", severity="information", @@ -1125,9 +1057,9 @@ def save_pressed(self) -> None: ) return False - def get_ship_speed(self, config_editor): + def get_ship_speed(self, expedition_editor): try: - ship_speed = float(config_editor.query_one("#speed").value) + ship_speed = float(expedition_editor.query_one("#speed").value) assert ship_speed > 0 except Exception as e: log_exception_to_file( @@ -1146,12 +1078,6 @@ class PlanApp(App): align: center middle; } - ConfigEditor { - padding: 1; - margin-bottom: 1; - height: auto; - } - VerticalScroll { width: 100%; height: 100%; @@ -1226,7 +1152,12 @@ class PlanApp(App): margin: 0 1; } - #title { + #title_ship_config { + text-style: bold; + padding: 1; + } + + #title_schedule { text-style: bold; padding: 1; } From 2765b63f2e6784fb56f1f20fcd6691917f7a3eb1 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:11:29 +0200 Subject: [PATCH 06/15] update test_UI_changes to use unified expedition.yaml --- tests/cli/test_plan.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/tests/cli/test_plan.py b/tests/cli/test_plan.py index 6fef90a1..421feba0 100644 --- a/tests/cli/test_plan.py +++ b/tests/cli/test_plan.py @@ -9,7 +9,8 @@ import yaml from textual.widgets import Button, Collapsible, Input -from virtualship.cli._plan import 
ConfigEditor, PlanApp, ScheduleEditor +from virtualship.cli._plan import ExpeditionEditor, PlanApp +from virtualship.utils import EXPEDITION NEW_SPEED = "8.0" NEW_LAT = "0.05" @@ -33,12 +34,8 @@ async def test_UI_changes(): tmpdir = Path(tempfile.mkdtemp()) shutil.copy( - files("virtualship.static").joinpath("ship_config.yaml"), - tmpdir / "ship_config.yaml", - ) - shutil.copy( - files("virtualship.static").joinpath("schedule.yaml"), - tmpdir / "schedule.yaml", + files("virtualship.static").joinpath(EXPEDITION), + tmpdir / EXPEDITION, ) app = PlanApp(path=tmpdir) @@ -47,22 +44,23 @@ async def test_UI_changes(): await pilot.pause(0.5) plan_screen = pilot.app.screen - config_editor = plan_screen.query_one(ConfigEditor) - schedule_editor = plan_screen.query_one(ScheduleEditor) + expedition_editor = plan_screen.query_one(ExpeditionEditor) # get mock of UI notify method plan_screen.notify = MagicMock() # change ship speed - speed_collapsible = config_editor.query_one("#speed_collapsible", Collapsible) + speed_collapsible = expedition_editor.query_one( + "#speed_collapsible", Collapsible + ) if speed_collapsible.collapsed: speed_collapsible.collapsed = False await pilot.pause() - ship_speed_input = config_editor.query_one("#speed", Input) + ship_speed_input = expedition_editor.query_one("#speed", Input) await simulate_input(pilot, ship_speed_input, NEW_SPEED) # change waypoint lat/lon (e.g. 
first waypoint) - waypoints_collapsible = schedule_editor.query_one("#waypoints", Collapsible) + waypoints_collapsible = expedition_editor.query_one("#waypoints", Collapsible) if waypoints_collapsible.collapsed: waypoints_collapsible.collapsed = False await pilot.pause() @@ -104,11 +102,11 @@ async def test_UI_changes(): ) # verify changes to speed, lat, lon in saved YAML - ship_config_path = os.path.join(tmpdir, "ship_config.yaml") - with open(ship_config_path) as f: - saved_config = yaml.safe_load(f) + expedition_path = os.path.join(tmpdir, EXPEDITION) + with open(expedition_path) as f: + saved_expedition = yaml.safe_load(f) - assert saved_config["ship_speed_knots"] == float(NEW_SPEED) + assert saved_expedition["ship_config"]["ship_speed_knots"] == float(NEW_SPEED) # check schedule.verify() methods are working by purposefully making invalid schedule (i.e. ship speed too slow to reach waypoints) invalid_speed = "0.0001" From e968038848d236a3d0e284874eb352d51fc4cd55 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Fri, 17 Oct 2025 15:22:30 +0200 Subject: [PATCH 07/15] update methods to use unified expedition.yaml --- src/virtualship/cli/commands.py | 2 +- src/virtualship/expedition/do_expedition.py | 3 +- .../expedition/simulate_schedule.py | 87 ++++++++++--------- 3 files changed, 47 insertions(+), 45 deletions(-) diff --git a/src/virtualship/cli/commands.py b/src/virtualship/cli/commands.py index 666536eb..3e83be3b 100644 --- a/src/virtualship/cli/commands.py +++ b/src/virtualship/cli/commands.py @@ -43,7 +43,7 @@ def init(path, from_mfp): if from_mfp: mfp_file = Path(from_mfp) - # Generate schedule.yaml from the MPF file + # Generate expedition.yaml from the MPF file click.echo(f"Generating schedule from {mfp_file}...") mfp_to_yaml(mfp_file, expedition) click.echo( diff --git a/src/virtualship/expedition/do_expedition.py b/src/virtualship/expedition/do_expedition.py index 55820486..b1e44dc4 100644 --- 
a/src/virtualship/expedition/do_expedition.py +++ b/src/virtualship/expedition/do_expedition.py @@ -68,8 +68,7 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> # simulate the schedule schedule_results = simulate_schedule( projection=projection, - ship_config=expedition.ship_config, - schedule=expedition.schedule, + expedition=expedition, ) if isinstance(schedule_results, ScheduleProblem): print( diff --git a/src/virtualship/expedition/simulate_schedule.py b/src/virtualship/expedition/simulate_schedule.py index 95fa2f5f..9198d7b3 100644 --- a/src/virtualship/expedition/simulate_schedule.py +++ b/src/virtualship/expedition/simulate_schedule.py @@ -13,10 +13,9 @@ from virtualship.instruments.drifter import Drifter from virtualship.instruments.xbt import XBT from virtualship.models import ( + Expedition, InstrumentType, Location, - Schedule, - ShipConfig, Spacetime, Waypoint, ) @@ -52,7 +51,7 @@ class MeasurementsToSimulate: def simulate_schedule( - projection: pyproj.Geod, ship_config: ShipConfig, schedule: Schedule + projection: pyproj.Geod, expedition: Expedition ) -> ScheduleOk | ScheduleProblem: """ Simulate a schedule. @@ -62,13 +61,12 @@ def simulate_schedule( :param schedule: The schedule to simulate. :returns: Either the results of a successfully simulated schedule, or information on where the schedule became infeasible. 
""" - return _ScheduleSimulator(projection, ship_config, schedule).simulate() + return _ScheduleSimulator(projection, expedition).simulate() class _ScheduleSimulator: _projection: pyproj.Geod - _ship_config: ShipConfig - _schedule: Schedule + _expedition: Expedition _time: datetime """Current time.""" @@ -82,18 +80,15 @@ class _ScheduleSimulator: _next_ship_underwater_st_time: datetime """Next moment ship underwater ST measurement will be done.""" - def __init__( - self, projection: pyproj.Geod, ship_config: ShipConfig, schedule: Schedule - ) -> None: + def __init__(self, projection: pyproj.Geod, expedition: Expedition) -> None: self._projection = projection - self._ship_config = ship_config - self._schedule = schedule + self._expedition = expedition - assert self._schedule.waypoints[0].time is not None, ( + assert self._expedition.schedule.waypoints[0].time is not None, ( "First waypoint must have a time. This should have been verified before calling this function." ) - self._time = schedule.waypoints[0].time - self._location = schedule.waypoints[0].location + self._time = expedition.schedule.waypoints[0].time + self._location = expedition.schedule.waypoints[0].location self._measurements_to_simulate = MeasurementsToSimulate() @@ -101,7 +96,7 @@ def __init__( self._next_ship_underwater_st_time = self._time def simulate(self) -> ScheduleOk | ScheduleProblem: - for wp_i, waypoint in enumerate(self._schedule.waypoints): + for wp_i, waypoint in enumerate(self._expedition.schedule.waypoints): # sail towards waypoint self._progress_time_traveling_towards(waypoint.location) @@ -131,7 +126,9 @@ def _progress_time_traveling_towards(self, location: Location) -> None: lons2=location.lon, lats2=location.lat, ) - ship_speed_meter_per_second = self._ship_config.ship_speed_knots * 1852 / 3600 + ship_speed_meter_per_second = ( + self._expedition.ship_config.ship_speed_knots * 1852 / 3600 + ) azimuth1 = geodinv[0] distance_to_next_waypoint = geodinv[2] time_to_reach = timedelta( 
@@ -140,7 +137,7 @@ def _progress_time_traveling_towards(self, location: Location) -> None: end_time = self._time + time_to_reach # note all ADCP measurements - if self._ship_config.adcp_config is not None: + if self._expedition.ship_config.adcp_config is not None: location = self._location time = self._time while self._next_adcp_time <= end_time: @@ -162,11 +159,12 @@ def _progress_time_traveling_towards(self, location: Location) -> None: ) self._next_adcp_time = ( - self._next_adcp_time + self._ship_config.adcp_config.period + self._next_adcp_time + + self._expedition.ship_config.adcp_config.period ) # note all ship underwater ST measurements - if self._ship_config.ship_underwater_st_config is not None: + if self._expedition.ship_config.ship_underwater_st_config is not None: location = self._location time = self._time while self._next_ship_underwater_st_time <= end_time: @@ -189,7 +187,7 @@ def _progress_time_traveling_towards(self, location: Location) -> None: self._next_ship_underwater_st_time = ( self._next_ship_underwater_st_time - + self._ship_config.ship_underwater_st_config.period + + self._expedition.ship_config.ship_underwater_st_config.period ) self._time = end_time @@ -199,24 +197,25 @@ def _progress_time_stationary(self, time_passed: timedelta) -> None: end_time = self._time + time_passed # note all ADCP measurements - if self._ship_config.adcp_config is not None: + if self._expedition.ship_config.adcp_config is not None: while self._next_adcp_time <= end_time: self._measurements_to_simulate.adcps.append( Spacetime(self._location, self._next_adcp_time) ) self._next_adcp_time = ( - self._next_adcp_time + self._ship_config.adcp_config.period + self._next_adcp_time + + self._expedition.ship_config.adcp_config.period ) # note all ship underwater ST measurements - if self._ship_config.ship_underwater_st_config is not None: + if self._expedition.ship_config.ship_underwater_st_config is not None: while self._next_ship_underwater_st_time <= end_time: 
self._measurements_to_simulate.ship_underwater_sts.append( Spacetime(self._location, self._next_ship_underwater_st_time) ) self._next_ship_underwater_st_time = ( self._next_ship_underwater_st_time - + self._ship_config.ship_underwater_st_config.period + + self._expedition.ship_config.ship_underwater_st_config.period ) self._time = end_time @@ -241,48 +240,52 @@ def _make_measurements(self, waypoint: Waypoint) -> timedelta: self._measurements_to_simulate.argo_floats.append( ArgoFloat( spacetime=Spacetime(self._location, self._time), - min_depth=self._ship_config.argo_float_config.min_depth_meter, - max_depth=self._ship_config.argo_float_config.max_depth_meter, - drift_depth=self._ship_config.argo_float_config.drift_depth_meter, - vertical_speed=self._ship_config.argo_float_config.vertical_speed_meter_per_second, - cycle_days=self._ship_config.argo_float_config.cycle_days, - drift_days=self._ship_config.argo_float_config.drift_days, + min_depth=self._expedition.ship_config.argo_float_config.min_depth_meter, + max_depth=self._expedition.ship_config.argo_float_config.max_depth_meter, + drift_depth=self._expedition.ship_config.argo_float_config.drift_depth_meter, + vertical_speed=self._expedition.ship_config.argo_float_config.vertical_speed_meter_per_second, + cycle_days=self._expedition.ship_config.argo_float_config.cycle_days, + drift_days=self._expedition.ship_config.argo_float_config.drift_days, ) ) elif instrument is InstrumentType.CTD: self._measurements_to_simulate.ctds.append( CTD( spacetime=Spacetime(self._location, self._time), - min_depth=self._ship_config.ctd_config.min_depth_meter, - max_depth=self._ship_config.ctd_config.max_depth_meter, + min_depth=self._expedition.ship_config.ctd_config.min_depth_meter, + max_depth=self._expedition.ship_config.ctd_config.max_depth_meter, ) ) - time_costs.append(self._ship_config.ctd_config.stationkeeping_time) + time_costs.append( + self._expedition.ship_config.ctd_config.stationkeeping_time + ) elif instrument is 
InstrumentType.CTD_BGC: self._measurements_to_simulate.ctd_bgcs.append( CTD_BGC( spacetime=Spacetime(self._location, self._time), - min_depth=self._ship_config.ctd_bgc_config.min_depth_meter, - max_depth=self._ship_config.ctd_bgc_config.max_depth_meter, + min_depth=self._expedition.ship_config.ctd_bgc_config.min_depth_meter, + max_depth=self._expedition.ship_config.ctd_bgc_config.max_depth_meter, ) ) - time_costs.append(self._ship_config.ctd_bgc_config.stationkeeping_time) + time_costs.append( + self._expedition.ship_config.ctd_bgc_config.stationkeeping_time + ) elif instrument is InstrumentType.DRIFTER: self._measurements_to_simulate.drifters.append( Drifter( spacetime=Spacetime(self._location, self._time), - depth=self._ship_config.drifter_config.depth_meter, - lifetime=self._ship_config.drifter_config.lifetime, + depth=self._expedition.ship_config.drifter_config.depth_meter, + lifetime=self._expedition.ship_config.drifter_config.lifetime, ) ) elif instrument is InstrumentType.XBT: self._measurements_to_simulate.xbts.append( XBT( spacetime=Spacetime(self._location, self._time), - min_depth=self._ship_config.xbt_config.min_depth_meter, - max_depth=self._ship_config.xbt_config.max_depth_meter, - fall_speed=self._ship_config.xbt_config.fall_speed_meter_per_second, - deceleration_coefficient=self._ship_config.xbt_config.deceleration_coefficient, + min_depth=self._expedition.ship_config.xbt_config.min_depth_meter, + max_depth=self._expedition.ship_config.xbt_config.max_depth_meter, + fall_speed=self._expedition.ship_config.xbt_config.fall_speed_meter_per_second, + deceleration_coefficient=self._expedition.ship_config.xbt_config.deceleration_coefficient, ) ) else: From e4c48fdd5fe89caf08748be24d3f46c44da3595b Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Fri, 17 Oct 2025 15:22:57 +0200 Subject: [PATCH 08/15] tests for unified expedition.yaml --- .../tutorials/Argo_data_tutorial.ipynb | 73 ++++++---- tests/cli/test_cli.py | 
25 +--- tests/cli/test_fetch.py | 28 ++-- .../expedition/expedition_dir/expedition.yaml | 45 ++++++ tests/expedition/expedition_dir/schedule.yaml | 18 --- .../expedition_dir/ship_config.yaml | 25 ---- .../{test_schedule.py => test_expedition.py} | 133 ++++++++++++++++-- tests/expedition/test_ship_config.py | 126 ----------------- tests/expedition/test_simulate_schedule.py | 19 +-- tests/test_mfp_to_yaml.py | 12 +- tests/test_utils.py | 26 +--- 11 files changed, 252 insertions(+), 278 deletions(-) create mode 100644 tests/expedition/expedition_dir/expedition.yaml delete mode 100644 tests/expedition/expedition_dir/schedule.yaml delete mode 100644 tests/expedition/expedition_dir/ship_config.yaml rename tests/expedition/{test_schedule.py => test_expedition.py} (53%) delete mode 100644 tests/expedition/test_ship_config.py diff --git a/docs/user-guide/tutorials/Argo_data_tutorial.ipynb b/docs/user-guide/tutorials/Argo_data_tutorial.ipynb index 30cee460..81dbcd27 100644 --- a/docs/user-guide/tutorials/Argo_data_tutorial.ipynb +++ b/docs/user-guide/tutorials/Argo_data_tutorial.ipynb @@ -28,25 +28,26 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We have downloaded the data from Copernicus Marine Service, using `virtualship fetch` and the information in following `schedule.yaml` file:\n", + "We have downloaded the data from Copernicus Marine Service, using `virtualship fetch` and the information in following `schedule` section of the `expedition.yaml` file:\n", "```yaml\n", - "space_time_region:\n", - " spatial_range:\n", - " minimum_longitude: -5\n", - " maximum_longitude: 5\n", - " minimum_latitude: -5\n", - " maximum_latitude: 5\n", - " minimum_depth: 0\n", - " maximum_depth: 2000\n", - " time_range:\n", - " start_time: 2023-01-01 00:00:00\n", - " end_time: 2023-02-01 00:00:00\n", - "waypoints:\n", - " - instrument: ARGO_FLOAT\n", - " location:\n", - " latitude: 0.02\n", - " longitude: 0.02\n", - " time: 2023-01-01 02:00:00\n", + "schedule:\n", + " 
space_time_region:\n", + " spatial_range:\n", + " minimum_longitude: -5\n", + " maximum_longitude: 5\n", + " minimum_latitude: -5\n", + " maximum_latitude: 5\n", + " minimum_depth: 0\n", + " maximum_depth: 2000\n", + " time_range:\n", + " start_time: 2023-01-01 00:00:00\n", + " end_time: 2023-02-01 00:00:00\n", + " waypoints:\n", + " - instrument: ARGO_FLOAT\n", + " location:\n", + " latitude: 0.02\n", + " longitude: 0.02\n", + " time: 2023-01-01 02:00:00\n", "```\n", "\n", "After running `virtualship run`, we have a `results/argo_floats.zarr` file with the data from the float." @@ -58,11 +59,31 @@ "metadata": {}, "outputs": [ { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/erik/anaconda3/envs/parcels/lib/python3.12/site-packages/xarray/coding/times.py:254: RuntimeWarning: invalid value encountered in cast\n", - " flat_num_dates_ns_int = (flat_num_dates * _NS_PER_TIME_DELTA[delta]).astype(\n" + "ename": "FileNotFoundError", + "evalue": "No such file or directory: '/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr'", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mKeyError\u001b[39m Traceback (most recent call last)", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1844\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1843\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1844\u001b[39m zarr_root_group = \u001b[43mzarr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_consolidated\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mopen_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1845\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mValueError\u001b[39;00m, \u001b[38;5;167;01mKeyError\u001b[39;00m):\n\u001b[32m 1846\u001b[39m \u001b[38;5;66;03m# ValueError in zarr-python 3.x, KeyError in 2.x.\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/convenience.py:1362\u001b[39m, in \u001b[36mopen_consolidated\u001b[39m\u001b[34m(store, metadata_key, mode, **kwargs)\u001b[39m\n\u001b[32m 1361\u001b[39m \u001b[38;5;66;03m# setup metadata store\u001b[39;00m\n\u001b[32m-> \u001b[39m\u001b[32m1362\u001b[39m meta_store = \u001b[43mConsolidatedStoreClass\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1364\u001b[39m \u001b[38;5;66;03m# pass through\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/storage.py:3045\u001b[39m, in \u001b[36mConsolidatedMetadataStore.__init__\u001b[39m\u001b[34m(self, store, metadata_key)\u001b[39m\n\u001b[32m 3044\u001b[39m \u001b[38;5;66;03m# retrieve consolidated metadata\u001b[39;00m\n\u001b[32m-> \u001b[39m\u001b[32m3045\u001b[39m meta = json_loads(\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m[\u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m]\u001b[49m)\n\u001b[32m 3047\u001b[39m \u001b[38;5;66;03m# check format of consolidated metadata\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/storage.py:1120\u001b[39m, in \u001b[36mDirectoryStore.__getitem__\u001b[39m\u001b[34m(self, key)\u001b[39m\n\u001b[32m 1119\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m-> 
\u001b[39m\u001b[32m1120\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key)\n", + "\u001b[31mKeyError\u001b[39m: '.zmetadata'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mGroupNotFoundError\u001b[39m Traceback (most recent call last)", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1848\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1847\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1848\u001b[39m zarr_root_group = \u001b[43mzarr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_group\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mopen_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1849\u001b[39m emit_user_level_warning(\n\u001b[32m 1850\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mFailed to open Zarr store with consolidated metadata, \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1851\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mbut successfully read with non-consolidated metadata. 
\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m (...)\u001b[39m\u001b[32m 1861\u001b[39m \u001b[38;5;167;01mRuntimeWarning\u001b[39;00m,\n\u001b[32m 1862\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/hierarchy.py:1578\u001b[39m, in \u001b[36mopen_group\u001b[39m\u001b[34m(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version, meta_array)\u001b[39m\n\u001b[32m 1577\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m ContainsArrayError(path)\n\u001b[32m-> \u001b[39m\u001b[32m1578\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m GroupNotFoundError(path)\n\u001b[32m 1580\u001b[39m \u001b[38;5;28;01melif\u001b[39;00m mode == \u001b[33m\"\u001b[39m\u001b[33mw\u001b[39m\u001b[33m\"\u001b[39m:\n", + "\u001b[31mGroupNotFoundError\u001b[39m: group not found at path ''", + "\nThe above exception was the direct cause of the following exception:\n", + "\u001b[31mFileNotFoundError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[2]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m ds = \u001b[43mxr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_zarr\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1565\u001b[39m, in \u001b[36mopen_zarr\u001b[39m\u001b[34m(store, group, synchronizer, chunks, decode_cf, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, consolidated, overwrite_encoded_chunks, chunk_store, storage_options, decode_timedelta, use_cftime, zarr_version, zarr_format, use_zarr_fill_value_as_mask, chunked_array_type, from_array_kwargs, create_default_indexes, **kwargs)\u001b[39m\n\u001b[32m 1551\u001b[39m 
\u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\n\u001b[32m 1552\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mopen_zarr() got unexpected keyword arguments \u001b[39m\u001b[33m\"\u001b[39m + \u001b[33m\"\u001b[39m\u001b[33m,\u001b[39m\u001b[33m\"\u001b[39m.join(kwargs.keys())\n\u001b[32m 1553\u001b[39m )\n\u001b[32m 1555\u001b[39m backend_kwargs = {\n\u001b[32m 1556\u001b[39m \u001b[33m\"\u001b[39m\u001b[33msynchronizer\u001b[39m\u001b[33m\"\u001b[39m: synchronizer,\n\u001b[32m 1557\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mconsolidated\u001b[39m\u001b[33m\"\u001b[39m: consolidated,\n\u001b[32m (...)\u001b[39m\u001b[32m 1562\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mzarr_format\u001b[39m\u001b[33m\"\u001b[39m: zarr_format,\n\u001b[32m 1563\u001b[39m }\n\u001b[32m-> \u001b[39m\u001b[32m1565\u001b[39m ds = \u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1566\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1567\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1568\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_cf\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_cf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1569\u001b[39m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1570\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1571\u001b[39m \u001b[43m \u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1572\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 
1573\u001b[39m \u001b[43m \u001b[49m\u001b[43mengine\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mzarr\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 1574\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunks\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1575\u001b[39m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1576\u001b[39m \u001b[43m \u001b[49m\u001b[43mcreate_default_indexes\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcreate_default_indexes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1577\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunked_array_type\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunked_array_type\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1578\u001b[39m \u001b[43m \u001b[49m\u001b[43mfrom_array_kwargs\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfrom_array_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1579\u001b[39m \u001b[43m \u001b[49m\u001b[43mbackend_kwargs\u001b[49m\u001b[43m=\u001b[49m\u001b[43mbackend_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1580\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1581\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1582\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1583\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1584\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1585\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", + "\u001b[36mFile 
\u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/api.py:750\u001b[39m, in \u001b[36mopen_dataset\u001b[39m\u001b[34m(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, create_default_indexes, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)\u001b[39m\n\u001b[32m 738\u001b[39m decoders = _resolve_decoders_kwargs(\n\u001b[32m 739\u001b[39m decode_cf,\n\u001b[32m 740\u001b[39m open_backend_dataset_parameters=backend.open_dataset_parameters,\n\u001b[32m (...)\u001b[39m\u001b[32m 746\u001b[39m decode_coords=decode_coords,\n\u001b[32m 747\u001b[39m )\n\u001b[32m 749\u001b[39m overwrite_encoded_chunks = kwargs.pop(\u001b[33m\"\u001b[39m\u001b[33moverwrite_encoded_chunks\u001b[39m\u001b[33m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[32m--> \u001b[39m\u001b[32m750\u001b[39m backend_ds = \u001b[43mbackend\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 751\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 752\u001b[39m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 753\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mdecoders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 754\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 755\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 756\u001b[39m ds = _dataset_from_backend_dataset(\n\u001b[32m 757\u001b[39m backend_ds,\n\u001b[32m 758\u001b[39m filename_or_obj,\n\u001b[32m (...)\u001b[39m\u001b[32m 769\u001b[39m **kwargs,\n\u001b[32m 770\u001b[39m )\n\u001b[32m 771\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", + 
"\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1636\u001b[39m, in \u001b[36mZarrBackendEntrypoint.open_dataset\u001b[39m\u001b[34m(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, zarr_version, zarr_format, store, engine, use_zarr_fill_value_as_mask, cache_members)\u001b[39m\n\u001b[32m 1634\u001b[39m filename_or_obj = _normalize_path(filename_or_obj)\n\u001b[32m 1635\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m store:\n\u001b[32m-> \u001b[39m\u001b[32m1636\u001b[39m store = \u001b[43mZarrStore\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_group\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1637\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1638\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1639\u001b[39m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1640\u001b[39m \u001b[43m \u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m=\u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1641\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1642\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1643\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1644\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1645\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1646\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1647\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1648\u001b[39m \u001b[43m \u001b[49m\u001b[43mcache_members\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcache_members\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1649\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1651\u001b[39m store_entrypoint = StoreBackendEntrypoint()\n\u001b[32m 1652\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m close_on_error(store):\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:714\u001b[39m, in \u001b[36mZarrStore.open_group\u001b[39m\u001b[34m(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, align_chunks, zarr_version, zarr_format, use_zarr_fill_value_as_mask, write_empty, cache_members)\u001b[39m\n\u001b[32m 688\u001b[39m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[32m 689\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mopen_group\u001b[39m(\n\u001b[32m 690\u001b[39m \u001b[38;5;28mcls\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 707\u001b[39m cache_members: \u001b[38;5;28mbool\u001b[39m = \u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[32m 708\u001b[39m ):\n\u001b[32m 709\u001b[39m (\n\u001b[32m 710\u001b[39m zarr_group,\n\u001b[32m 711\u001b[39m consolidate_on_close,\n\u001b[32m 712\u001b[39m 
close_store_on_close,\n\u001b[32m 713\u001b[39m use_zarr_fill_value_as_mask,\n\u001b[32m--> \u001b[39m\u001b[32m714\u001b[39m ) = \u001b[43m_get_open_params\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 715\u001b[39m \u001b[43m \u001b[49m\u001b[43mstore\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 716\u001b[39m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 717\u001b[39m \u001b[43m \u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m=\u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 718\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 719\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 720\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 721\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 722\u001b[39m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 723\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 724\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 725\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 726\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 728\u001b[39m 
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mcls\u001b[39m(\n\u001b[32m 729\u001b[39m zarr_group,\n\u001b[32m 730\u001b[39m mode,\n\u001b[32m (...)\u001b[39m\u001b[32m 739\u001b[39m cache_members=cache_members,\n\u001b[32m 740\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1864\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1849\u001b[39m emit_user_level_warning(\n\u001b[32m 1850\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mFailed to open Zarr store with consolidated metadata, \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1851\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mbut successfully read with non-consolidated metadata. \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m (...)\u001b[39m\u001b[32m 1861\u001b[39m \u001b[38;5;167;01mRuntimeWarning\u001b[39;00m,\n\u001b[32m 1862\u001b[39m )\n\u001b[32m 1863\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m missing_exc \u001b[38;5;28;01mas\u001b[39;00m err:\n\u001b[32m-> \u001b[39m\u001b[32m1864\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m(\n\u001b[32m 1865\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mNo such file or directory: \u001b[39m\u001b[33m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstore\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m'\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1866\u001b[39m ) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01merr\u001b[39;00m\n\u001b[32m 1868\u001b[39m \u001b[38;5;66;03m# but the user should still receive a DataTree whose root is the group they asked for\u001b[39;00m\n\u001b[32m 1869\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m group \u001b[38;5;129;01mand\u001b[39;00m group != 
\u001b[33m\"\u001b[39m\u001b[33m/\u001b[39m\u001b[33m\"\u001b[39m:\n", + "\u001b[31mFileNotFoundError\u001b[39m: No such file or directory: '/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr'" ] } ], @@ -79,7 +100,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -111,7 +132,7 @@ ], "metadata": { "kernelspec": { - "display_name": "parcels", + "display_name": "ship", "language": "python", "name": "python3" }, @@ -125,7 +146,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.9" } }, "nbformat": 4, diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index 015c3267..b8e797b7 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -4,7 +4,7 @@ from click.testing import CliRunner from virtualship.cli.commands import fetch, init -from virtualship.utils import SCHEDULE, SHIP_CONFIG +from virtualship.utils import EXPEDITION @pytest.fixture @@ -32,29 +32,16 @@ def test_init(): with runner.isolated_filesystem(): result = runner.invoke(init, ["."]) assert result.exit_code == 0 - config = Path(SHIP_CONFIG) - schedule = Path(SCHEDULE) + expedition = Path(EXPEDITION) - assert config.exists() - assert schedule.exists() + assert expedition.exists() -def test_init_existing_config(): +def test_init_existing_expedition(): runner = CliRunner() with runner.isolated_filesystem(): - config = Path(SHIP_CONFIG) - config.write_text("test") - - with pytest.raises(FileExistsError): - result = runner.invoke(init, ["."]) - raise result.exception - - -def test_init_existing_schedule(): - runner = CliRunner() - with runner.isolated_filesystem(): - schedule = Path(SCHEDULE) - schedule.write_text("test") + expedition = Path(EXPEDITION) + expedition.write_text("test") with pytest.raises(FileExistsError): result = runner.invoke(init, ["."]) diff --git a/tests/cli/test_fetch.py b/tests/cli/test_fetch.py index 856b72f6..69390733 100644 --- 
a/tests/cli/test_fetch.py +++ b/tests/cli/test_fetch.py @@ -16,8 +16,8 @@ hash_model, hash_to_filename, ) -from virtualship.models import Schedule, ShipConfig -from virtualship.utils import get_example_config, get_example_schedule +from virtualship.models import Expedition +from virtualship.utils import EXPEDITION, get_example_expedition @pytest.fixture @@ -32,31 +32,19 @@ def fake_download(output_filename, output_directory, **_): @pytest.fixture -def schedule(tmpdir): - out_path = tmpdir.join("schedule.yaml") +def expedition(tmpdir): + out_path = tmpdir.join(EXPEDITION) with open(out_path, "w") as file: - file.write(get_example_schedule()) + file.write(get_example_expedition()) - schedule = Schedule.from_yaml(out_path) + expedition = Expedition.from_yaml(out_path) - return schedule - - -@pytest.fixture -def ship_config(tmpdir): - out_path = tmpdir.join("ship_config.yaml") - - with open(out_path, "w") as file: - file.write(get_example_config()) - - ship_config = ShipConfig.from_yaml(out_path) - - return ship_config + return expedition @pytest.mark.usefixtures("copernicus_subset_no_download") -def test_fetch(schedule, ship_config, tmpdir): +def test_fetch(expedition, tmpdir): """Test the fetch command, but mock the download.""" _fetch(Path(tmpdir), "test", "test") diff --git a/tests/expedition/expedition_dir/expedition.yaml b/tests/expedition/expedition_dir/expedition.yaml new file mode 100644 index 00000000..702c7fd2 --- /dev/null +++ b/tests/expedition/expedition_dir/expedition.yaml @@ -0,0 +1,45 @@ +schedule: + waypoints: + - instrument: + - CTD + location: + latitude: 0 + longitude: 0 + time: 2023-01-01 00:00:00 + - instrument: + - DRIFTER + - ARGO_FLOAT + location: + latitude: 0.01 + longitude: 0.01 + time: 2023-01-02 00:00:00 + - location: # empty waypoint + latitude: 0.02 + longitude: 0.01 + time: 2023-01-02 03:00:00 +ship_config: + ship_speed_knots: 10.0 + adcp_config: + num_bins: 40 + max_depth_meter: -1000.0 + period_minutes: 5.0 + argo_float_config: + 
cycle_days: 10.0 + drift_days: 9.0 + drift_depth_meter: -1000.0 + max_depth_meter: -2000.0 + min_depth_meter: 0.0 + vertical_speed_meter_per_second: -0.1 + ctd_config: + max_depth_meter: -2000.0 + min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + ctd_bgc_config: + max_depth_meter: -2000.0 + min_depth_meter: -11.0 + stationkeeping_time_minutes: 20.0 + drifter_config: + depth_meter: 0.0 + lifetime_minutes: 40320.0 + ship_underwater_st_config: + period_minutes: 5.0 diff --git a/tests/expedition/expedition_dir/schedule.yaml b/tests/expedition/expedition_dir/schedule.yaml deleted file mode 100644 index 29c14ac9..00000000 --- a/tests/expedition/expedition_dir/schedule.yaml +++ /dev/null @@ -1,18 +0,0 @@ -waypoints: - - instrument: - - CTD - location: - latitude: 0 - longitude: 0 - time: 2023-01-01 00:00:00 - - instrument: - - DRIFTER - - ARGO_FLOAT - location: - latitude: 0.01 - longitude: 0.01 - time: 2023-01-02 00:00:00 - - location: # empty waypoint - latitude: 0.02 - longitude: 0.01 - time: 2023-01-02 03:00:00 diff --git a/tests/expedition/expedition_dir/ship_config.yaml b/tests/expedition/expedition_dir/ship_config.yaml deleted file mode 100644 index 1bae9d1d..00000000 --- a/tests/expedition/expedition_dir/ship_config.yaml +++ /dev/null @@ -1,25 +0,0 @@ -ship_speed_knots: 10.0 -adcp_config: - num_bins: 40 - max_depth_meter: -1000.0 - period_minutes: 5.0 -argo_float_config: - cycle_days: 10.0 - drift_days: 9.0 - drift_depth_meter: -1000.0 - max_depth_meter: -2000.0 - min_depth_meter: 0.0 - vertical_speed_meter_per_second: -0.1 -ctd_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 -ctd_bgc_config: - max_depth_meter: -2000.0 - min_depth_meter: -11.0 - stationkeeping_time_minutes: 20.0 -drifter_config: - depth_meter: 0.0 - lifetime_minutes: 40320.0 -ship_underwater_st_config: - period_minutes: 5.0 diff --git a/tests/expedition/test_schedule.py b/tests/expedition/test_expedition.py similarity index 53% rename from 
tests/expedition/test_schedule.py rename to tests/expedition/test_expedition.py index f4a8532e..979f055b 100644 --- a/tests/expedition/test_schedule.py +++ b/tests/expedition/test_expedition.py @@ -4,18 +4,18 @@ import pyproj import pytest -from virtualship.errors import ScheduleError +from virtualship.errors import ConfigError, ScheduleError from virtualship.expedition.do_expedition import _load_input_data -from virtualship.models import Location, Schedule, Waypoint -from virtualship.utils import _get_ship_config +from virtualship.models import Expedition, Location, Schedule, Waypoint +from virtualship.utils import EXPEDITION, _get_expedition, get_example_expedition projection = pyproj.Geod(ellps="WGS84") expedition_dir = Path("expedition_dir") -def test_import_export_schedule(tmpdir) -> None: - out_path = tmpdir.join("schedule.yaml") +def test_import_export_expedition(tmpdir) -> None: + out_path = tmpdir.join(EXPEDITION) # arbitrary time for testing base_time = datetime.strptime("1950-01-01", "%Y-%m-%d") @@ -30,10 +30,12 @@ def test_import_export_schedule(tmpdir) -> None: ), ] ) - schedule.to_yaml(out_path) + ship_config = _get_expedition(expedition_dir).ship_config + expedition = Expedition(schedule=schedule, ship_config=ship_config) + expedition.to_yaml(out_path) - schedule2 = Schedule.from_yaml(out_path) - assert schedule == schedule2 + expedition2 = Expedition.from_yaml(out_path) + assert expedition == expedition2 def test_verify_schedule() -> None: @@ -44,7 +46,7 @@ def test_verify_schedule() -> None: ] ) - ship_config = _get_ship_config(expedition_dir) + ship_config = _get_expedition(expedition_dir).ship_config schedule.verify(ship_config.ship_speed_knots, None) @@ -143,7 +145,7 @@ def test_get_instruments() -> None: def test_verify_schedule_errors( schedule: Schedule, check_space_time_region: bool, error, match ) -> None: - ship_config = _get_ship_config(expedition_dir) + ship_config = _get_expedition(expedition_dir).ship_config input_data = 
_load_input_data( expedition_dir, @@ -158,3 +160,114 @@ def test_verify_schedule_errors( input_data, check_space_time_region=check_space_time_region, ) + + +@pytest.fixture +def schedule(tmp_file): + with open(tmp_file, "w") as file: + file.write(get_example_expedition()) + return Expedition.from_yaml(tmp_file).schedule + + +@pytest.fixture +def schedule_no_xbt(schedule): + for waypoint in schedule.waypoints: + if waypoint.instrument and any( + instrument.name == "XBT" for instrument in waypoint.instrument + ): + waypoint.instrument = [ + instrument + for instrument in waypoint.instrument + if instrument.name != "XBT" + ] + + return schedule + + +@pytest.fixture +def ship_config(tmp_file): + with open(tmp_file, "w") as file: + file.write(get_example_expedition()) + return Expedition.from_yaml(tmp_file).ship_config + + +@pytest.fixture +def ship_config_no_xbt(ship_config): + delattr(ship_config, "xbt_config") + return ship_config + + +@pytest.fixture +def ship_config_no_ctd(ship_config): + delattr(ship_config, "ctd_config") + return ship_config + + +@pytest.fixture +def ship_config_no_ctd_bgc(ship_config): + delattr(ship_config, "ctd_bgc_config") + return ship_config + + +@pytest.fixture +def ship_config_no_argo_float(ship_config): + delattr(ship_config, "argo_float_config") + return ship_config + + +@pytest.fixture +def ship_config_no_drifter(ship_config): + delattr(ship_config, "drifter_config") + return ship_config + + +def test_verify_ship_config(ship_config, schedule) -> None: + ship_config.verify(schedule) + + +def test_verify_ship_config_no_instrument(ship_config, schedule_no_xbt) -> None: + ship_config.verify(schedule_no_xbt) + + +@pytest.mark.parametrize( + "ship_config_fixture,error,match", + [ + pytest.param( + "ship_config_no_xbt", + ConfigError, + "Schedule includes instrument 'XBT', but ship_config does not provide configuration for it.", + id="ShipConfigNoXBT", + ), + pytest.param( + "ship_config_no_ctd", + ConfigError, + "Schedule includes instrument 
'CTD', but ship_config does not provide configuration for it.", + id="ShipConfigNoCTD", + ), + pytest.param( + "ship_config_no_ctd_bgc", + ConfigError, + "Schedule includes instrument 'CTD_BGC', but ship_config does not provide configuration for it.", + id="ShipConfigNoCTD_BGC", + ), + pytest.param( + "ship_config_no_argo_float", + ConfigError, + "Schedule includes instrument 'ARGO_FLOAT', but ship_config does not provide configuration for it.", + id="ShipConfigNoARGO_FLOAT", + ), + pytest.param( + "ship_config_no_drifter", + ConfigError, + "Schedule includes instrument 'DRIFTER', but ship_config does not provide configuration for it.", + id="ShipConfigNoDRIFTER", + ), + ], +) +def test_verify_ship_config_errors( + request, schedule, ship_config_fixture, error, match +) -> None: + ship_config = request.getfixturevalue(ship_config_fixture) + + with pytest.raises(error, match=match): + ship_config.verify(schedule) diff --git a/tests/expedition/test_ship_config.py b/tests/expedition/test_ship_config.py deleted file mode 100644 index 6444e985..00000000 --- a/tests/expedition/test_ship_config.py +++ /dev/null @@ -1,126 +0,0 @@ -from pathlib import Path - -import pytest - -from virtualship.errors import ConfigError -from virtualship.models import Schedule, ShipConfig -from virtualship.utils import get_example_config, get_example_schedule - -expedition_dir = Path("expedition_dir") - - -@pytest.fixture -def schedule(tmp_file): - with open(tmp_file, "w") as file: - file.write(get_example_schedule()) - return Schedule.from_yaml(tmp_file) - - -@pytest.fixture -def schedule_no_xbt(schedule): - for waypoint in schedule.waypoints: - if waypoint.instrument and any( - instrument.name == "XBT" for instrument in waypoint.instrument - ): - waypoint.instrument = [ - instrument - for instrument in waypoint.instrument - if instrument.name != "XBT" - ] - - return schedule - - -@pytest.fixture -def ship_config(tmp_file): - with open(tmp_file, "w") as file: - 
file.write(get_example_config()) - return ShipConfig.from_yaml(tmp_file) - - -@pytest.fixture -def ship_config_no_xbt(ship_config): - delattr(ship_config, "xbt_config") - return ship_config - - -@pytest.fixture -def ship_config_no_ctd(ship_config): - delattr(ship_config, "ctd_config") - return ship_config - - -@pytest.fixture -def ship_config_no_ctd_bgc(ship_config): - delattr(ship_config, "ctd_bgc_config") - return ship_config - - -@pytest.fixture -def ship_config_no_argo_float(ship_config): - delattr(ship_config, "argo_float_config") - return ship_config - - -@pytest.fixture -def ship_config_no_drifter(ship_config): - delattr(ship_config, "drifter_config") - return ship_config - - -def test_import_export_ship_config(ship_config, tmp_file) -> None: - ship_config.to_yaml(tmp_file) - ship_config_2 = ShipConfig.from_yaml(tmp_file) - assert ship_config == ship_config_2 - - -def test_verify_ship_config(ship_config, schedule) -> None: - ship_config.verify(schedule) - - -def test_verify_ship_config_no_instrument(ship_config, schedule_no_xbt) -> None: - ship_config.verify(schedule_no_xbt) - - -@pytest.mark.parametrize( - "ship_config_fixture,error,match", - [ - pytest.param( - "ship_config_no_xbt", - ConfigError, - "Planning has a waypoint with XBT instrument, but configuration does not configure XBT.", - id="ShipConfigNoXBT", - ), - pytest.param( - "ship_config_no_ctd", - ConfigError, - "Planning has a waypoint with CTD instrument, but configuration does not configure CTD.", - id="ShipConfigNoCTD", - ), - pytest.param( - "ship_config_no_ctd_bgc", - ConfigError, - "Planning has a waypoint with CTD_BGC instrument, but configuration does not configure CTD_BGCs.", - id="ShipConfigNoCTD_BGC", - ), - pytest.param( - "ship_config_no_argo_float", - ConfigError, - "Planning has a waypoint with Argo float instrument, but configuration does not configure Argo floats.", - id="ShipConfigNoARGO_FLOAT", - ), - pytest.param( - "ship_config_no_drifter", - ConfigError, - "Planning has a 
waypoint with drifter instrument, but configuration does not configure drifters.", - id="ShipConfigNoDRIFTER", - ), - ], -) -def test_verify_ship_config_errors( - request, schedule, ship_config_fixture, error, match -) -> None: - ship_config = request.getfixturevalue(ship_config_fixture) - - with pytest.raises(error, match=match): - ship_config.verify(schedule) diff --git a/tests/expedition/test_simulate_schedule.py b/tests/expedition/test_simulate_schedule.py index 9eecd73d..f3f1b94a 100644 --- a/tests/expedition/test_simulate_schedule.py +++ b/tests/expedition/test_simulate_schedule.py @@ -7,7 +7,7 @@ ScheduleProblem, simulate_schedule, ) -from virtualship.models import Location, Schedule, ShipConfig, Waypoint +from virtualship.models import Expedition, Location, Schedule, Waypoint def test_simulate_schedule_feasible() -> None: @@ -15,16 +15,16 @@ def test_simulate_schedule_feasible() -> None: base_time = datetime.strptime("2022-01-01T00:00:00", "%Y-%m-%dT%H:%M:%S") projection = pyproj.Geod(ellps="WGS84") - ship_config = ShipConfig.from_yaml("expedition_dir/ship_config.yaml") - ship_config.ship_speed_knots = 10.0 - schedule = Schedule( + expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") + expedition.ship_config.ship_speed_knots = 10.0 + expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time), Waypoint(location=Location(0.01, 0), time=base_time + timedelta(days=1)), ] ) - result = simulate_schedule(projection, ship_config, schedule) + result = simulate_schedule(projection, expedition) assert isinstance(result, ScheduleOk) @@ -34,22 +34,23 @@ def test_simulate_schedule_too_far() -> None: base_time = datetime.strptime("2022-01-01T00:00:00", "%Y-%m-%dT%H:%M:%S") projection = pyproj.Geod(ellps="WGS84") - ship_config = ShipConfig.from_yaml("expedition_dir/ship_config.yaml") - schedule = Schedule( + expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") + expedition.ship_config.ship_speed_knots = 10.0 
+ expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time), Waypoint(location=Location(1.0, 0), time=base_time + timedelta(minutes=1)), ] ) - result = simulate_schedule(projection, ship_config, schedule) + result = simulate_schedule(projection, expedition) assert isinstance(result, ScheduleProblem) def test_time_in_minutes_in_ship_schedule() -> None: """Test whether the pydantic serializer picks up the time *in minutes* in the ship schedule.""" - ship_config = ShipConfig.from_yaml("expedition_dir/ship_config.yaml") + ship_config = Expedition.from_yaml("expedition_dir/expedition.yaml").ship_config assert ship_config.adcp_config.period == timedelta(minutes=5) assert ship_config.ctd_config.stationkeeping_time == timedelta(minutes=20) assert ship_config.ctd_bgc_config.stationkeeping_time == timedelta(minutes=20) diff --git a/tests/test_mfp_to_yaml.py b/tests/test_mfp_to_yaml.py index d242d30a..4eab16c2 100644 --- a/tests/test_mfp_to_yaml.py +++ b/tests/test_mfp_to_yaml.py @@ -3,7 +3,7 @@ import pandas as pd import pytest -from virtualship.models import Schedule +from virtualship.models import Expedition from virtualship.utils import mfp_to_yaml @@ -88,7 +88,7 @@ def test_mfp_to_yaml_success(request, fixture_name, tmp_path): """Test that mfp_to_yaml correctly processes a valid MFP file.""" valid_mfp_file = request.getfixturevalue(fixture_name) - yaml_output_path = tmp_path / "schedule.yaml" + yaml_output_path = tmp_path / "expedition.yaml" # Run function (No need to mock open() for YAML, real file is created) mfp_to_yaml(valid_mfp_file, yaml_output_path) @@ -97,9 +97,9 @@ def test_mfp_to_yaml_success(request, fixture_name, tmp_path): assert yaml_output_path.exists() # Load YAML and validate contents - data = Schedule.from_yaml(yaml_output_path) + data = Expedition.from_yaml(yaml_output_path) - assert len(data.waypoints) == 3 + assert len(data.schedule.waypoints) == 3 @pytest.mark.parametrize( @@ -138,7 +138,7 @@ def 
test_mfp_to_yaml_exceptions(request, fixture_name, error, match, tmp_path): """Test that mfp_to_yaml raises an error when input file is not valid.""" fixture = request.getfixturevalue(fixture_name) - yaml_output_path = tmp_path / "schedule.yaml" + yaml_output_path = tmp_path / "expedition.yaml" with pytest.raises(error, match=match): mfp_to_yaml(fixture, yaml_output_path) @@ -146,7 +146,7 @@ def test_mfp_to_yaml_exceptions(request, fixture_name, error, match, tmp_path): def test_mfp_to_yaml_extra_headers(unexpected_header_mfp_file, tmp_path): """Test that mfp_to_yaml prints a warning when extra columns are found.""" - yaml_output_path = tmp_path / "schedule.yaml" + yaml_output_path = tmp_path / "expedition.yaml" with pytest.warns(UserWarning, match="Found additional unexpected columns.*"): mfp_to_yaml(unexpected_header_mfp_file, yaml_output_path) diff --git a/tests/test_utils.py b/tests/test_utils.py index 4c6db8fc..0dcebd79 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,26 +1,14 @@ -from virtualship.models import Schedule, ShipConfig -from virtualship.utils import get_example_config, get_example_schedule +from virtualship.models import Expedition +from virtualship.utils import get_example_expedition -def test_get_example_config(): - assert len(get_example_config()) > 0 +def test_get_example_expedition(): + assert len(get_example_expedition()) > 0 -def test_get_example_schedule(): - assert len(get_example_schedule()) > 0 - - -def test_valid_example_config(tmp_path): - path = tmp_path / "test.yaml" - with open(path, "w") as file: - file.write(get_example_config()) - - ShipConfig.from_yaml(path) - - -def test_valid_example_schedule(tmp_path): +def test_valid_example_expedition(tmp_path): path = tmp_path / "test.yaml" with open(path, "w") as file: - file.write(get_example_schedule()) + file.write(get_example_expedition()) - Schedule.from_yaml(path) + Expedition.from_yaml(path) From bcad99f493d5f0aa99d9dc737d6a924cc3a0f0ee Mon Sep 17 00:00:00 2001 
From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 15:43:12 +0200 Subject: [PATCH 09/15] restructure Expedition model to separate instrument configs and ship speed --- src/virtualship/models/__init__.py | 4 +-- src/virtualship/models/expedition.py | 25 +++++++++++-------- src/virtualship/static/expedition.yaml | 4 +-- .../expedition/expedition_dir/expedition.yaml | 4 +-- 4 files changed, 20 insertions(+), 17 deletions(-) diff --git a/src/virtualship/models/__init__.py b/src/virtualship/models/__init__.py index d7d70cd6..01460efb 100644 --- a/src/virtualship/models/__init__.py +++ b/src/virtualship/models/__init__.py @@ -7,9 +7,9 @@ CTDConfig, DrifterConfig, Expedition, + InstrumentsConfig, InstrumentType, Schedule, - ShipConfig, ShipUnderwaterSTConfig, Waypoint, XBTConfig, @@ -36,10 +36,10 @@ "ShipUnderwaterSTConfig", "DrifterConfig", "XBTConfig", - "ShipConfig", "SpatialRange", "TimeRange", "SpaceTimeRegion", "Spacetime", "Expedition", + "InstrumentsConfig", ] diff --git a/src/virtualship/models/expedition.py b/src/virtualship/models/expedition.py index 1f91d6de..8540d7eb 100644 --- a/src/virtualship/models/expedition.py +++ b/src/virtualship/models/expedition.py @@ -28,7 +28,8 @@ class Expedition(pydantic.BaseModel): """Expedition class, including schedule and ship config.""" schedule: Schedule - ship_config: ShipConfig + instruments_config: InstrumentsConfig + ship_speed_knots: float = pydantic.Field(gt=0.0) model_config = pydantic.ConfigDict(extra="forbid") @@ -42,10 +43,17 @@ def from_yaml(cls, file_path: str) -> Expedition: """Load config from yaml file.""" with open(file_path) as file: data = yaml.safe_load(file) - return Expedition(**data) +class ShipConfig: + """Configuration of the ship.""" + + ship_speed_knots: float = pydantic.Field(gt=0.0) + + model_config = pydantic.ConfigDict(extra="forbid") + + class Schedule(pydantic.BaseModel): """Schedule of the virtual ship.""" @@ -340,13 +348,8 @@ class 
XBTConfig(pydantic.BaseModel): deceleration_coefficient: float = pydantic.Field(gt=0.0) -class ShipConfig(pydantic.BaseModel): - """Configuration of the virtual ship.""" - - ship_speed_knots: float = pydantic.Field(gt=0.0) - """ - Velocity of the ship in knots. - """ +class InstrumentsConfig(pydantic.BaseModel): + """Configuration of instruments.""" argo_float_config: ArgoFloatConfig | None = None """ @@ -401,7 +404,7 @@ class ShipConfig(pydantic.BaseModel): def verify(self, schedule: Schedule) -> None: """ - Verify ship configuration against the schedule. + Verify instrument configurations against the schedule. Removes instrument configs not present in the schedule and checks that all scheduled instruments are configured. Raises ConfigError if any scheduled instrument is missing a config. @@ -430,7 +433,7 @@ def verify(self, schedule: Schedule) -> None: or getattr(self, config_attr) is None ): raise ConfigError( - f"Schedule includes instrument '{inst_type.value}', but ship_config does not provide configuration for it." + f"Schedule includes instrument '{inst_type.value}', but instruments_config does not provide configuration for it." 
) diff --git a/src/virtualship/static/expedition.yaml b/src/virtualship/static/expedition.yaml index a7328510..849a123b 100644 --- a/src/virtualship/static/expedition.yaml +++ b/src/virtualship/static/expedition.yaml @@ -41,8 +41,7 @@ schedule: latitude: 0.03 longitude: 0.03 time: 2023-01-01 03:00:00 -ship_config: - ship_speed_knots: 10.0 +instruments_config: adcp_config: num_bins: 40 max_depth_meter: -1000.0 @@ -72,3 +71,4 @@ ship_config: deceleration_coefficient: 0.00225 ship_underwater_st_config: period_minutes: 5.0 +ship_speed_knots: 10.0 diff --git a/tests/expedition/expedition_dir/expedition.yaml b/tests/expedition/expedition_dir/expedition.yaml index 702c7fd2..edfac696 100644 --- a/tests/expedition/expedition_dir/expedition.yaml +++ b/tests/expedition/expedition_dir/expedition.yaml @@ -17,8 +17,7 @@ schedule: latitude: 0.02 longitude: 0.01 time: 2023-01-02 03:00:00 -ship_config: - ship_speed_knots: 10.0 +instruments_config: adcp_config: num_bins: 40 max_depth_meter: -1000.0 @@ -43,3 +42,4 @@ ship_config: lifetime_minutes: 40320.0 ship_underwater_st_config: period_minutes: 5.0 +ship_speed_knots: 10.0 From 4cc68db7e33c8204e65bfe2a9f6fc043bba30188 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 15:43:42 +0200 Subject: [PATCH 10/15] update workflows to reflect new Expedition model structure --- src/virtualship/cli/_fetch.py | 6 +- src/virtualship/cli/_plan.py | 69 +++++++++++-------- src/virtualship/expedition/do_expedition.py | 45 +++++------- .../expedition/simulate_measurements.py | 23 +++---- .../expedition/simulate_schedule.py | 59 ++++++++-------- src/virtualship/utils.py | 17 +++-- 6 files changed, 114 insertions(+), 105 deletions(-) diff --git a/src/virtualship/cli/_fetch.py b/src/virtualship/cli/_fetch.py index 871503ee..a2ed6bb1 100644 --- a/src/virtualship/cli/_fetch.py +++ b/src/virtualship/cli/_fetch.py @@ -51,7 +51,7 @@ def _fetch(path: str | Path, username: str | None, password: str | 
None) -> None expedition = _get_expedition(path) expedition.schedule.verify( - expedition.ship_config.ship_speed_knots, + expedition.ship_speed_knots, input_data=None, check_space_time_region=True, ignore_missing_fieldsets=True, @@ -91,8 +91,8 @@ def _fetch(path: str | Path, username: str | None, password: str | None) -> None {"XBT", "CTD", "CDT_BGC", "SHIP_UNDERWATER_ST"} & set(instrument.name for instrument in instruments_in_schedule) ) - or expedition.ship_config.ship_underwater_st_config is not None - or expedition.ship_config.adcp_config is not None + or expedition.instruments_config.ship_underwater_st_config is not None + or expedition.instruments_config.adcp_config is not None ): print("Ship data will be downloaded. Please wait...") diff --git a/src/virtualship/cli/_plan.py b/src/virtualship/cli/_plan.py index 7c3e6646..2c0baf48 100644 --- a/src/virtualship/cli/_plan.py +++ b/src/virtualship/cli/_plan.py @@ -38,7 +38,6 @@ Expedition, InstrumentType, Location, - ShipConfig, ShipUnderwaterSTConfig, SpatialRange, TimeRange, @@ -169,25 +168,29 @@ def compose(self) -> ComposeResult: ) from None try: - ## 1) SHIP CONFIG EDITOR + ## 1) SHIP SPEED & INSTRUMENTS CONFIG EDITOR yield Label( - "[b]Ship Config Editor[/b]", id="title_ship_config", markup=True + "[b]Ship & Instruments Config Editor[/b]", + id="title_ship_instruments_config", + markup=True, ) yield Rule(line_style="heavy") # SECTION: "Ship Speed & Onboard Measurements" with Collapsible( - title="[b]Ship Speed & Onboard Measurements[/b]", id="speed_collapsible" + title="[b]Ship Speed & Onboard Measurements[/b]", + id="speed_collapsible", + collapsed=False, ): attr = "ship_speed_knots" - validators = group_validators(ShipConfig, attr) + validators = group_validators(Expedition, attr) with Horizontal(classes="ship_speed"): yield Label("[b]Ship Speed (knots):[/b]") yield Input( id="speed", - type=type_to_textual(get_field_type(ShipConfig, attr)), + type=type_to_textual(get_field_type(Expedition, attr)), 
validators=[ Function( validator, @@ -198,8 +201,8 @@ def compose(self) -> ComposeResult: classes="ship_speed_input", placeholder="knots", value=str( - self.expedition.ship_config.ship_speed_knots - if self.expedition.ship_config.ship_speed_knots + self.expedition.ship_speed_knots + if self.expedition.ship_speed_knots else "" ), ) @@ -209,7 +212,7 @@ def compose(self) -> ComposeResult: yield Label("[b]Onboard Temperature/Salinity:[/b]") yield Switch( value=bool( - self.expedition.ship_config.ship_underwater_st_config + self.expedition.instruments_config.ship_underwater_st_config ), id="has_onboard_ts", ) @@ -217,15 +220,15 @@ def compose(self) -> ComposeResult: with Horizontal(classes="adcp-section"): yield Label("[b]Onboard ADCP:[/b]") yield Switch( - value=bool(self.expedition.ship_config.adcp_config), + value=bool(self.expedition.instruments_config.adcp_config), id="has_adcp", ) # adcp type selection with Horizontal(id="adcp_type_container", classes="-hidden"): is_deep = ( - self.expedition.ship_config.adcp_config - and self.expedition.ship_config.adcp_config.max_depth_meter + self.expedition.instruments_config.adcp_config + and self.expedition.instruments_config.adcp_config.max_depth_meter == -1000.0 ) yield Label(" OceanObserver:") @@ -243,8 +246,9 @@ def compose(self) -> ComposeResult: for instrument_name, info in INSTRUMENT_FIELDS.items(): config_class = info["class"] attributes = info["attributes"] + # instrument-specific configs now live under instruments_config config_instance = getattr( - self.expedition.ship_config, instrument_name, None + self.expedition.instruments_config, instrument_name, None ) title = info.get("title", instrument_name.replace("_", " ").title()) with Collapsible( @@ -505,8 +509,8 @@ def compose(self) -> ComposeResult: def on_mount(self) -> None: self.refresh_waypoint_widgets() adcp_present = ( - getattr(self.expedition.ship_config, "adcp_config", None) - if self.expedition.ship_config + getattr(self.expedition.instruments_config, 
"adcp_config", None) + if self.expedition.instruments_config else False ) self.show_hide_adcp_type(bool(adcp_present)) @@ -520,7 +524,7 @@ def refresh_waypoint_widgets(self): def save_changes(self) -> bool: """Save changes to expedition.yaml.""" try: - self._update_ship_config() + self._update_ship_speed() self._update_instrument_configs() self._update_schedule() self.expedition.to_yaml(self.path.joinpath(EXPEDITION)) @@ -536,14 +540,18 @@ def save_changes(self) -> bool: + f"\n\nTraceback will be logged in {self.path}/virtualship_error.txt. Please attach this/copy the contents to any issue submitted." ) from None - def _update_ship_config(self): + def _update_ship_speed(self): attr = "ship_speed_knots" - field_type = get_field_type(type(self.expedition.ship_config), attr) + field_type = get_field_type(Expedition, attr) value = field_type(self.query_one("#speed").value) - ShipConfig.model_validate( - {**self.expedition.ship_config.model_dump(), attr: value} - ) - self.expedition.ship_config.ship_speed_knots = value + try: + if not (value > 0): + raise ValueError("ship_speed_knots must be greater than 0") + except TypeError: + raise UnexpectedError("Invalid ship speed value") from None + + # persist to the Expedition instance + self.expedition.ship_speed_knots = value def _update_instrument_configs(self): for instrument_name, info in INSTRUMENT_FIELDS.items(): @@ -554,12 +562,12 @@ def _update_instrument_configs(self): if instrument_name == "adcp_config": has_adcp = self.query_one("#has_adcp", Switch).value if not has_adcp: - setattr(self.expedition.ship_config, instrument_name, None) + setattr(self.expedition.instruments_config, instrument_name, None) continue if instrument_name == "ship_underwater_st_config": has_ts = self.query_one("#has_onboard_ts", Switch).value if not has_ts: - setattr(self.expedition.ship_config, instrument_name, None) + setattr(self.expedition.instruments_config, instrument_name, None) continue for attr_meta in attributes: attr = 
attr_meta["name"] @@ -579,7 +587,9 @@ def _update_instrument_configs(self): else: kwargs["max_depth_meter"] = -150.0 setattr( - self.expedition.ship_config, instrument_name, config_class(**kwargs) + self.expedition.instruments_config, + instrument_name, + config_class(**kwargs), ) def _update_schedule(self): @@ -742,13 +752,16 @@ def _set_ts_default_values(self): @on(Switch.Changed, "#has_adcp") def on_adcp_toggle(self, event: Switch.Changed) -> None: self.show_hide_adcp_type(event.value) - if event.value and not self.expedition.ship_config.adcp_config: + if event.value and not self.expedition.instruments_config.adcp_config: # ADCP was turned on and was previously null self._set_adcp_default_values() @on(Switch.Changed, "#has_onboard_ts") def on_ts_toggle(self, event: Switch.Changed) -> None: - if event.value and not self.expedition.ship_config.ship_underwater_st_config: + if ( + event.value + and not self.expedition.instruments_config.ship_underwater_st_config + ): # T/S was turned on and was previously null self._set_ts_default_values() @@ -1152,7 +1165,7 @@ class PlanApp(App): margin: 0 1; } - #title_ship_config { + #title_ship_instruments_config { text-style: bold; padding: 1; } diff --git a/src/virtualship/expedition/do_expedition.py b/src/virtualship/expedition/do_expedition.py index b1e44dc4..4960f59f 100644 --- a/src/virtualship/expedition/do_expedition.py +++ b/src/virtualship/expedition/do_expedition.py @@ -7,7 +7,7 @@ import pyproj from virtualship.cli._fetch import get_existing_download, get_space_time_region_hash -from virtualship.models import Schedule, ShipConfig +from virtualship.models import Expedition, Schedule from virtualship.utils import ( CHECKPOINT, _get_expedition, @@ -39,8 +39,8 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> expedition = _get_expedition(expedition_dir) - # Verify ship_config file is consistent with schedule - expedition.ship_config.verify(expedition.schedule) + # Verify 
instruments_config file is consistent with schedule + expedition.instruments_config.verify(expedition.schedule) # load last checkpoint checkpoint = _load_checkpoint(expedition_dir) @@ -53,17 +53,14 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> # load fieldsets loaded_input_data = _load_input_data( expedition_dir=expedition_dir, - schedule=expedition.schedule, - ship_config=expedition.ship_config, + expedition=expedition, input_data=input_data, ) print("\n---- WAYPOINT VERIFICATION ----") # verify schedule is valid - expedition.schedule.verify( - expedition.ship_config.ship_speed_knots, loaded_input_data - ) + expedition.schedule.verify(expedition.ship_speed_knots, loaded_input_data) # simulate the schedule schedule_results = simulate_schedule( @@ -109,7 +106,7 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> print("\nSimulating measurements. This may take a while...\n") simulate_measurements( expedition_dir, - expedition.ship_config, + expedition.instruments_config, loaded_input_data, schedule_results.measurements_to_simulate, ) @@ -125,26 +122,21 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> def _load_input_data( expedition_dir: Path, - schedule: Schedule, - ship_config: ShipConfig, + expedition: Expedition, input_data: Path | None, ) -> InputData: """ Load the input data. :param expedition_dir: Directory of the expedition. - :type expedition_dir: Path - :param schedule: Schedule object. - :type schedule: Schedule - :param ship_config: Ship configuration. - :type ship_config: ShipConfig + :param expedition: Expedition object. :param input_data: Folder containing input data. - :type input_data: Path | None :return: InputData object. 
- :rtype: InputData """ if input_data is None: - space_time_region_hash = get_space_time_region_hash(schedule.space_time_region) + space_time_region_hash = get_space_time_region_hash( + expedition.schedule.space_time_region + ) input_data = get_existing_download(expedition_dir, space_time_region_hash) assert input_data is not None, ( @@ -153,13 +145,14 @@ def _load_input_data( return InputData.load( directory=input_data, - load_adcp=ship_config.adcp_config is not None, - load_argo_float=ship_config.argo_float_config is not None, - load_ctd=ship_config.ctd_config is not None, - load_ctd_bgc=ship_config.ctd_bgc_config is not None, - load_drifter=ship_config.drifter_config is not None, - load_xbt=ship_config.xbt_config is not None, - load_ship_underwater_st=ship_config.ship_underwater_st_config is not None, + load_adcp=expedition.instruments_config.adcp_config is not None, + load_argo_float=expedition.instruments_config.argo_float_config is not None, + load_ctd=expedition.instruments_config.ctd_config is not None, + load_ctd_bgc=expedition.instruments_config.ctd_bgc_config is not None, + load_drifter=expedition.instruments_config.drifter_config is not None, + load_xbt=expedition.instruments_config.xbt_config is not None, + load_ship_underwater_st=expedition.instruments_config.ship_underwater_st_config + is not None, ) diff --git a/src/virtualship/expedition/simulate_measurements.py b/src/virtualship/expedition/simulate_measurements.py index 20ba2cdb..6cb2e488 100644 --- a/src/virtualship/expedition/simulate_measurements.py +++ b/src/virtualship/expedition/simulate_measurements.py @@ -16,7 +16,7 @@ from virtualship.instruments.drifter import simulate_drifters from virtualship.instruments.ship_underwater_st import simulate_ship_underwater_st from virtualship.instruments.xbt import simulate_xbt -from virtualship.models import ShipConfig +from virtualship.models import InstrumentsConfig from virtualship.utils import ship_spinner from .simulate_schedule import 
MeasurementsToSimulate @@ -31,7 +31,7 @@ def simulate_measurements( expedition_dir: str | Path, - ship_config: ShipConfig, + instruments_config: InstrumentsConfig, input_data: InputData, measurements: MeasurementsToSimulate, ) -> None: @@ -41,7 +41,6 @@ def simulate_measurements( Saves everything in expedition_dir/results. :param expedition_dir: Base directory of the expedition. - :param ship_config: Ship configuration. :param input_data: Input data for simulation. :param measurements: The measurements to simulate. :raises RuntimeError: In case fieldsets of configuration is not provided. Make sure to check this before calling this function. @@ -50,7 +49,7 @@ def simulate_measurements( expedition_dir = Path(expedition_dir) if len(measurements.ship_underwater_sts) > 0: - if ship_config.ship_underwater_st_config is None: + if instruments_config.ship_underwater_st_config is None: raise RuntimeError("No configuration for ship underwater ST provided.") if input_data.ship_underwater_st_fieldset is None: raise RuntimeError("No fieldset for ship underwater ST provided.") @@ -68,7 +67,7 @@ def simulate_measurements( spinner.ok("āœ…") if len(measurements.adcps) > 0: - if ship_config.adcp_config is None: + if instruments_config.adcp_config is None: raise RuntimeError("No configuration for ADCP provided.") if input_data.adcp_fieldset is None: raise RuntimeError("No fieldset for ADCP provided.") @@ -78,15 +77,15 @@ def simulate_measurements( simulate_adcp( fieldset=input_data.adcp_fieldset, out_path=expedition_dir.joinpath("results", "adcp.zarr"), - max_depth=ship_config.adcp_config.max_depth_meter, + max_depth=instruments_config.adcp_config.max_depth_meter, min_depth=-5, - num_bins=ship_config.adcp_config.num_bins, + num_bins=instruments_config.adcp_config.num_bins, sample_points=measurements.adcps, ) spinner.ok("āœ…") if len(measurements.ctds) > 0: - if ship_config.ctd_config is None: + if instruments_config.ctd_config is None: raise RuntimeError("No configuration for CTD 
provided.") if input_data.ctd_fieldset is None: raise RuntimeError("No fieldset for CTD provided.") @@ -102,7 +101,7 @@ def simulate_measurements( spinner.ok("āœ…") if len(measurements.ctd_bgcs) > 0: - if ship_config.ctd_bgc_config is None: + if instruments_config.ctd_bgc_config is None: raise RuntimeError("No configuration for CTD_BGC provided.") if input_data.ctd_bgc_fieldset is None: raise RuntimeError("No fieldset for CTD_BGC provided.") @@ -118,7 +117,7 @@ def simulate_measurements( spinner.ok("āœ…") if len(measurements.xbts) > 0: - if ship_config.xbt_config is None: + if instruments_config.xbt_config is None: raise RuntimeError("No configuration for XBTs provided.") if input_data.xbt_fieldset is None: raise RuntimeError("No fieldset for XBTs provided.") @@ -135,7 +134,7 @@ def simulate_measurements( if len(measurements.drifters) > 0: print("Simulating drifters... ") - if ship_config.drifter_config is None: + if instruments_config.drifter_config is None: raise RuntimeError("No configuration for drifters provided.") if input_data.drifter_fieldset is None: raise RuntimeError("No fieldset for drifters provided.") @@ -150,7 +149,7 @@ def simulate_measurements( if len(measurements.argo_floats) > 0: print("Simulating argo floats... ") - if ship_config.argo_float_config is None: + if instruments_config.argo_float_config is None: raise RuntimeError("No configuration for argo floats provided.") if input_data.argo_float_fieldset is None: raise RuntimeError("No fieldset for argo floats provided.") diff --git a/src/virtualship/expedition/simulate_schedule.py b/src/virtualship/expedition/simulate_schedule.py index 9198d7b3..4e8a2fa1 100644 --- a/src/virtualship/expedition/simulate_schedule.py +++ b/src/virtualship/expedition/simulate_schedule.py @@ -57,8 +57,7 @@ def simulate_schedule( Simulate a schedule. :param projection: The projection to use for sailing. - :param ship_config: Ship configuration. - :param schedule: The schedule to simulate. 
+ :param expedition: Expedition object containing the schedule to simulate. :returns: Either the results of a successfully simulated schedule, or information on where the schedule became infeasible. """ return _ScheduleSimulator(projection, expedition).simulate() @@ -126,9 +125,7 @@ def _progress_time_traveling_towards(self, location: Location) -> None: lons2=location.lon, lats2=location.lat, ) - ship_speed_meter_per_second = ( - self._expedition.ship_config.ship_speed_knots * 1852 / 3600 - ) + ship_speed_meter_per_second = self._expedition.ship_speed_knots * 1852 / 3600 azimuth1 = geodinv[0] distance_to_next_waypoint = geodinv[2] time_to_reach = timedelta( @@ -137,7 +134,7 @@ def _progress_time_traveling_towards(self, location: Location) -> None: end_time = self._time + time_to_reach # note all ADCP measurements - if self._expedition.ship_config.adcp_config is not None: + if self._expedition.instruments_config.adcp_config is not None: location = self._location time = self._time while self._next_adcp_time <= end_time: @@ -160,11 +157,11 @@ def _progress_time_traveling_towards(self, location: Location) -> None: self._next_adcp_time = ( self._next_adcp_time - + self._expedition.ship_config.adcp_config.period + + self._expedition.instruments_config.adcp_config.period ) # note all ship underwater ST measurements - if self._expedition.ship_config.ship_underwater_st_config is not None: + if self._expedition.instruments_config.ship_underwater_st_config is not None: location = self._location time = self._time while self._next_ship_underwater_st_time <= end_time: @@ -187,7 +184,7 @@ def _progress_time_traveling_towards(self, location: Location) -> None: self._next_ship_underwater_st_time = ( self._next_ship_underwater_st_time - + self._expedition.ship_config.ship_underwater_st_config.period + + self._expedition.instruments_config.ship_underwater_st_config.period ) self._time = end_time @@ -197,25 +194,25 @@ def _progress_time_stationary(self, time_passed: timedelta) -> 
None: end_time = self._time + time_passed # note all ADCP measurements - if self._expedition.ship_config.adcp_config is not None: + if self._expedition.instruments_config.adcp_config is not None: while self._next_adcp_time <= end_time: self._measurements_to_simulate.adcps.append( Spacetime(self._location, self._next_adcp_time) ) self._next_adcp_time = ( self._next_adcp_time - + self._expedition.ship_config.adcp_config.period + + self._expedition.instruments_config.adcp_config.period ) # note all ship underwater ST measurements - if self._expedition.ship_config.ship_underwater_st_config is not None: + if self._expedition.instruments_config.ship_underwater_st_config is not None: while self._next_ship_underwater_st_time <= end_time: self._measurements_to_simulate.ship_underwater_sts.append( Spacetime(self._location, self._next_ship_underwater_st_time) ) self._next_ship_underwater_st_time = ( self._next_ship_underwater_st_time - + self._expedition.ship_config.ship_underwater_st_config.period + + self._expedition.instruments_config.ship_underwater_st_config.period ) self._time = end_time @@ -240,52 +237,52 @@ def _make_measurements(self, waypoint: Waypoint) -> timedelta: self._measurements_to_simulate.argo_floats.append( ArgoFloat( spacetime=Spacetime(self._location, self._time), - min_depth=self._expedition.ship_config.argo_float_config.min_depth_meter, - max_depth=self._expedition.ship_config.argo_float_config.max_depth_meter, - drift_depth=self._expedition.ship_config.argo_float_config.drift_depth_meter, - vertical_speed=self._expedition.ship_config.argo_float_config.vertical_speed_meter_per_second, - cycle_days=self._expedition.ship_config.argo_float_config.cycle_days, - drift_days=self._expedition.ship_config.argo_float_config.drift_days, + min_depth=self._expedition.instruments_config.argo_float_config.min_depth_meter, + max_depth=self._expedition.instruments_config.argo_float_config.max_depth_meter, + 
drift_depth=self._expedition.instruments_config.argo_float_config.drift_depth_meter, + vertical_speed=self._expedition.instruments_config.argo_float_config.vertical_speed_meter_per_second, + cycle_days=self._expedition.instruments_config.argo_float_config.cycle_days, + drift_days=self._expedition.instruments_config.argo_float_config.drift_days, ) ) elif instrument is InstrumentType.CTD: self._measurements_to_simulate.ctds.append( CTD( spacetime=Spacetime(self._location, self._time), - min_depth=self._expedition.ship_config.ctd_config.min_depth_meter, - max_depth=self._expedition.ship_config.ctd_config.max_depth_meter, + min_depth=self._expedition.instruments_config.ctd_config.min_depth_meter, + max_depth=self._expedition.instruments_config.ctd_config.max_depth_meter, ) ) time_costs.append( - self._expedition.ship_config.ctd_config.stationkeeping_time + self._expedition.instruments_config.ctd_config.stationkeeping_time ) elif instrument is InstrumentType.CTD_BGC: self._measurements_to_simulate.ctd_bgcs.append( CTD_BGC( spacetime=Spacetime(self._location, self._time), - min_depth=self._expedition.ship_config.ctd_bgc_config.min_depth_meter, - max_depth=self._expedition.ship_config.ctd_bgc_config.max_depth_meter, + min_depth=self._expedition.instruments_config.ctd_bgc_config.min_depth_meter, + max_depth=self._expedition.instruments_config.ctd_bgc_config.max_depth_meter, ) ) time_costs.append( - self._expedition.ship_config.ctd_bgc_config.stationkeeping_time + self._expedition.instruments_config.ctd_bgc_config.stationkeeping_time ) elif instrument is InstrumentType.DRIFTER: self._measurements_to_simulate.drifters.append( Drifter( spacetime=Spacetime(self._location, self._time), - depth=self._expedition.ship_config.drifter_config.depth_meter, - lifetime=self._expedition.ship_config.drifter_config.lifetime, + depth=self._expedition.instruments_config.drifter_config.depth_meter, + lifetime=self._expedition.instruments_config.drifter_config.lifetime, ) ) elif instrument is 
InstrumentType.XBT: self._measurements_to_simulate.xbts.append( XBT( spacetime=Spacetime(self._location, self._time), - min_depth=self._expedition.ship_config.xbt_config.min_depth_meter, - max_depth=self._expedition.ship_config.xbt_config.max_depth_meter, - fall_speed=self._expedition.ship_config.xbt_config.fall_speed_meter_per_second, - deceleration_coefficient=self._expedition.ship_config.xbt_config.deceleration_coefficient, + min_depth=self._expedition.instruments_config.xbt_config.min_depth_meter, + max_depth=self._expedition.instruments_config.xbt_config.max_depth_meter, + fall_speed=self._expedition.instruments_config.xbt_config.fall_speed_meter_per_second, + deceleration_coefficient=self._expedition.instruments_config.xbt_config.deceleration_coefficient, ) ) else: diff --git a/src/virtualship/utils.py b/src/virtualship/utils.py index 911d1d42..e620cf8b 100644 --- a/src/virtualship/utils.py +++ b/src/virtualship/utils.py @@ -130,9 +130,9 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 # avoid circular imports from virtualship.models import ( Expedition, + InstrumentsConfig, Location, Schedule, - ShipConfig, SpaceTimeRegion, SpatialRange, TimeRange, @@ -184,13 +184,20 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 space_time_region=space_time_region, ) - # extract ship config object from static - config = ShipConfig.model_validate( - yaml.safe_load(get_example_expedition()).get("ship_config") + # extract instruments config from static + instruments_config = InstrumentsConfig.model_validate( + yaml.safe_load(get_example_expedition()).get("instruments_config") ) + # extract ship speed from static + ship_speed_knots = yaml.safe_load(get_example_expedition()).get("ship_speed_knots") + # combine to Expedition object - expedition = Expedition(schedule=schedule, ship_config=config) + expedition = Expedition( + schedule=schedule, + instruments_config=instruments_config, + 
ship_speed_knots=ship_speed_knots, + ) # Save to YAML file expedition.to_yaml(yaml_output_path) From 63b29a7a4bf0bca24032eeac260649a4a94e53c9 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 15:44:05 +0200 Subject: [PATCH 11/15] update tests for new Expedition model structure --- tests/cli/test_plan.py | 2 +- tests/expedition/test_expedition.py | 94 +++++++++++----------- tests/expedition/test_simulate_schedule.py | 18 +++-- 3 files changed, 61 insertions(+), 53 deletions(-) diff --git a/tests/cli/test_plan.py b/tests/cli/test_plan.py index 421feba0..f5a379b7 100644 --- a/tests/cli/test_plan.py +++ b/tests/cli/test_plan.py @@ -106,7 +106,7 @@ async def test_UI_changes(): with open(expedition_path) as f: saved_expedition = yaml.safe_load(f) - assert saved_expedition["ship_config"]["ship_speed_knots"] == float(NEW_SPEED) + assert saved_expedition["ship_speed_knots"] == float(NEW_SPEED) # check schedule.verify() methods are working by purposefully making invalid schedule (i.e. 
ship speed too slow to reach waypoints) invalid_speed = "0.0001" diff --git a/tests/expedition/test_expedition.py b/tests/expedition/test_expedition.py index 979f055b..8ddbadb9 100644 --- a/tests/expedition/test_expedition.py +++ b/tests/expedition/test_expedition.py @@ -30,8 +30,12 @@ def test_import_export_expedition(tmpdir) -> None: ), ] ) - ship_config = _get_expedition(expedition_dir).ship_config - expedition = Expedition(schedule=schedule, ship_config=ship_config) + get_expedition = _get_expedition(expedition_dir) + expedition = Expedition( + schedule=schedule, + instruments_config=get_expedition.instruments_config, + ship_speed_knots=get_expedition.ship_speed_knots, + ) expedition.to_yaml(out_path) expedition2 = Expedition.from_yaml(out_path) @@ -46,9 +50,9 @@ def test_verify_schedule() -> None: ] ) - ship_config = _get_expedition(expedition_dir).ship_config + ship_speed_knots = _get_expedition(expedition_dir).ship_speed_knots - schedule.verify(ship_config.ship_speed_knots, None) + schedule.verify(ship_speed_knots, None) def test_get_instruments() -> None: @@ -145,18 +149,16 @@ def test_get_instruments() -> None: def test_verify_schedule_errors( schedule: Schedule, check_space_time_region: bool, error, match ) -> None: - ship_config = _get_expedition(expedition_dir).ship_config - + expedition = _get_expedition(expedition_dir) input_data = _load_input_data( expedition_dir, - schedule, - ship_config, + expedition, input_data=Path("expedition_dir/input_data"), ) with pytest.raises(error, match=match): schedule.verify( - ship_config.ship_speed_knots, + expedition.ship_speed_knots, input_data, check_space_time_region=check_space_time_region, ) @@ -185,89 +187,91 @@ def schedule_no_xbt(schedule): @pytest.fixture -def ship_config(tmp_file): +def instruments_config(tmp_file): with open(tmp_file, "w") as file: file.write(get_example_expedition()) - return Expedition.from_yaml(tmp_file).ship_config + return Expedition.from_yaml(tmp_file).instruments_config 
@pytest.fixture -def ship_config_no_xbt(ship_config): - delattr(ship_config, "xbt_config") - return ship_config +def instruments_config_no_xbt(instruments_config): + delattr(instruments_config, "xbt_config") + return instruments_config @pytest.fixture -def ship_config_no_ctd(ship_config): - delattr(ship_config, "ctd_config") - return ship_config +def instruments_config_no_ctd(instruments_config): + delattr(instruments_config, "ctd_config") + return instruments_config @pytest.fixture -def ship_config_no_ctd_bgc(ship_config): - delattr(ship_config, "ctd_bgc_config") - return ship_config +def instruments_config_no_ctd_bgc(instruments_config): + delattr(instruments_config, "ctd_bgc_config") + return instruments_config @pytest.fixture -def ship_config_no_argo_float(ship_config): - delattr(ship_config, "argo_float_config") - return ship_config +def instruments_config_no_argo_float(instruments_config): + delattr(instruments_config, "argo_float_config") + return instruments_config @pytest.fixture -def ship_config_no_drifter(ship_config): - delattr(ship_config, "drifter_config") - return ship_config +def instruments_config_no_drifter(instruments_config): + delattr(instruments_config, "drifter_config") + return instruments_config -def test_verify_ship_config(ship_config, schedule) -> None: - ship_config.verify(schedule) +def test_verify_instruments_config(instruments_config, schedule) -> None: + instruments_config.verify(schedule) -def test_verify_ship_config_no_instrument(ship_config, schedule_no_xbt) -> None: - ship_config.verify(schedule_no_xbt) +def test_verify_instruments_config_no_instrument( + instruments_config, schedule_no_xbt +) -> None: + instruments_config.verify(schedule_no_xbt) @pytest.mark.parametrize( - "ship_config_fixture,error,match", + "instruments_config_fixture,error,match", [ pytest.param( - "ship_config_no_xbt", + "instruments_config_no_xbt", ConfigError, - "Schedule includes instrument 'XBT', but ship_config does not provide configuration for it.", + 
"Schedule includes instrument 'XBT', but instruments_config does not provide configuration for it.", id="ShipConfigNoXBT", ), pytest.param( - "ship_config_no_ctd", + "instruments_config_no_ctd", ConfigError, - "Schedule includes instrument 'CTD', but ship_config does not provide configuration for it.", + "Schedule includes instrument 'CTD', but instruments_config does not provide configuration for it.", id="ShipConfigNoCTD", ), pytest.param( - "ship_config_no_ctd_bgc", + "instruments_config_no_ctd_bgc", ConfigError, - "Schedule includes instrument 'CTD_BGC', but ship_config does not provide configuration for it.", + "Schedule includes instrument 'CTD_BGC', but instruments_config does not provide configuration for it.", id="ShipConfigNoCTD_BGC", ), pytest.param( - "ship_config_no_argo_float", + "instruments_config_no_argo_float", ConfigError, - "Schedule includes instrument 'ARGO_FLOAT', but ship_config does not provide configuration for it.", + "Schedule includes instrument 'ARGO_FLOAT', but instruments_config does not provide configuration for it.", id="ShipConfigNoARGO_FLOAT", ), pytest.param( - "ship_config_no_drifter", + "instruments_config_no_drifter", ConfigError, - "Schedule includes instrument 'DRIFTER', but ship_config does not provide configuration for it.", + "Schedule includes instrument 'DRIFTER', but instruments_config does not provide configuration for it.", id="ShipConfigNoDRIFTER", ), ], ) -def test_verify_ship_config_errors( - request, schedule, ship_config_fixture, error, match +def test_verify_instruments_config_errors( + request, schedule, instruments_config_fixture, error, match ) -> None: - ship_config = request.getfixturevalue(ship_config_fixture) + instruments_config = request.getfixturevalue(instruments_config_fixture) with pytest.raises(error, match=match): - ship_config.verify(schedule) + instruments_config.verify(schedule) diff --git a/tests/expedition/test_simulate_schedule.py b/tests/expedition/test_simulate_schedule.py index 
f3f1b94a..ad2abfc8 100644 --- a/tests/expedition/test_simulate_schedule.py +++ b/tests/expedition/test_simulate_schedule.py @@ -16,7 +16,7 @@ def test_simulate_schedule_feasible() -> None: projection = pyproj.Geod(ellps="WGS84") expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") - expedition.ship_config.ship_speed_knots = 10.0 + expedition.ship_speed_knots = 10.0 expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time), @@ -35,7 +35,7 @@ def test_simulate_schedule_too_far() -> None: projection = pyproj.Geod(ellps="WGS84") expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") - expedition.ship_config.ship_speed_knots = 10.0 + expedition.ship_speed_knots = 10.0 expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time), @@ -50,8 +50,12 @@ def test_simulate_schedule_too_far() -> None: def test_time_in_minutes_in_ship_schedule() -> None: """Test whether the pydantic serializer picks up the time *in minutes* in the ship schedule.""" - ship_config = Expedition.from_yaml("expedition_dir/expedition.yaml").ship_config - assert ship_config.adcp_config.period == timedelta(minutes=5) - assert ship_config.ctd_config.stationkeeping_time == timedelta(minutes=20) - assert ship_config.ctd_bgc_config.stationkeeping_time == timedelta(minutes=20) - assert ship_config.ship_underwater_st_config.period == timedelta(minutes=5) + instruments_config = Expedition.from_yaml( + "expedition_dir/expedition.yaml" + ).instruments_config + assert instruments_config.adcp_config.period == timedelta(minutes=5) + assert instruments_config.ctd_config.stationkeeping_time == timedelta(minutes=20) + assert instruments_config.ctd_bgc_config.stationkeeping_time == timedelta( + minutes=20 + ) + assert instruments_config.ship_underwater_st_config.period == timedelta(minutes=5) From 9db0d1a55391233bf32b85816ccd0518d02bf9d0 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> 
Date: Mon, 20 Oct 2025 15:50:35 +0200 Subject: [PATCH 12/15] update quickstart guide and utils.mfp_to_yaml docstring to clarify expedition.yaml content --- docs/user-guide/quickstart.md | 2 +- src/virtualship/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user-guide/quickstart.md b/docs/user-guide/quickstart.md index 04dbea90..dabc324f 100644 --- a/docs/user-guide/quickstart.md +++ b/docs/user-guide/quickstart.md @@ -46,7 +46,7 @@ virtualship init EXPEDITION_NAME --from-mfp CoordinatesExport.xlsx The `CoordinatesExport.xlsx` in the `virtualship init` command refers to the .xlsx file exported from MFP. Replace the filename with the name of your exported .xlsx file (and make sure to move it from the Downloads to the folder/directory in which you are running the expedition). ``` -This will create a folder/directory called `EXPEDITION_NAME` with a single file: `expedition.yaml` containing details on both the expedition schedule and ship configuration, based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indicates that the exported coordinates will be used. +This will create a folder/directory called `EXPEDITION_NAME` with a single file: `expedition.yaml` containing details on the ship speed and instrument configurations, as well as the expedition schedule based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indicates that the exported coordinates will be used. ```{note} For advanced users: it is also possible to run the expedition initialisation step without an MFP .xlsx export file. In this case you should simply run `virtualship init EXPEDITION_NAME` in the CLI. This will write an example `expedition.yaml` file in the `EXPEDITION_NAME` folder/directory. 
This file contains example waypoints, timings, instrument selections, and ship configuration, but can be edited or propagated through the rest of the workflow unedited to run a sample expedition. diff --git a/src/virtualship/utils.py b/src/virtualship/utils.py index e620cf8b..1f55f6f3 100644 --- a/src/virtualship/utils.py +++ b/src/virtualship/utils.py @@ -114,7 +114,7 @@ def validate_coordinates(coordinates_data): def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D417 """ - Generates an expedition.yaml file with spatial and temporal information based on instrument data from MFP excel file. Ship config portion of the YAML file is sourced from static version. + Generates an expedition.yaml file with schedule information based on data from MFP excel file. The ship speed and instrument configurations entries in the YAML file are sourced from the static version. Parameters ---------- From 193a713a4be029c70707b2ab951119a02cb36097 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 16:16:56 +0200 Subject: [PATCH 13/15] fix argo tutorial error output --- .../tutorials/Argo_data_tutorial.ipynb | 34 ++++--------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/docs/user-guide/tutorials/Argo_data_tutorial.ipynb b/docs/user-guide/tutorials/Argo_data_tutorial.ipynb index 81dbcd27..e8235315 100644 --- a/docs/user-guide/tutorials/Argo_data_tutorial.ipynb +++ b/docs/user-guide/tutorials/Argo_data_tutorial.ipynb @@ -14,7 +14,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -55,35 +55,15 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [ { - "ename": "FileNotFoundError", - "evalue": "No such file or directory: '/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr'", - "output_type": "error", - "traceback": [ - 
"\u001b[31m---------------------------------------------------------------------------\u001b[39m", - "\u001b[31mKeyError\u001b[39m Traceback (most recent call last)", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1844\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1843\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1844\u001b[39m zarr_root_group = \u001b[43mzarr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_consolidated\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mopen_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1845\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mValueError\u001b[39;00m, \u001b[38;5;167;01mKeyError\u001b[39;00m):\n\u001b[32m 1846\u001b[39m \u001b[38;5;66;03m# ValueError in zarr-python 3.x, KeyError in 2.x.\u001b[39;00m\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/convenience.py:1362\u001b[39m, in \u001b[36mopen_consolidated\u001b[39m\u001b[34m(store, metadata_key, mode, **kwargs)\u001b[39m\n\u001b[32m 1361\u001b[39m \u001b[38;5;66;03m# setup metadata store\u001b[39;00m\n\u001b[32m-> \u001b[39m\u001b[32m1362\u001b[39m meta_store = \u001b[43mConsolidatedStoreClass\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1364\u001b[39m \u001b[38;5;66;03m# pass through\u001b[39;00m\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/storage.py:3045\u001b[39m, in 
\u001b[36mConsolidatedMetadataStore.__init__\u001b[39m\u001b[34m(self, store, metadata_key)\u001b[39m\n\u001b[32m 3044\u001b[39m \u001b[38;5;66;03m# retrieve consolidated metadata\u001b[39;00m\n\u001b[32m-> \u001b[39m\u001b[32m3045\u001b[39m meta = json_loads(\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m[\u001b[49m\u001b[43mmetadata_key\u001b[49m\u001b[43m]\u001b[49m)\n\u001b[32m 3047\u001b[39m \u001b[38;5;66;03m# check format of consolidated metadata\u001b[39;00m\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/storage.py:1120\u001b[39m, in \u001b[36mDirectoryStore.__getitem__\u001b[39m\u001b[34m(self, key)\u001b[39m\n\u001b[32m 1119\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1120\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key)\n", - "\u001b[31mKeyError\u001b[39m: '.zmetadata'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[31mGroupNotFoundError\u001b[39m Traceback (most recent call last)", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1848\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1847\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1848\u001b[39m zarr_root_group = \u001b[43mzarr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_group\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mopen_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1849\u001b[39m emit_user_level_warning(\n\u001b[32m 1850\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mFailed to open Zarr store 
with consolidated metadata, \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1851\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mbut successfully read with non-consolidated metadata. \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m (...)\u001b[39m\u001b[32m 1861\u001b[39m \u001b[38;5;167;01mRuntimeWarning\u001b[39;00m,\n\u001b[32m 1862\u001b[39m )\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/zarr/hierarchy.py:1578\u001b[39m, in \u001b[36mopen_group\u001b[39m\u001b[34m(store, mode, cache_attrs, synchronizer, path, chunk_store, storage_options, zarr_version, meta_array)\u001b[39m\n\u001b[32m 1577\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m ContainsArrayError(path)\n\u001b[32m-> \u001b[39m\u001b[32m1578\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m GroupNotFoundError(path)\n\u001b[32m 1580\u001b[39m \u001b[38;5;28;01melif\u001b[39;00m mode == \u001b[33m\"\u001b[39m\u001b[33mw\u001b[39m\u001b[33m\"\u001b[39m:\n", - "\u001b[31mGroupNotFoundError\u001b[39m: group not found at path ''", - "\nThe above exception was the direct cause of the following exception:\n", - "\u001b[31mFileNotFoundError\u001b[39m Traceback (most recent call last)", - "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[2]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m ds = \u001b[43mxr\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_zarr\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1565\u001b[39m, in \u001b[36mopen_zarr\u001b[39m\u001b[34m(store, group, synchronizer, chunks, decode_cf, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, consolidated, overwrite_encoded_chunks, chunk_store, storage_options, decode_timedelta, 
use_cftime, zarr_version, zarr_format, use_zarr_fill_value_as_mask, chunked_array_type, from_array_kwargs, create_default_indexes, **kwargs)\u001b[39m\n\u001b[32m 1551\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\n\u001b[32m 1552\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mopen_zarr() got unexpected keyword arguments \u001b[39m\u001b[33m\"\u001b[39m + \u001b[33m\"\u001b[39m\u001b[33m,\u001b[39m\u001b[33m\"\u001b[39m.join(kwargs.keys())\n\u001b[32m 1553\u001b[39m )\n\u001b[32m 1555\u001b[39m backend_kwargs = {\n\u001b[32m 1556\u001b[39m \u001b[33m\"\u001b[39m\u001b[33msynchronizer\u001b[39m\u001b[33m\"\u001b[39m: synchronizer,\n\u001b[32m 1557\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mconsolidated\u001b[39m\u001b[33m\"\u001b[39m: consolidated,\n\u001b[32m (...)\u001b[39m\u001b[32m 1562\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mzarr_format\u001b[39m\u001b[33m\"\u001b[39m: zarr_format,\n\u001b[32m 1563\u001b[39m }\n\u001b[32m-> \u001b[39m\u001b[32m1565\u001b[39m ds = \u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1566\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1567\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1568\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_cf\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_cf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1569\u001b[39m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1570\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1571\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1572\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1573\u001b[39m \u001b[43m \u001b[49m\u001b[43mengine\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mzarr\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 1574\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunks\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1575\u001b[39m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1576\u001b[39m \u001b[43m \u001b[49m\u001b[43mcreate_default_indexes\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcreate_default_indexes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1577\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunked_array_type\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunked_array_type\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1578\u001b[39m \u001b[43m \u001b[49m\u001b[43mfrom_array_kwargs\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfrom_array_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1579\u001b[39m \u001b[43m \u001b[49m\u001b[43mbackend_kwargs\u001b[49m\u001b[43m=\u001b[49m\u001b[43mbackend_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1580\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1581\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1582\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1583\u001b[39m \u001b[43m 
\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1584\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1585\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/api.py:750\u001b[39m, in \u001b[36mopen_dataset\u001b[39m\u001b[34m(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, create_default_indexes, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)\u001b[39m\n\u001b[32m 738\u001b[39m decoders = _resolve_decoders_kwargs(\n\u001b[32m 739\u001b[39m decode_cf,\n\u001b[32m 740\u001b[39m open_backend_dataset_parameters=backend.open_dataset_parameters,\n\u001b[32m (...)\u001b[39m\u001b[32m 746\u001b[39m decode_coords=decode_coords,\n\u001b[32m 747\u001b[39m )\n\u001b[32m 749\u001b[39m overwrite_encoded_chunks = kwargs.pop(\u001b[33m\"\u001b[39m\u001b[33moverwrite_encoded_chunks\u001b[39m\u001b[33m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[32m--> \u001b[39m\u001b[32m750\u001b[39m backend_ds = \u001b[43mbackend\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 751\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 752\u001b[39m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 753\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mdecoders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 754\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 755\u001b[39m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 756\u001b[39m ds = _dataset_from_backend_dataset(\n\u001b[32m 757\u001b[39m backend_ds,\n\u001b[32m 758\u001b[39m filename_or_obj,\n\u001b[32m (...)\u001b[39m\u001b[32m 769\u001b[39m **kwargs,\n\u001b[32m 770\u001b[39m )\n\u001b[32m 771\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1636\u001b[39m, in \u001b[36mZarrBackendEntrypoint.open_dataset\u001b[39m\u001b[34m(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, zarr_version, zarr_format, store, engine, use_zarr_fill_value_as_mask, cache_members)\u001b[39m\n\u001b[32m 1634\u001b[39m filename_or_obj = _normalize_path(filename_or_obj)\n\u001b[32m 1635\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m store:\n\u001b[32m-> \u001b[39m\u001b[32m1636\u001b[39m store = \u001b[43mZarrStore\u001b[49m\u001b[43m.\u001b[49m\u001b[43mopen_group\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1637\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1638\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1639\u001b[39m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1640\u001b[39m \u001b[43m \u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m=\u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1641\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1642\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1643\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1644\u001b[39m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1645\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1646\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1647\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1648\u001b[39m \u001b[43m \u001b[49m\u001b[43mcache_members\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcache_members\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1649\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1651\u001b[39m store_entrypoint = StoreBackendEntrypoint()\n\u001b[32m 1652\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m close_on_error(store):\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:714\u001b[39m, in \u001b[36mZarrStore.open_group\u001b[39m\u001b[34m(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, align_chunks, zarr_version, zarr_format, use_zarr_fill_value_as_mask, write_empty, cache_members)\u001b[39m\n\u001b[32m 688\u001b[39m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[32m 689\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mopen_group\u001b[39m(\n\u001b[32m 690\u001b[39m 
\u001b[38;5;28mcls\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 707\u001b[39m cache_members: \u001b[38;5;28mbool\u001b[39m = \u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[32m 708\u001b[39m ):\n\u001b[32m 709\u001b[39m (\n\u001b[32m 710\u001b[39m zarr_group,\n\u001b[32m 711\u001b[39m consolidate_on_close,\n\u001b[32m 712\u001b[39m close_store_on_close,\n\u001b[32m 713\u001b[39m use_zarr_fill_value_as_mask,\n\u001b[32m--> \u001b[39m\u001b[32m714\u001b[39m ) = \u001b[43m_get_open_params\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 715\u001b[39m \u001b[43m \u001b[49m\u001b[43mstore\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 716\u001b[39m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 717\u001b[39m \u001b[43m \u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m=\u001b[49m\u001b[43msynchronizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 718\u001b[39m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m=\u001b[49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 719\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidated\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 720\u001b[39m \u001b[43m \u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconsolidate_on_close\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 721\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunk_store\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 722\u001b[39m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 723\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_version\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 724\u001b[39m \u001b[43m 
\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m=\u001b[49m\u001b[43muse_zarr_fill_value_as_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 725\u001b[39m \u001b[43m \u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m=\u001b[49m\u001b[43mzarr_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 726\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 728\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mcls\u001b[39m(\n\u001b[32m 729\u001b[39m zarr_group,\n\u001b[32m 730\u001b[39m mode,\n\u001b[32m (...)\u001b[39m\u001b[32m 739\u001b[39m cache_members=cache_members,\n\u001b[32m 740\u001b[39m )\n", - "\u001b[36mFile \u001b[39m\u001b[32m/opt/anaconda3/envs/ship/lib/python3.12/site-packages/xarray/backends/zarr.py:1864\u001b[39m, in \u001b[36m_get_open_params\u001b[39m\u001b[34m(store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, zarr_version, use_zarr_fill_value_as_mask, zarr_format)\u001b[39m\n\u001b[32m 1849\u001b[39m emit_user_level_warning(\n\u001b[32m 1850\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mFailed to open Zarr store with consolidated metadata, \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1851\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mbut successfully read with non-consolidated metadata. 
\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m (...)\u001b[39m\u001b[32m 1861\u001b[39m \u001b[38;5;167;01mRuntimeWarning\u001b[39;00m,\n\u001b[32m 1862\u001b[39m )\n\u001b[32m 1863\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m missing_exc \u001b[38;5;28;01mas\u001b[39;00m err:\n\u001b[32m-> \u001b[39m\u001b[32m1864\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m(\n\u001b[32m 1865\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mNo such file or directory: \u001b[39m\u001b[33m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstore\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m'\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 1866\u001b[39m ) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01merr\u001b[39;00m\n\u001b[32m 1868\u001b[39m \u001b[38;5;66;03m# but the user should still receive a DataTree whose root is the group they asked for\u001b[39;00m\n\u001b[32m 1869\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m group \u001b[38;5;129;01mand\u001b[39;00m group != \u001b[33m\"\u001b[39m\u001b[33m/\u001b[39m\u001b[33m\"\u001b[39m:\n", - "\u001b[31mFileNotFoundError\u001b[39m: No such file or directory: '/Users/erik/Desktop/VSC_Argo/results/argo_floats.zarr'" + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/erik/anaconda3/envs/parcels/lib/python3.12/site-packages/xarray/coding/times.py:254: RuntimeWarning: invalid value encountered in cast\n", + " flat_num_dates_ns_int = (flat_num_dates * _NS_PER_TIME_DELTA[delta]).astype(\n" ] } ], From 5f71ec001cabf4c24811cc64a71f57da9a0a9614 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 17:34:00 +0200 Subject: [PATCH 14/15] revive ship_config top-level key in expedition.yaml --- src/virtualship/models/__init__.py | 2 ++ src/virtualship/models/expedition.py | 6 ++++-- src/virtualship/static/expedition.yaml | 3 ++- tests/expedition/expedition_dir/expedition.yaml | 3 ++- 4 files 
changed, 10 insertions(+), 4 deletions(-) diff --git a/src/virtualship/models/__init__.py b/src/virtualship/models/__init__.py index 01460efb..a2f1546c 100644 --- a/src/virtualship/models/__init__.py +++ b/src/virtualship/models/__init__.py @@ -10,6 +10,7 @@ InstrumentsConfig, InstrumentType, Schedule, + ShipConfig, ShipUnderwaterSTConfig, Waypoint, XBTConfig, @@ -27,6 +28,7 @@ __all__ = [ # noqa: RUF022 "Location", "Schedule", + "ShipConfig", "Waypoint", "InstrumentType", "ArgoFloatConfig", diff --git a/src/virtualship/models/expedition.py b/src/virtualship/models/expedition.py index 8540d7eb..2e073b84 100644 --- a/src/virtualship/models/expedition.py +++ b/src/virtualship/models/expedition.py @@ -29,7 +29,7 @@ class Expedition(pydantic.BaseModel): schedule: Schedule instruments_config: InstrumentsConfig - ship_speed_knots: float = pydantic.Field(gt=0.0) + ship_config: ShipConfig model_config = pydantic.ConfigDict(extra="forbid") @@ -46,11 +46,13 @@ def from_yaml(cls, file_path: str) -> Expedition: return Expedition(**data) -class ShipConfig: +class ShipConfig(pydantic.BaseModel): """Configuration of the ship.""" ship_speed_knots: float = pydantic.Field(gt=0.0) + # TODO: room here for adding more ship config options in future PRs (e.g. max_days_at_sea)... 
+ model_config = pydantic.ConfigDict(extra="forbid") diff --git a/src/virtualship/static/expedition.yaml b/src/virtualship/static/expedition.yaml index 849a123b..1a9e3922 100644 --- a/src/virtualship/static/expedition.yaml +++ b/src/virtualship/static/expedition.yaml @@ -71,4 +71,5 @@ instruments_config: deceleration_coefficient: 0.00225 ship_underwater_st_config: period_minutes: 5.0 -ship_speed_knots: 10.0 +ship_config: + ship_speed_knots: 10.0 diff --git a/tests/expedition/expedition_dir/expedition.yaml b/tests/expedition/expedition_dir/expedition.yaml index edfac696..9468028f 100644 --- a/tests/expedition/expedition_dir/expedition.yaml +++ b/tests/expedition/expedition_dir/expedition.yaml @@ -42,4 +42,5 @@ instruments_config: lifetime_minutes: 40320.0 ship_underwater_st_config: period_minutes: 5.0 -ship_speed_knots: 10.0 +ship_config: + ship_speed_knots: 10.0 From 19b20770459bdf70f99f091ee8ddbf382f0085a7 Mon Sep 17 00:00:00 2001 From: j-atkins <106238905+j-atkins@users.noreply.github.com> Date: Mon, 20 Oct 2025 17:34:29 +0200 Subject: [PATCH 15/15] update simulation, documentation and tests with revived ship_config --- docs/user-guide/quickstart.md | 2 +- src/virtualship/cli/_fetch.py | 2 +- src/virtualship/cli/_plan.py | 23 ++++++++----------- src/virtualship/expedition/do_expedition.py | 4 +++- .../expedition/simulate_schedule.py | 4 +++- src/virtualship/utils.py | 8 +++---- tests/cli/test_plan.py | 2 +- tests/expedition/test_expedition.py | 6 ++--- tests/expedition/test_simulate_schedule.py | 4 ++-- 9 files changed, 28 insertions(+), 27 deletions(-) diff --git a/docs/user-guide/quickstart.md b/docs/user-guide/quickstart.md index dabc324f..45d4050f 100644 --- a/docs/user-guide/quickstart.md +++ b/docs/user-guide/quickstart.md @@ -46,7 +46,7 @@ virtualship init EXPEDITION_NAME --from-mfp CoordinatesExport.xlsx The `CoordinatesExport.xlsx` in the `virtualship init` command refers to the .xlsx file exported from MFP. 
Replace the filename with the name of your exported .xlsx file (and make sure to move it from the Downloads to the folder/directory in which you are running the expedition). ``` -This will create a folder/directory called `EXPEDITION_NAME` with a single file: `expedition.yaml` containing details on the ship speed and instrument configurations, as well as the expedition schedule based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indicates that the exported coordinates will be used. +This will create a folder/directory called `EXPEDITION_NAME` with a single file: `expedition.yaml` containing details on the ship and instrument configurations, as well as the expedition schedule based on the sampling site coordinates that you specified in your MFP export. The `--from-mfp` flag indicates that the exported coordinates will be used. ```{note} For advanced users: it is also possible to run the expedition initialisation step without an MFP .xlsx export file. In this case you should simply run `virtualship init EXPEDITION_NAME` in the CLI. This will write an example `expedition.yaml` file in the `EXPEDITION_NAME` folder/directory. This file contains example waypoints, timings, instrument selections, and ship configuration, but can be edited or propagated through the rest of the workflow unedited to run a sample expedition. 
diff --git a/src/virtualship/cli/_fetch.py b/src/virtualship/cli/_fetch.py index a2ed6bb1..60008304 100644 --- a/src/virtualship/cli/_fetch.py +++ b/src/virtualship/cli/_fetch.py @@ -51,7 +51,7 @@ def _fetch(path: str | Path, username: str | None, password: str | None) -> None expedition = _get_expedition(path) expedition.schedule.verify( - expedition.ship_speed_knots, + expedition.ship_config.ship_speed_knots, input_data=None, check_space_time_region=True, ignore_missing_fieldsets=True, diff --git a/src/virtualship/cli/_plan.py b/src/virtualship/cli/_plan.py index 2c0baf48..87bfe336 100644 --- a/src/virtualship/cli/_plan.py +++ b/src/virtualship/cli/_plan.py @@ -38,6 +38,7 @@ Expedition, InstrumentType, Location, + ShipConfig, ShipUnderwaterSTConfig, SpatialRange, TimeRange, @@ -185,12 +186,12 @@ def compose(self) -> ComposeResult: collapsed=False, ): attr = "ship_speed_knots" - validators = group_validators(Expedition, attr) + validators = group_validators(ShipConfig, attr) with Horizontal(classes="ship_speed"): yield Label("[b]Ship Speed (knots):[/b]") yield Input( id="speed", - type=type_to_textual(get_field_type(Expedition, attr)), + type=type_to_textual(get_field_type(ShipConfig, attr)), validators=[ Function( validator, @@ -201,8 +202,8 @@ def compose(self) -> ComposeResult: classes="ship_speed_input", placeholder="knots", value=str( - self.expedition.ship_speed_knots - if self.expedition.ship_speed_knots + self.expedition.ship_config.ship_speed_knots + if self.expedition.ship_config.ship_speed_knots else "" ), ) @@ -542,16 +543,12 @@ def save_changes(self) -> bool: def _update_ship_speed(self): attr = "ship_speed_knots" - field_type = get_field_type(Expedition, attr) + field_type = get_field_type(type(self.expedition.ship_config), attr) value = field_type(self.query_one("#speed").value) - try: - if not (value > 0): - raise ValueError("ship_speed_knots must be greater than 0") - except TypeError: - raise UnexpectedError("Invalid ship speed value") from None 
- - # persist to the Expedition instance - self.expedition.ship_speed_knots = value + ShipConfig.model_validate( + {**self.expedition.ship_config.model_dump(), attr: value} + ) + self.expedition.ship_config.ship_speed_knots = value def _update_instrument_configs(self): for instrument_name, info in INSTRUMENT_FIELDS.items(): diff --git a/src/virtualship/expedition/do_expedition.py b/src/virtualship/expedition/do_expedition.py index 4960f59f..5c46d2eb 100644 --- a/src/virtualship/expedition/do_expedition.py +++ b/src/virtualship/expedition/do_expedition.py @@ -60,7 +60,9 @@ def do_expedition(expedition_dir: str | Path, input_data: Path | None = None) -> print("\n---- WAYPOINT VERIFICATION ----") # verify schedule is valid - expedition.schedule.verify(expedition.ship_speed_knots, loaded_input_data) + expedition.schedule.verify( + expedition.ship_config.ship_speed_knots, loaded_input_data + ) # simulate the schedule schedule_results = simulate_schedule( diff --git a/src/virtualship/expedition/simulate_schedule.py b/src/virtualship/expedition/simulate_schedule.py index 4e8a2fa1..3b78c5c7 100644 --- a/src/virtualship/expedition/simulate_schedule.py +++ b/src/virtualship/expedition/simulate_schedule.py @@ -125,7 +125,9 @@ def _progress_time_traveling_towards(self, location: Location) -> None: lons2=location.lon, lats2=location.lat, ) - ship_speed_meter_per_second = self._expedition.ship_speed_knots * 1852 / 3600 + ship_speed_meter_per_second = ( + self._expedition.ship_config.ship_speed_knots * 1852 / 3600 + ) azimuth1 = geodinv[0] distance_to_next_waypoint = geodinv[2] time_to_reach = timedelta( diff --git a/src/virtualship/utils.py b/src/virtualship/utils.py index 1f55f6f3..0a39d035 100644 --- a/src/virtualship/utils.py +++ b/src/virtualship/utils.py @@ -114,7 +114,7 @@ def validate_coordinates(coordinates_data): def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D417 """ - Generates an expedition.yaml file with schedule information based on 
data from MFP excel file. The ship speed and instrument configurations entries in the YAML file are sourced from the static version. + Generates an expedition.yaml file with schedule information based on data from MFP excel file. The ship and instrument configurations entries in the YAML file are sourced from the static version. Parameters ---------- @@ -189,14 +189,14 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 yaml.safe_load(get_example_expedition()).get("instruments_config") ) - # extract ship speed from static - ship_speed_knots = yaml.safe_load(get_example_expedition()).get("ship_speed_knots") + # extract ship config from static + ship_config = yaml.safe_load(get_example_expedition()).get("ship_config") # combine to Expedition object expedition = Expedition( schedule=schedule, instruments_config=instruments_config, - ship_speed_knots=ship_speed_knots, + ship_config=ship_config, ) # Save to YAML file diff --git a/tests/cli/test_plan.py b/tests/cli/test_plan.py index f5a379b7..421feba0 100644 --- a/tests/cli/test_plan.py +++ b/tests/cli/test_plan.py @@ -106,7 +106,7 @@ async def test_UI_changes(): with open(expedition_path) as f: saved_expedition = yaml.safe_load(f) - assert saved_expedition["ship_speed_knots"] == float(NEW_SPEED) + assert saved_expedition["ship_config"]["ship_speed_knots"] == float(NEW_SPEED) # check schedule.verify() methods are working by purposefully making invalid schedule (i.e. 
ship speed too slow to reach waypoints) invalid_speed = "0.0001" diff --git a/tests/expedition/test_expedition.py b/tests/expedition/test_expedition.py index 8ddbadb9..a4643e03 100644 --- a/tests/expedition/test_expedition.py +++ b/tests/expedition/test_expedition.py @@ -34,7 +34,7 @@ def test_import_export_expedition(tmpdir) -> None: expedition = Expedition( schedule=schedule, instruments_config=get_expedition.instruments_config, - ship_speed_knots=get_expedition.ship_speed_knots, + ship_config=get_expedition.ship_config, ) expedition.to_yaml(out_path) @@ -50,7 +50,7 @@ def test_verify_schedule() -> None: ] ) - ship_speed_knots = _get_expedition(expedition_dir).ship_speed_knots + ship_speed_knots = _get_expedition(expedition_dir).ship_config.ship_speed_knots schedule.verify(ship_speed_knots, None) @@ -158,7 +158,7 @@ def test_verify_schedule_errors( with pytest.raises(error, match=match): schedule.verify( - expedition.ship_speed_knots, + expedition.ship_config.ship_speed_knots, input_data, check_space_time_region=check_space_time_region, ) diff --git a/tests/expedition/test_simulate_schedule.py b/tests/expedition/test_simulate_schedule.py index ad2abfc8..bad8c9ad 100644 --- a/tests/expedition/test_simulate_schedule.py +++ b/tests/expedition/test_simulate_schedule.py @@ -16,7 +16,7 @@ def test_simulate_schedule_feasible() -> None: projection = pyproj.Geod(ellps="WGS84") expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") - expedition.ship_speed_knots = 10.0 + expedition.ship_config.ship_speed_knots = 10.0 expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time), @@ -35,7 +35,7 @@ def test_simulate_schedule_too_far() -> None: projection = pyproj.Geod(ellps="WGS84") expedition = Expedition.from_yaml("expedition_dir/expedition.yaml") - expedition.ship_speed_knots = 10.0 + expedition.ship_config.ship_speed_knots = 10.0 expedition.schedule = Schedule( waypoints=[ Waypoint(location=Location(0, 0), time=base_time),