virtualship 0.0.3.dev44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. virtualship/__init__.py +18 -0
  2. virtualship/_version_setup.py +16 -0
  3. virtualship/cli/__init__.py +1 -0
  4. virtualship/cli/commands.py +40 -0
  5. virtualship/cli/main.py +17 -0
  6. virtualship/expedition/__init__.py +28 -0
  7. virtualship/expedition/checkpoint.py +51 -0
  8. virtualship/expedition/do_expedition.py +150 -0
  9. virtualship/expedition/expedition_cost.py +27 -0
  10. virtualship/expedition/input_data.py +177 -0
  11. virtualship/expedition/instrument_type.py +11 -0
  12. virtualship/expedition/schedule.py +44 -0
  13. virtualship/expedition/ship_config.py +156 -0
  14. virtualship/expedition/simulate_measurements.py +104 -0
  15. virtualship/expedition/simulate_schedule.py +264 -0
  16. virtualship/expedition/verify_schedule.py +164 -0
  17. virtualship/expedition/waypoint.py +16 -0
  18. virtualship/instruments/__init__.py +5 -0
  19. virtualship/instruments/adcp.py +78 -0
  20. virtualship/instruments/argo_float.py +184 -0
  21. virtualship/instruments/ctd.py +137 -0
  22. virtualship/instruments/drifter.py +111 -0
  23. virtualship/instruments/ship_underwater_st.py +76 -0
  24. virtualship/location.py +44 -0
  25. virtualship/make_realistic/__init__.py +6 -0
  26. virtualship/make_realistic/adcp_make_realistic.py +66 -0
  27. virtualship/make_realistic/ctd_make_realistic.py +222 -0
  28. virtualship/spacetime.py +14 -0
  29. virtualship-0.0.3.dev44.dist-info/LICENSE +21 -0
  30. virtualship-0.0.3.dev44.dist-info/METADATA +159 -0
  31. virtualship-0.0.3.dev44.dist-info/RECORD +34 -0
  32. virtualship-0.0.3.dev44.dist-info/WHEEL +5 -0
  33. virtualship-0.0.3.dev44.dist-info/entry_points.txt +2 -0
  34. virtualship-0.0.3.dev44.dist-info/top_level.txt +1 -0
@@ -0,0 +1,18 @@
1
+ """Code for the Virtual Ship Classroom, where Marine Scientists can combine Copernicus Marine Data with an OceanParcels ship to go on a virtual expedition."""
2
+
3
+ from importlib.metadata import version as _version
4
+
5
+ from .location import Location
6
+ from .spacetime import Spacetime
7
+
8
+ try:
9
+ __version__ = _version("virtualship")
10
+ except Exception:
11
+ # Local copy or not installed with setuptools
12
+ __version__ = "unknown"
13
+
14
+ __all__ = [
15
+ "Location",
16
+ "Spacetime",
17
+ "__version__",
18
+ ]
@@ -0,0 +1,16 @@
1
# file generated by setuptools_scm
# don't change, don't track in version control
# NOTE(review): auto-generated version shim. TYPE_CHECKING is a plain bool so
# the typing import is skipped at runtime; static checkers still see the alias.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union
    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

# Version metadata stamped at build time by setuptools_scm.
version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '0.0.3.dev44'
__version_tuple__ = version_tuple = (0, 0, 3, 'dev44')
@@ -0,0 +1 @@
1
+ """Command line interface tools."""
@@ -0,0 +1,40 @@
1
+ from pathlib import Path
2
+
3
+ import click
4
+
5
+ from virtualship.expedition.do_expedition import do_expedition
6
+
7
+
8
@click.command(
    help="Initialize a directory for a new expedition, with an example configuration."
)
@click.argument(
    "path",
    # The expedition directory to create; must not already exist.
    type=click.Path(exists=False, file_okay=False, dir_okay=True),
)
def init(path):
    """
    Entrypoint for the tool.

    :param path: Expedition directory to initialize.
    :raises NotImplementedError: Always; this command is not implemented yet.
    """
    raise NotImplementedError("Not implemented yet.")
19
+
20
+
21
@click.command(
    help="Download the relevant data specified in an expedition directory (i.e., by the expedition config)."
)
@click.argument(
    "path",
    # Existing, readable expedition directory.
    type=click.Path(exists=True, file_okay=False, dir_okay=True, readable=True),
)
def fetch(path):
    """
    Entrypoint for the tool.

    :param path: Expedition directory to download data for.
    :raises NotImplementedError: Always; this command is not implemented yet.
    """
    raise NotImplementedError("Not implemented yet.")
31
+
32
+
33
@click.command(help="Do the expedition.")
@click.argument(
    "path",
    # Existing, readable expedition directory.
    type=click.Path(exists=True, file_okay=False, dir_okay=True, readable=True),
)
def run(path):
    """
    Entrypoint for the tool.

    :param path: Expedition directory to run the expedition from.
    """
    do_expedition(Path(path))
@@ -0,0 +1,17 @@
1
+ import click
2
+
3
+ from . import commands
4
+
5
+
6
@click.group()
@click.version_option()
def cli():
    """Top-level command group for the virtualship command line interface."""
    pass


# Register the subcommands defined in commands.py.
cli.add_command(commands.init)
cli.add_command(commands.fetch)
cli.add_command(commands.run)

if __name__ == "__main__":
    cli()
@@ -0,0 +1,28 @@
1
+ """Everything for simulating an expedition."""
2
+
3
+ from .do_expedition import do_expedition
4
+ from .instrument_type import InstrumentType
5
+ from .schedule import Schedule
6
+ from .ship_config import (
7
+ ADCPConfig,
8
+ ArgoFloatConfig,
9
+ CTDConfig,
10
+ DrifterConfig,
11
+ ShipConfig,
12
+ ShipUnderwaterSTConfig,
13
+ )
14
+ from .waypoint import Waypoint
15
+
16
+ __all__ = [
17
+ "ADCPConfig",
18
+ "ArgoFloatConfig",
19
+ "CTDConfig",
20
+ "DrifterConfig",
21
+ "InstrumentType",
22
+ "Schedule",
23
+ "ShipConfig",
24
+ "ShipUnderwaterSTConfig",
25
+ "Waypoint",
26
+ "do_expedition",
27
+ "instruments",
28
+ ]
@@ -0,0 +1,51 @@
1
+ """Checkpoint class."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+
7
+ import pydantic
8
+ import yaml
9
+
10
+ from .instrument_type import InstrumentType
11
+ from .schedule import Schedule
12
+
13
+
14
class _YamlDumper(yaml.SafeDumper):
    """Safe YAML dumper extended with representers for project types."""

    pass


# Serialize InstrumentType members by their string value so the resulting
# YAML stays plain data and can be read back with yaml.safe_load.
_YamlDumper.add_representer(
    InstrumentType, lambda dumper, data: dumper.represent_data(data.value)
)
21
+
22
+
23
class Checkpoint(pydantic.BaseModel):
    """
    A checkpoint of schedule simulation.

    Copy of the schedule until where the simulation proceeded without troubles.
    """

    # The prefix of the schedule that has already been simulated successfully.
    past_schedule: Schedule

    def to_yaml(self, file_path: str | Path) -> None:
        """
        Write checkpoint to yaml file.

        :param file_path: Path to the file to write to.
        """
        with open(file_path, "w") as file:
            # _YamlDumper knows how to represent InstrumentType values.
            yaml.dump(self.model_dump(by_alias=True), file, Dumper=_YamlDumper)

    @classmethod
    def from_yaml(cls, file_path: str | Path) -> Checkpoint:
        """
        Load checkpoint from yaml file.

        :param file_path: Path to the file to load from.
        :returns: The checkpoint.
        :raises FileNotFoundError: If the file does not exist.
        """
        with open(file_path) as file:
            data = yaml.safe_load(file)
        # Use cls (not the hard-coded class name) so subclasses load correctly.
        return cls(**data)
@@ -0,0 +1,150 @@
1
+ """do_expedition function."""
2
+
3
+ import os
4
+ import shutil
5
+ from pathlib import Path
6
+
7
+ import pyproj
8
+
9
+ from .checkpoint import Checkpoint
10
+ from .expedition_cost import expedition_cost
11
+ from .input_data import InputData
12
+ from .schedule import Schedule
13
+ from .ship_config import ShipConfig
14
+ from .simulate_measurements import simulate_measurements
15
+ from .simulate_schedule import ScheduleProblem, simulate_schedule
16
+ from .verify_schedule import verify_schedule
17
+
18
+
19
def do_expedition(expedition_dir: str | Path) -> None:
    """
    Perform an expedition, providing terminal feedback and file output.

    :param expedition_dir: The base directory for the expedition.
    """
    if isinstance(expedition_dir, str):
        expedition_dir = Path(expedition_dir)

    # load ship configuration
    ship_config = _get_ship_config(expedition_dir)
    if ship_config is None:
        return

    # load schedule
    schedule = _get_schedule(expedition_dir)
    if schedule is None:
        return

    # load last checkpoint, starting from an empty past schedule if none exists
    checkpoint = _load_checkpoint(expedition_dir)
    if checkpoint is None:
        checkpoint = Checkpoint(past_schedule=Schedule(waypoints=[]))

    # verify that the schedule still starts with the already-simulated waypoints
    if (
        schedule.waypoints[: len(checkpoint.past_schedule.waypoints)]
        != checkpoint.past_schedule.waypoints
    ):
        print(
            "Past waypoints in schedule have been changed! Restore past schedule and only change future waypoints."
        )
        return

    # projection used to sail between waypoints
    projection = pyproj.Geod(ellps="WGS84")

    # load fieldsets
    input_data = _load_input_data(
        expedition_dir=expedition_dir, ship_config=ship_config
    )

    # verify schedule makes sense
    verify_schedule(projection, ship_config, schedule, input_data)

    # simulate the schedule
    schedule_results = simulate_schedule(
        projection=projection, ship_config=ship_config, schedule=schedule
    )
    if isinstance(schedule_results, ScheduleProblem):
        # persist progress up to the failing waypoint so the user can resume
        print(
            "Update your schedule and continue the expedition by running the tool again."
        )
        _save_checkpoint(
            Checkpoint(
                past_schedule=Schedule(
                    waypoints=schedule.waypoints[: schedule_results.failed_waypoint_i]
                )
            ),
            expedition_dir,
        )
        return

    # delete and create results directory
    results_dir = expedition_dir.joinpath("results")
    if results_dir.exists():
        shutil.rmtree(results_dir)
    results_dir.mkdir(parents=True)

    # calculate expedition cost in US$
    assert (
        schedule.waypoints[0].time is not None
    ), "First waypoint has no time. This should not be possible as it should have been verified before."
    time_past = schedule_results.time - schedule.waypoints[0].time
    cost = expedition_cost(schedule_results, time_past)
    with open(results_dir.joinpath("cost.txt"), "w") as file:
        # write (not writelines): the value is a single string, and writelines
        # on a str would write it character by character.
        file.write(f"cost: {cost} US$")
    print(f"This expedition took {time_past} and would have cost {cost:,.0f} US$.")

    # simulate measurements
    print("Simulating measurements. This may take a while..")
    simulate_measurements(
        expedition_dir,
        ship_config,
        input_data,
        schedule_results.measurements_to_simulate,
    )
    print("Done simulating measurements.")

    print("Your expedition has concluded successfully!")
    print("Your measurements can be found in the results directory.")
109
+
110
+
111
def _get_ship_config(expedition_dir: Path) -> ShipConfig | None:
    """
    Load the ship configuration from the expedition directory.

    :param expedition_dir: The base directory for the expedition.
    :returns: The ship configuration, or None if the file does not exist.
    """
    file_path = expedition_dir.joinpath("ship_config.yaml")
    try:
        return ShipConfig.from_yaml(file_path)
    except FileNotFoundError:
        # Fixed copy-paste error: this message previously said "Schedule not
        # found" even though the missing file is the ship configuration.
        print(f'Ship configuration not found. Save it to "{file_path}".')
        return None
118
+
119
+
120
def _load_input_data(expedition_dir: Path, ship_config: ShipConfig) -> InputData:
    """
    Load only the fieldsets needed by the instruments configured on the ship.

    Each load_* flag is derived from whether the corresponding instrument
    config is present on the ship configuration.

    :param expedition_dir: The base directory for the expedition.
    :param ship_config: Ship configuration deciding which fieldsets are needed.
    :returns: The loaded input data.
    """
    return InputData.load(
        directory=expedition_dir.joinpath("input_data"),
        load_adcp=ship_config.adcp_config is not None,
        load_argo_float=ship_config.argo_float_config is not None,
        load_ctd=ship_config.ctd_config is not None,
        load_drifter=ship_config.drifter_config is not None,
        load_ship_underwater_st=ship_config.ship_underwater_st_config is not None,
    )
129
+
130
+
131
def _get_schedule(expedition_dir: Path) -> Schedule | None:
    """
    Read the expedition schedule, telling the user where to put it if absent.

    :param expedition_dir: The base directory for the expedition.
    :returns: The schedule, or None when schedule.yaml does not exist.
    """
    file_path = expedition_dir.joinpath("schedule.yaml")
    try:
        schedule = Schedule.from_yaml(file_path)
    except FileNotFoundError:
        print(f'Schedule not found. Save it to "{file_path}".')
        return None
    return schedule
138
+
139
+
140
def _load_checkpoint(expedition_dir: Path) -> Checkpoint | None:
    """
    Load the saved checkpoint, if any.

    :param expedition_dir: The base directory for the expedition.
    :returns: The checkpoint, or None when no checkpoint file exists yet.
    """
    checkpoint_path = expedition_dir.joinpath("checkpoint.yaml")
    try:
        return Checkpoint.from_yaml(checkpoint_path)
    except FileNotFoundError:
        # No checkpoint yet; caller starts from an empty past schedule.
        return None
146
+
147
+
148
def _save_checkpoint(checkpoint: Checkpoint, expedition_dir: Path) -> None:
    """
    Persist the checkpoint to its well-known location in the expedition directory.

    :param checkpoint: The checkpoint to save.
    :param expedition_dir: The base directory for the expedition.
    """
    checkpoint.to_yaml(expedition_dir.joinpath("checkpoint.yaml"))
@@ -0,0 +1,27 @@
1
+ """expedition_cost function."""
2
+
3
+ from datetime import timedelta
4
+
5
+ from .simulate_schedule import ScheduleOk
6
+
7
+
8
def expedition_cost(schedule_results: ScheduleOk, time_past: timedelta) -> float:
    """
    Calculate the cost of the expedition in US$.

    :param schedule_results: Results from schedule simulation.
    :param time_past: Time the expedition took.
    :returns: The calculated cost of the expedition in US$.
    """
    SHIP_COST_PER_DAY = 30000
    DRIFTER_DEPLOY_COST = 2500
    ARGO_DEPLOY_COST = 15000

    measurements = schedule_results.measurements_to_simulate

    # Hourly rate times elapsed seconds; the // floors the result to a whole
    # number of dollars.
    hourly_rate = SHIP_COST_PER_DAY / 24
    ship_cost = hourly_rate * time_past.total_seconds() // 3600

    argo_cost = len(measurements.argo_floats) * ARGO_DEPLOY_COST
    drifter_cost = len(measurements.drifters) * DRIFTER_DEPLOY_COST

    return ship_cost + argo_cost + drifter_cost
@@ -0,0 +1,177 @@
1
+ """InputData class."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+ from parcels import Field, FieldSet
9
+
10
+
11
@dataclass
class InputData:
    """A collection of fieldsets that function as input data for simulation."""

    adcp_fieldset: FieldSet | None
    argo_float_fieldset: FieldSet | None
    ctd_fieldset: FieldSet | None
    drifter_fieldset: FieldSet | None
    ship_underwater_st_fieldset: FieldSet | None

    @classmethod
    def load(
        cls,
        directory: str | Path,
        load_adcp: bool,
        load_argo_float: bool,
        load_ctd: bool,
        load_drifter: bool,
        load_ship_underwater_st: bool,
    ) -> InputData:
        """
        Create an instance of this class from netCDF files.

        For now this function makes a lot of assumption about file location and contents.

        :param directory: Base directory of the expedition.
        :param load_adcp: Whether to load the ADCP fieldset.
        :param load_argo_float: Whether to load the argo float fieldset.
        :param load_ctd: Whether to load the CTD fieldset.
        :param load_drifter: Whether to load the drifter fieldset.
        :param load_ship_underwater_st: Whether to load the ship underwater ST fieldset.
        :returns: An instance of this class with loaded fieldsets.
        """
        # The loaders below call Path methods on `directory`; the annotation
        # also allows str, so normalize up front.
        directory = Path(directory)

        drifter_fieldset = (
            cls._load_drifter_fieldset(directory) if load_drifter else None
        )
        argo_float_fieldset = (
            cls._load_argo_float_fieldset(directory) if load_argo_float else None
        )

        # ADCP, CTD and ship underwater ST all share one "default" fieldset;
        # load it once and hand the same object to each requested instrument.
        # Defaulting all three to None first guarantees they are bound even
        # when no instrument needs the default fieldset.
        adcp_fieldset = None
        ctd_fieldset = None
        ship_underwater_st_fieldset = None
        if load_adcp or load_ctd or load_ship_underwater_st:
            default_fieldset = cls._load_default_fieldset(directory)
            if load_adcp:
                adcp_fieldset = default_fieldset
            if load_ctd:
                ctd_fieldset = default_fieldset
            if load_ship_underwater_st:
                ship_underwater_st_fieldset = default_fieldset

        return cls(
            adcp_fieldset=adcp_fieldset,
            argo_float_fieldset=argo_float_fieldset,
            ctd_fieldset=ctd_fieldset,
            drifter_fieldset=drifter_fieldset,
            ship_underwater_st_fieldset=ship_underwater_st_fieldset,
        )

    @classmethod
    def _load_default_fieldset(cls, directory: Path) -> FieldSet:
        """Load the UV/S/T fieldset shared by ADCP, CTD and underwater ST, plus bathymetry."""
        filenames = {
            "U": directory.joinpath("default_uv.nc"),
            "V": directory.joinpath("default_uv.nc"),
            "S": directory.joinpath("default_s.nc"),
            "T": directory.joinpath("default_t.nc"),
        }
        variables = {"U": "uo", "V": "vo", "S": "so", "T": "thetao"}
        dimensions = {
            "lon": "longitude",
            "lat": "latitude",
            "time": "time",
            "depth": "depth",
        }

        # create the fieldset and set interpolation methods
        fieldset = FieldSet.from_netcdf(
            filenames, variables, dimensions, allow_time_extrapolation=True
        )
        fieldset.T.interp_method = "linear_invdist_land_tracer"
        fieldset.S.interp_method = "linear_invdist_land_tracer"

        # make depth negative (downward depths become negative z-coordinates)
        for g in fieldset.gridset.grids:
            g.depth = -g.depth

        # add bathymetry data
        bathymetry_file = directory.joinpath("bathymetry.nc")
        bathymetry_variables = ("bathymetry", "deptho")
        bathymetry_dimensions = {"lon": "longitude", "lat": "latitude"}
        bathymetry_field = Field.from_netcdf(
            bathymetry_file, bathymetry_variables, bathymetry_dimensions
        )
        # make depth negative
        bathymetry_field.data = -bathymetry_field.data
        fieldset.add_field(bathymetry_field)

        # read in data already
        fieldset.computeTimeChunk(0, 1)

        return fieldset

    @classmethod
    def _load_drifter_fieldset(cls, directory: Path) -> FieldSet:
        """Load the UV/T fieldset used for drifter simulation."""
        filenames = {
            "U": directory.joinpath("drifter_uv.nc"),
            "V": directory.joinpath("drifter_uv.nc"),
            "T": directory.joinpath("drifter_t.nc"),
        }
        variables = {"U": "uo", "V": "vo", "T": "thetao"}
        dimensions = {
            "lon": "longitude",
            "lat": "latitude",
            "time": "time",
            "depth": "depth",
        }

        fieldset = FieldSet.from_netcdf(
            filenames, variables, dimensions, allow_time_extrapolation=False
        )
        fieldset.T.interp_method = "linear_invdist_land_tracer"

        # make depth negative
        for g in fieldset.gridset.grids:
            g.depth = -g.depth

        # read in data already
        fieldset.computeTimeChunk(0, 1)

        return fieldset

    @classmethod
    def _load_argo_float_fieldset(cls, directory: Path) -> FieldSet:
        """Load the UV/S/T fieldset used for argo float simulation."""
        filenames = {
            "U": directory.joinpath("argo_float_uv.nc"),
            "V": directory.joinpath("argo_float_uv.nc"),
            "S": directory.joinpath("argo_float_s.nc"),
            "T": directory.joinpath("argo_float_t.nc"),
        }
        variables = {"U": "uo", "V": "vo", "S": "so", "T": "thetao"}
        dimensions = {
            "lon": "longitude",
            "lat": "latitude",
            "time": "time",
            "depth": "depth",
        }

        fieldset = FieldSet.from_netcdf(
            filenames, variables, dimensions, allow_time_extrapolation=False
        )
        fieldset.T.interp_method = "linear_invdist_land_tracer"
        fieldset.S.interp_method = "linear_invdist_land_tracer"

        # make depth negative, but only if it is not negative already.
        # NOTE(review): the other loaders negate unconditionally — confirm
        # whether they should use this same guard.
        for g in fieldset.gridset.grids:
            if max(g.depth) > 0:
                g.depth = -g.depth

        # read in data already
        fieldset.computeTimeChunk(0, 1)

        return fieldset
@@ -0,0 +1,11 @@
1
+ """InstrumentType Enum."""
2
+
3
+ from enum import Enum
4
+
5
+
6
+ class InstrumentType(Enum):
7
+ """Types of instruments."""
8
+
9
+ CTD = "CTD"
10
+ DRIFTER = "DRIFTER"
11
+ ARGO_FLOAT = "ARGO_FLOAT"
@@ -0,0 +1,44 @@
1
+ """Schedule class."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+
7
+ import pydantic
8
+ import yaml
9
+
10
+ from .waypoint import Waypoint
11
+
12
+
13
class Schedule(pydantic.BaseModel):
    """Schedule of the virtual ship."""

    # Ordered waypoints the ship will visit.
    waypoints: list[Waypoint]

    # Reject unknown keys in schedule files instead of silently ignoring them.
    model_config = pydantic.ConfigDict(extra="forbid")

    def to_yaml(self, file_path: str | Path) -> None:
        """
        Write schedule to yaml file.

        :param file_path: Path to the file to write to.
        """
        # NOTE(review): this uses the default dumper; if waypoints ever carry
        # InstrumentType values, a custom dumper (as in checkpoint.py) may be
        # needed for safe_load round-tripping — confirm against Waypoint.
        with open(file_path, "w") as file:
            yaml.dump(
                self.model_dump(
                    by_alias=True,
                ),
                file,
            )

    @classmethod
    def from_yaml(cls, file_path: str | Path) -> Schedule:
        """
        Load schedule from yaml file.

        :param file_path: Path to the file to load from.
        :returns: The schedule.
        :raises FileNotFoundError: If the file does not exist.
        """
        with open(file_path) as file:
            data = yaml.safe_load(file)
        # Use cls (not the hard-coded class name) so subclasses load correctly.
        return cls(**data)