ladim 2.0.9__py3-none-any.whl → 2.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ladim/__init__.py CHANGED
@@ -1,3 +1,3 @@
-__version__ = '2.0.9'
+__version__ = '2.1.5'
 
 from .main import main, run
ladim/config.py CHANGED
@@ -82,7 +82,6 @@ def convert_1_to_2(c):
     out['solver']['stop'] = dict_get(c, 'time_control.stop_time')
     out['solver']['step'] = dt_sec
     out['solver']['seed'] = dict_get(c, 'numerics.seed')
-    out['solver']['order'] = ['release', 'forcing', 'output', 'tracker', 'ibm', 'state']
 
     out['grid'] = {}
     out['grid']['file'] = dict_get(c, [
@@ -93,7 +92,7 @@ def convert_1_to_2(c):
     out['grid']['start_time'] = np.datetime64(dict_get(c, 'time_control.start_time', '1970'), 's')
     out['grid']['subgrid'] = dict_get(c, 'gridforce.subgrid', None)
 
-    out['forcing'] = {}
+    out['forcing'] = {k: v for k, v in c.get('gridforce', {}).items() if k not in ('input_file', 'module')}
     out['forcing']['file'] = dict_get(c, ['gridforce.input_file', 'files.input_file'])
     out['forcing']['first_file'] = dict_get(c, 'gridforce.first_file', "")
     out['forcing']['last_file'] = dict_get(c, 'gridforce.last_file', "")
@@ -142,7 +141,6 @@ def convert_1_to_2(c):
 
     out['ibm'] = {}
     if 'ibm' in c:
-        out['ibm']['module'] = 'ladim.ibms.LegacyIBM'
         out['ibm']['legacy_module'] = dict_get(c, ['ibm.ibm_module', 'ibm.module'])
         if out['ibm']['legacy_module'] == 'ladim.ibms.ibm_salmon_lice':
            out['ibm']['legacy_module'] = 'ladim_plugins.salmon_lice'
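
Note: the rewritten `out['forcing']` line means any extra keys in a version-1 `gridforce` section now survive conversion instead of being dropped. A minimal sketch of the new rule in isolation (the `legacy_module` value is only an illustration, not part of ladim):

    c = {'gridforce': {'input_file': 'ocean.nc', 'module': 'ladim.gridforce.ROMS', 'legacy_module': 'my_forcing'}}
    forcing = {k: v for k, v in c.get('gridforce', {}).items() if k not in ('input_file', 'module')}
    assert forcing == {'legacy_module': 'my_forcing'}  # extra keys kept; 'input_file' and 'module' handled separately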
ladim/forcing.py CHANGED
@@ -1,10 +1,19 @@
-from .model import Model, Module
+import typing
+if typing.TYPE_CHECKING:
+    from ladim.model import Model
 
 
-class Forcing(Module):
+class Forcing:
+    @staticmethod
+    def from_roms(**conf):
+        return RomsForcing(**conf)
+
     def velocity(self, X, Y, Z, tstep=0.0):
         raise NotImplementedError
 
+    def update(self, model: "Model"):
+        raise NotImplementedError
+
 
 class RomsForcing(Forcing):
     def __init__(self, file, variables=None, **conf):
@@ -37,11 +46,7 @@ class RomsForcing(Forcing):
 
         grid_ref = GridReference()
         legacy_conf = dict(
-            gridforce=dict(
-                input_file=file,
-                first_file=conf.get('first_file', ""),
-                last_file=conf.get('last_file', ""),
-            ),
+            gridforce=dict(input_file=file, **conf),
             ibm_forcing=conf.get('ibm_forcing', []),
             start_time=conf.get('start_time', None),
            stop_time=conf.get('stop_time', None),
@@ -50,7 +55,7 @@ class RomsForcing(Forcing):
         if conf.get('subgrid', None) is not None:
             legacy_conf['gridforce']['subgrid'] = conf['subgrid']
 
-        from .model import load_class
+        from .utilities import load_class
         LegacyForcing = load_class(conf.get('legacy_module', 'ladim.gridforce.ROMS.Forcing'))
 
         # Allow gridforce module in current directory
@@ -63,7 +68,7 @@ class RomsForcing(Forcing):
         # self.U = self.forcing.U
         # self.V = self.forcing.V
 
-    def update(self, model: Model):
+    def update(self, model: "Model"):
        elapsed = model.solver.time - model.solver.start
        t = elapsed // model.solver.step
 
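
Note: `Forcing` no longer inherits from a `Module` base class; construction goes through the new `from_roms` factory, which forwards extra keyword arguments into the legacy gridforce configuration via `**conf`. A minimal usage sketch (the file name is hypothetical):

    from ladim.forcing import Forcing

    # Equivalent to RomsForcing(file=...); keywords such as 'first_file' and
    # 'last_file' now end up in legacy_conf['gridforce'] through **conf
    forcing = Forcing.from_roms(file='ocean_avg_0014.nc')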
ladim/grid.py CHANGED
@@ -1,16 +1,19 @@
-from .model import Module
 import numpy as np
 from typing import Sequence
 from scipy.ndimage import map_coordinates
 
 
-class Grid(Module):
+class Grid:
     """
     The grid class represents the coordinate system used for particle tracking.
     It contains methods for converting between global coordinates (latitude,
     longitude, depth and posix time) and internal coordinates.
     """
 
+    @staticmethod
+    def from_roms(**conf):
+        return RomsGrid(**conf)
+
     def ingrid(self, X, Y):
         raise NotImplementedError
 
@@ -188,7 +191,7 @@ class RomsGrid(Grid):
         if subgrid is not None:
             legacy_conf['gridforce']['subgrid'] = subgrid
 
-        from .model import load_class
+        from .utilities import load_class
         LegacyGrid = load_class(legacy_module)
 
         # Allow gridforce module in current directory
ladim/gridforce/ROMS.py CHANGED
@@ -62,7 +62,7 @@ class Grid:
         # Here, imax, jmax refers to whole grid
         jmax, imax = ncid.variables["h"].shape
         whole_grid = [1, imax - 1, 1, jmax - 1]
-        if "subgrid" in config["gridforce"]:
+        if config["gridforce"].get('subgrid', None):
             limits = list(config["gridforce"]["subgrid"])
         else:
             limits = whole_grid
ladim/ibms/__init__.py CHANGED
@@ -1,18 +1,22 @@
-from ..model import Model, Module
 import numpy as np
+import typing
 
+if typing.TYPE_CHECKING:
+    from ..model import Model
 
-class IBM(Module):
-    pass
 
+class IBM:
+    def __init__(self, legacy_module=None, conf: dict = None):
+        from ..utilities import load_class
 
-class LegacyIBM(IBM):
-    def __init__(self, legacy_module, conf):
-        from ..model import load_class
-        LegacyIbmClass = load_class(legacy_module + '.IBM')
-        self._ibm = LegacyIbmClass(conf)
+        if legacy_module is None:
+            UserIbmClass = EmptyIBM
+        else:
+            UserIbmClass = load_class(legacy_module + '.IBM')
 
-    def update(self, model: Model):
+        self.user_ibm = UserIbmClass(conf or {})
+
+    def update(self, model: "Model"):
         grid = model.grid
         state = model.state
 
@@ -23,4 +27,12 @@ class LegacyIBM(IBM):
         )
 
         forcing = model.forcing
-        self._ibm.update_ibm(grid, state, forcing)
+        self.user_ibm.update_ibm(grid, state, forcing)
+
+
+class EmptyIBM:
+    def __init__(self, _):
+        pass
+
+    def update_ibm(self, grid, state, forcing):
+        return
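
Note: the new `IBM` wrapper loads `<legacy_module>.IBM` itself via `load_class` and falls back to the no-op `EmptyIBM` when `legacy_module` is omitted. A user module therefore only needs a class named `IBM` with the interface used above. A minimal sketch, where `my_ibm` is a hypothetical module in the working directory:

    # my_ibm.py
    class IBM:
        def __init__(self, conf):
            self.conf = conf

        def update_ibm(self, grid, state, forcing):
            # Modify particle state in place, e.g. state['Z'] += 1
            pass

    # Wiring it up with the wrapper:
    from ladim.ibms import IBM as IbmWrapper
    ibm = IbmWrapper(legacy_module='my_ibm', conf={})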
ladim/model.py CHANGED
@@ -1,29 +1,11 @@
-import importlib
-import importlib.util
-import sys
-from pathlib import Path
-
-from typing import TYPE_CHECKING
-if TYPE_CHECKING:
-    from ladim.grid import Grid
-    from ladim.forcing import Forcing
-    from ladim.ibms import IBM
-    from ladim.output import Output
-    from ladim.release import Releaser
-    from ladim.state import State
-    from ladim.tracker import Tracker
-    from ladim.solver import Solver
-
-DEFAULT_MODULES = dict(
-    grid='ladim.grid.RomsGrid',
-    forcing='ladim.forcing.RomsForcing',
-    release='ladim.release.TextFileReleaser',
-    state='ladim.state.DynamicState',
-    output='ladim.output.RaggedOutput',
-    ibm='ladim.ibms.IBM',
-    tracker='ladim.tracker.HorizontalTracker',
-    solver='ladim.solver.Solver',
-)
+from ladim.ibms import IBM
+from ladim.solver import Solver
+from ladim.release import Releaser
+from ladim.grid import Grid
+from ladim.forcing import Forcing
+from ladim.state import State
+from ladim.tracker import Tracker
+from ladim.output import Output
 
 
 class Model:
@@ -58,25 +40,22 @@ class Model:
         :return: An initialized Model class
         """
 
-        # Create new version of the config dict without the 'model' keyword
-        def remove_module_key(d: dict):
-            return {k: v for k, v in d.items() if k != 'module'}
+        grid = Grid.from_roms(**config['grid'])
+        forcing = Forcing.from_roms(**config['forcing'])
 
-        # Initialize modules
-        module_names = (
-            'grid', 'forcing', 'release', 'state', 'output', 'ibm', 'tracker',
-            'solver',
+        release = Releaser.from_textfile(
+            lonlat_converter=grid.ll2xy, **config['release']
         )
-        modules = dict()
-        for name in module_names:
-            subconf = config.get(name, dict())
-            modules[name] = Module.from_config(
-                conf=remove_module_key(subconf),
-                module=subconf.get('module', DEFAULT_MODULES[name]),
-            )
+        tracker = Tracker.from_config(**config['tracker'])
+
+        output = Output(**config['output'])
+        ibm = IBM(**config['ibm'])
+        solver = Solver(**config['solver'])
 
-        # Initialize model
-        return Model(**modules)
+        state = State()
+
+        # noinspection PyTypeChecker
+        return Model(grid, forcing, release, state, output, ibm, tracker, solver)
 
     @property
     def modules(self) -> dict:
@@ -98,47 +77,3 @@ class Model:
         for m in self.modules.values():
             if hasattr(m, 'close') and callable(m.close):
                 m.close()
-
-
-def load_class(name):
-    pkg, cls = name.rsplit(sep='.', maxsplit=1)
-
-    # Check if "pkg" is an existing file
-    spec = None
-    module_name = None
-    file_name = pkg + '.py'
-    if Path(file_name).exists():
-        # This can return None if there were import errors
-        module_name = pkg
-        spec = importlib.util.spec_from_file_location(module_name, file_name)
-
-    # If pkg can not be interpreted as a file, use regular import
-    if spec is None:
-        return getattr(importlib.import_module(pkg), cls)
-
-    # File import
-    else:
-        module = importlib.util.module_from_spec(spec)
-        sys.modules[module_name] = module
-        spec.loader.exec_module(module)
-        return getattr(module, cls)
-
-
-class Module:
-    @staticmethod
-    def from_config(conf: dict, module: str) -> "Module":
-        """
-        Initialize a module using a configuration dict.
-
-        :param conf: The configuration parameters of the module
-        :param module: The fully qualified name of the module
-        :return: An initialized module
-        """
-        cls = load_class(module)
-        return cls(**conf)
-
-    def update(self, model: Model):
-        pass
-
-    def close(self):
-        pass
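
Note: with the `Module`/`DEFAULT_MODULES` plug-in machinery removed, `Model.from_config` now expects exactly the sections used above, and 'state' is no longer configurable. A sketch of the expected shape, assembled from the constructor signatures visible in this diff; all values are placeholders, not a tested configuration:

    config = {
        'grid': {'file': 'ocean_avg_0014.nc'},        # passed to Grid.from_roms
        'forcing': {'file': 'ocean_avg_0014.nc'},     # passed to Forcing.from_roms
        'release': {'file': 'particles.rls'},         # passed to Releaser.from_textfile
        'tracker': {'method': 'EF', 'diffusion': 0},  # passed to Tracker.from_config
        'output': {'variables': {}, 'file': 'out.nc', 'frequency': (1, 'h')},
        'ibm': {},                                    # empty dict selects the no-op EmptyIBM
        'solver': {'start': '2020-01-01', 'stop': '2020-01-02', 'step': 600},
    }
    model = Model.from_config(config)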
ladim/output.py CHANGED
@@ -1,13 +1,11 @@
-from .model import Model, Module
 import netCDF4 as nc
 import numpy as np
+import typing
+if typing.TYPE_CHECKING:
+    from .model import Model
 
 
-class Output(Module):
-    pass
-
-
-class RaggedOutput(Output):
+class Output:
     def __init__(self, variables: dict, file: str, frequency):
         """
         Writes simulation output to netCDF file in ragged array format
@@ -52,7 +50,7 @@ class RaggedOutput(Output):
         """Returns a handle to the netCDF dataset currently being written to"""
         return self._dset
 
-    def update(self, model: Model):
+    def update(self, model: "Model"):
         if self._dset is None:
             self._create_dset()
 
ladim/release.py CHANGED
@@ -1,173 +1,110 @@
 import contextlib
-
-from .model import Model, Module
 import numpy as np
 import pandas as pd
 from .utilities import read_timedelta
 import logging
+import typing
 
+if typing.TYPE_CHECKING:
+    from ladim.model import Model
 
-logger = logging.getLogger(__name__)
 
+logger = logging.getLogger(__name__)
 
-class Releaser(Module):
-    pass
 
+class Releaser:
+    def __init__(self, particle_generator: typing.Callable[[float, float], pd.DataFrame]):
+        self.particle_generator = particle_generator
 
-class TextFileReleaser(Releaser):
-    def __init__(
-        self, file, colnames: list = None, formats: dict = None,
-        frequency=(0, 's'), defaults=None,
+    @staticmethod
+    def from_textfile(
+        file, colnames: list = None, formats: dict = None,
+        frequency=(0, 's'), defaults=None, lonlat_converter=None,
     ):
         """
         Release module which reads from a text file
 
         The text file must be a whitespace-separated csv file
 
+        :param lonlat_converter: Function that converts lon, lat coordinates to
+            x, y coordinates
+
         :param file: Release file
 
         :param colnames: Column names, if the release file does not contain any
 
         :param formats: Data column formats, one dict entry per column. If any column
-        is missing, the default format is used. Keys should correspond to column names.
-        Values should be either ``"float"``, ``"int"`` or ``"time"``. Default value
-        is ``"float"`` for all columns except ``release_time``, which has default
-        value ``"time"``.
+            is missing, the default format is used. Keys should correspond to column names.
+            Values should be either ``"float"``, ``"int"`` or ``"time"``. Default value
+            is ``"float"`` for all columns except ``release_time``, which has default
+            value ``"time"``.
 
         :param frequency: A two-element list with entries ``[value, unit]``, where
-        ``unit`` can be any numpy-compatible timedelta unit (such as "s", "m", "h", "D").
+            ``unit`` can be any numpy-compatible timedelta unit (such as "s", "m", "h", "D").
 
         :param defaults: A dict of variables to be added to each particle. The keys
         are the variable names, the values are the initial values at particle
         release.
         """
 
-        # Release file
-        self._csv_fname = file  # Path name
-        self._csv_column_names = colnames  # Column headers
-        self._csv_column_formats = formats or dict()
-        self._dataframe = None
-
-        # Continuous release variables
-        self._frequency = read_timedelta(frequency) / np.timedelta64(1, 's')
-        self._last_release_dataframe = pd.DataFrame()
-        self._last_release_time = np.int64(-4611686018427387904)
-
-        # Other parameters
-        self._defaults = defaults or dict()
-
-    def update(self, model: Model):
+        release_table = ReleaseTable.from_filename_or_stream(
+            file=file,
+            column_names=colnames,
+            column_formats=formats or dict(),
+            interval=read_timedelta(frequency) / np.timedelta64(1, 's'),
+            defaults=defaults or dict(),
+            lonlat_converter=lonlat_converter,
+        )
+        return Releaser(particle_generator=release_table.subset)
+
+    def update(self, model: "Model"):
         self._add_new(model)
         self._kill_old(model)
 
     # noinspection PyMethodMayBeStatic
-    def _kill_old(self, model: Model):
+    def _kill_old(self, model: "Model"):
         state = model.state
         if 'alive' in state:
             alive = state['alive']
             alive &= model.grid.ingrid(state['X'], state['Y'])
             state.remove(~alive)
 
-    def _add_new(self, model: Model):
+    def _add_new(self, model: "Model"):
         # Get the portion of the release dataset that corresponds to
         # current simulation time
-        df = release_data_subset(
-            dataframe=self.dataframe,
-            start_time=model.solver.time,
-            stop_time=model.solver.time + model.solver.step,
-        ).copy(deep=True)
+        df = self.particle_generator(
+            model.solver.time,
+            model.solver.time + model.solver.step,
+        )
 
         # If there are no new particles, but the state is empty, we should
         # still initialize the state by adding the appropriate columns
         if (len(df) == 0) and ('X' not in model.state):
             model.state.append(df.to_dict(orient='list'))
-            self._last_release_dataframe = df
 
-        # If there are no new particles and we don't use continuous release,
-        # we are done.
-        continuous_release = bool(self._frequency)
-        if (len(df) == 0) and not continuous_release:
-            return
-
-        # If we have continuous release, but there are no new particles and
-        # the last release is recent, we are also done
-        current_time = model.solver.time
-        elapsed_since_last_write = current_time - self._last_release_time
-        last_release_is_recent = (elapsed_since_last_write < self._frequency)
-        if continuous_release and (len(df) == 0) and last_release_is_recent:
+        # If there are no new particles, we are done.
+        if len(df) == 0:
            return
 
         # If we are at the final time step, we should not release any more particles
-        if continuous_release and model.solver.time >= model.solver.stop:
+        if model.solver.time >= model.solver.stop:
            return
 
-        # If we have continuous release, but there are no new particles and
-        # the last release is NOT recent, we should replace empty
-        # dataframe with the previously released dataframe
-        if continuous_release:
-            if (len(df) == 0) and not last_release_is_recent:
-                df = self._last_release_dataframe
-            self._last_release_dataframe = df  # Update release dataframe
-            self._last_release_time = current_time
-
-        # If positions are given as lat/lon coordinates, we should convert
-        if "X" not in df.columns or "Y" not in df.columns:
-            if "lon" not in df.columns or "lat" not in df.columns:
-                logger.critical("Particle release must have position")
-                raise ValueError()
-            # else
-            X, Y = model.grid.ll2xy(df["lon"].values, df["lat"].values)
-            df.rename(columns=dict(lon="X", lat="Y"), inplace=True)
-            df["X"] = X
-            df["Y"] = Y
-
-        # Add default variables, if any
-        for k, v in self._defaults.items():
-            if k not in df:
-                df[k] = v
-
-        # Expand multiplicity variable, if any
-        if 'mult' in df:
-            df = df.loc[np.repeat(df.index, df['mult'].values.astype('i4'))]
-            df = df.reset_index(drop=True).drop(columns='mult')
-
         # Add new particles
         new_particles = df.to_dict(orient='list')
         state = model.state
         state.append(new_particles)
 
-    @property
-    def dataframe(self):
-        @contextlib.contextmanager
-        def open_or_relay(file_or_buf, *args, **kwargs):
-            if hasattr(file_or_buf, 'read'):
-                yield file_or_buf
-            else:
-                with open(file_or_buf, *args, **kwargs) as f:
-                    yield f
-
-        if self._dataframe is None:
-            if isinstance(self._csv_fname, pd.DataFrame):
-                self._dataframe = self._csv_fname
-
-            else:
-                # noinspection PyArgumentList
-                with open_or_relay(self._csv_fname, 'r', encoding='utf-8') as fp:
-                    self._dataframe = load_release_file(
-                        stream=fp,
-                        names=self._csv_column_names,
-                        formats=self._csv_column_formats,
-                    )
-        return self._dataframe
-
-
-def release_data_subset(dataframe, start_time, stop_time):
-    start_idx, stop_idx = sorted_interval(
-        dataframe['release_time'].values,
-        start_time,
-        stop_time,
+
+def release_data_subset(dataframe, start_time, stop_time, interval: typing.Any = 0):
+    events = resolve_schedule(
+        times=dataframe['release_time'].values,
+        interval=interval,
+        start_time=start_time,
+        stop_time=stop_time,
     )
-    return dataframe.iloc[start_idx:stop_idx]
+
+    return dataframe.iloc[events]
@@ -188,25 +125,6 @@ def load_release_file(stream, names: list, formats: dict) -> pd.DataFrame:
     return df
 
 
-def sorted_interval(v, a, b):
-    """
-    Searches for an interval in a sorted array
-
-    Returns the start (inclusive) and stop (exclusive) indices of
-    elements in *v* that are greater than or equal to *a* and
-    less than *b*. In other words, returns *start* and *stop* such
-    that v[start:stop] == v[(v >= a) & (v < b)]
-
-    :param v: Sorted input array
-    :param a: Lower bound of array values (inclusive)
-    :param b: Upper bound of array values (exclusive)
-    :returns: A tuple (start, stop) defining the output interval
-    """
-    start = np.searchsorted(v, a, side='left')
-    stop = np.searchsorted(v, b, side='left')
-    return start, stop
-
-
 def get_converters(varnames: list, conf: dict) -> dict:
     """
     Given a list of varnames and config keywords, return a dict of converters
@@ -236,3 +154,223 @@ def get_converters(varnames: list, conf: dict) -> dict:
         converters[varname] = dtype_func
 
     return converters
+
+
+class ReleaseTable:
+    def __init__(
+        self,
+        dataframe: pd.DataFrame,
+        interval: float,
+        defaults: dict[str, typing.Any],
+        lonlat_converter: typing.Callable[[np.ndarray, np.ndarray], tuple[np.ndarray, np.ndarray]],
+    ):
+        self.dataframe = dataframe
+        self.interval = interval
+        self.defaults = defaults
+        self.lonlat_converter = lonlat_converter
+
+    def subset(self, start_time, stop_time):
+        events = resolve_schedule(
+            times=self.dataframe['release_time'].values,
+            interval=self.interval,
+            start_time=start_time,
+            stop_time=stop_time,
+        )
+
+        df = self.dataframe.iloc[events].copy(deep=True)
+        df = replace_lonlat_in_release_table(df, self.lonlat_converter)
+        df = add_default_variables_in_release_table(df, self.defaults)
+        df = expand_multiplicity_in_release_table(df)
+
+        return df
+
+    @staticmethod
+    def from_filename_or_stream(file, column_names, column_formats, interval, defaults, lonlat_converter):
+        with open_or_relay(file, 'r', encoding='utf-8') as fp:
+            return ReleaseTable.from_stream(
+                fp, column_names, column_formats, interval, defaults, lonlat_converter)
+
+    @staticmethod
+    def from_stream(fp, column_names, column_formats, interval, defaults, lonlat_converter):
+        df = load_release_file(stream=fp, names=column_names, formats=column_formats)
+        return ReleaseTable(df, interval, defaults, lonlat_converter)
+
+
+def replace_lonlat_in_release_table(df, lonlat_converter):
+    if "lon" not in df.columns or "lat" not in df.columns:
+        return df
+
+    X, Y = lonlat_converter(df["lon"].values, df["lat"].values)
+    df_new = df.drop(columns=['X', 'Y', 'lat', 'lon'], errors='ignore')
+    df_new["X"] = X
+    df_new["Y"] = Y
+    return df_new
+
+
+def add_default_variables_in_release_table(df, defaults):
+    df_new = df.copy()
+    for k, v in defaults.items():
+        if k not in df:
+            df_new[k] = v
+    return df_new
+
+
+def expand_multiplicity_in_release_table(df):
+    if 'mult' not in df:
+        return df
+    df = df.loc[np.repeat(df.index, df['mult'].values.astype('i4'))]
+    df = df.reset_index(drop=True).drop(columns='mult')
+    return df
+
+
+def resolve_schedule(times, interval, start_time, stop_time):
+    """
+    Convert descriptions of repeated events to actual event indices
+
+    The variable `times` specifies the start time of scheduled events. Each event
+    occurs repeatedly (as specified by `interval`) until there is a new scheduling
+    time. The function returns the index of all events occurring within the time span.
+
+    Example 1: times = [0, 0], interval = 2. These are 2 events (index [0, 1]),
+    occurring at times [0, 2, 4, 6, ...], starting at time = 0. The time interval
+    start_time = 0, stop_time = 6 will contain the event times 0, 2, 4. The
+    returned event indices are [0, 1, 0, 1, 0, 1].
+
+    Example 2: times = [0, 0, 3, 3, 3], interval = 2. The schedule starts with
+    2 events (index [0, 1]) occurring at time = 0. At time = 2, there are no new
+    scheduled events, and the previous events are repeated. At time = 3 there
+    are 3 new events scheduled (index [2, 3, 4]), which cancel the previous
+    events. The new events are repeated at times [3, 5, 7, ...]. The time
+    interval start_time = 0, stop_time = 7 contains the event times [0, 2, 3, 5].
+    The returned event indices are [0, 1, 0, 1, 2, 3, 4, 2, 3, 4].
+
+    :param times: Nondecreasing list of event times
+    :param interval: Maximum interval between scheduled times
+    :param start_time: Start time of schedule
+    :param stop_time: Stop time of schedule (not inclusive)
+    :return: Index of events in resolved schedule
+    """
+
+    sched = Schedule(times=np.asarray(times), events=np.arange(len(times)))
+    sched2 = sched.resolve(start_time, stop_time, interval)
+    return sched2.events
+
+
+class Schedule:
+    def __init__(self, times: np.ndarray, events: np.ndarray):
+        self.times = times.view()
+        self.events = events.view()
+        self.times.flags.writeable = False
+        self.events.flags.writeable = False
+
+    def valid(self):
+        return np.all(np.diff(self.times) >= 0)
+
+    def copy(self):
+        return Schedule(times=self.times.copy(), events=self.events.copy())
+
+    def append(self, other: "Schedule"):
+        return Schedule(
+            times=np.concatenate((self.times, other.times)),
+            events=np.concatenate((self.events, other.events)),
+        )
+
+    def extend_backwards_using_interval(self, time, interval):
+        min_time = self.times[0]
+        if min_time <= time:
+            return self
+
+        num_extensions = int(np.ceil((min_time - time) / interval))
+        new_time = min_time - num_extensions * interval
+        return self.extend_backwards(new_time)
+
+    def extend_backwards(self, new_minimum_time):
+        idx_to_be_copied = (self.times == self.times[0])
+        num_to_be_copied = np.count_nonzero(idx_to_be_copied)
+        extension = Schedule(
+            times=np.repeat(new_minimum_time, num_to_be_copied),
+            events=self.events[idx_to_be_copied],
+        )
+        return extension.append(self)
+
+    def trim_tail(self, stop_time):
+        num = np.sum(self.times < stop_time)
+        return Schedule(
+            times=self.times[:num],
+            events=self.events[:num],
+        )
+
+    def trim_head(self, start_time):
+        num = np.sum(self.times < start_time)
+        return Schedule(
+            times=self.times[num:],
+            events=self.events[num:],
+        )
+
+    def rightshift_closest_time_value(self, time, interval):
+        # If interval=0 is specified, this means there is nothing to right-shift
+        if interval <= 0:
+            return self
+
+        # Find largest value that is smaller than or equal to time
+        idx_target_time = sum(self.times <= time) - 1
+
+        # If no tabulated time values are smaller than the given time, there
+        # is nothing to right-shift
+        if idx_target_time == -1:
+            return self
+
+        # Compute new value to write
+        target_time = self.times[idx_target_time]
+        num_offsets = np.ceil((time - target_time) / interval)
+        new_target_time = target_time + num_offsets * interval
+
+        # Check if the new value is larger than the next value
+        if idx_target_time + 1 < len(self.times):  # If not, then there is no next value
+            next_time = self.times[idx_target_time + 1]
+            if new_target_time > next_time:
+                return self
+
+        # Change times
+        new_times = self.times.copy()
+        new_times[self.times == target_time] = new_target_time
+        return Schedule(times=new_times, events=self.events)
+
+    def expand(self, interval, stop):
+        # If there are no times, there should be no expansion
+        # Also, interval = 0 means no expansion
+        if (len(self.times) == 0) or (interval <= 0):
+            return self
+
+        t_unq, t_inv, t_cnt = np.unique(self.times, return_inverse=True, return_counts=True)
+        stop2 = np.maximum(stop, t_unq[-1])
+        diff = np.diff(np.concatenate((t_unq, [stop2])))
+        unq_repeats = np.ceil(diff / interval).astype(int)
+        repeats = np.repeat(unq_repeats, t_cnt)
+
+        base_times = np.repeat(self.times, repeats)
+        offsets = [i * interval for n in repeats for i in range(n)]
+        times = base_times + offsets
+        events = np.repeat(self.events, repeats)
+
+        idx = np.lexsort((events, times))
+
+        return Schedule(times=times[idx], events=events[idx])
+
+    def resolve(self, start, stop, interval):
+        s = self
+        if interval:
+            s = s.rightshift_closest_time_value(start, interval)
+        s = s.trim_head(start)
+        s = s.trim_tail(stop)
+        s = s.expand(interval, stop)
+        return s
+
+
+@contextlib.contextmanager
+def open_or_relay(file_or_buf, *args, **kwargs):
+    if hasattr(file_or_buf, 'read'):
+        yield file_or_buf
+    else:
+        with open(file_or_buf, *args, **kwargs) as f:
+            yield f
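
Note: the `resolve_schedule` docstring's Example 1 can be checked directly. A small demonstration of the new scheduling semantics that replace `sorted_interval`:

    from ladim.release import resolve_schedule

    # Two events scheduled at time 0, repeated every 2 time units until stop_time
    idx = resolve_schedule(times=[0, 0], interval=2, start_time=0, stop_time=6)
    print(list(idx))  # [0, 1, 0, 1, 0, 1]: the event pair fires at times 0, 2 and 4

    # interval=0 reduces to the old behaviour, a plain sorted-interval lookup
    idx = resolve_schedule(times=[0, 5, 10], interval=0, start_time=0, stop_time=6)
    print(list(idx))  # [0, 1]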
ladim/solver.py CHANGED
@@ -1,9 +1,12 @@
 import numpy as np
 
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from ladim.model import Model
+
 
 class Solver:
-    def __init__(self, start, stop, step, order=None, seed=None):
-        self.order = order or ('release', 'forcing', 'tracker', 'ibm', 'output')
+    def __init__(self, start, stop, step, seed=None):
         self.start = np.datetime64(start, 's').astype('int64')
         self.stop = np.datetime64(stop, 's').astype('int64')
         self.step = np.timedelta64(step, 's').astype('int64')
@@ -12,12 +15,13 @@ class Solver:
         if seed is not None:
             np.random.seed(seed)
 
-    def run(self, model):
-        modules = model.modules
-        ordered_modules = [modules[k] for k in self.order if k in modules]
-
+    def run(self, model: "Model"):
         self.time = self.start
         while self.time <= self.stop:
-            for m in ordered_modules:
-                m.update(model)
+            model.release.update(model)
+            model.forcing.update(model)
+            model.output.update(model)
+            model.tracker.update(model)
+            model.ibm.update(model)
+
             self.time += self.step
ladim/state.py CHANGED
@@ -1,9 +1,8 @@
 import pandas as pd
 import numpy as np
-from .model import Model, Module
 
 
-class State(Module):
+class State:
     """
     The state module contains static and dynamic particle properties
 
@@ -12,19 +11,24 @@ class State(Module):
     depth of all particles by 1, use state['Z'] += 1
     """
 
+    def __init__(self):
+        self._num_released = 0
+        self._varnames = set()
+        self._data = pd.DataFrame()
+
     @property
     def size(self):
         """
         Current number of particles
         """
-        raise NotImplementedError
+        return len(self._data)
 
     @property
     def released(self):
         """
         Total number of released particles
         """
-        raise NotImplementedError
+        return self._num_released
 
     def append(self, particles: dict):
         """
@@ -34,41 +38,6 @@ class State(Module):
 
         :param particles: A mapping from variable names to values
         """
-        raise NotImplementedError
-
-    def remove(self, particles):
-        """
-        Remove particles
-
-        :param particles: Boolean index of particles to remove
-        :return:
-        """
-        raise NotImplementedError
-
-    def __getitem__(self, item):
-        raise NotImplementedError
-
-    def __setitem__(self, key, value):
-        raise NotImplementedError
-
-    def __len__(self):
-        return self.size
-
-    def __contains__(self, item):
-        raise NotImplementedError
-
-
-class DynamicState(State):
-    def __init__(self):
-        self._num_released = 0
-        self._varnames = set()
-        self._data = pd.DataFrame()
-
-    @property
-    def released(self):
-        return self._num_released
-
-    def append(self, particles: dict):
         # If there are no new particles, do nothing
         if not particles:
             return
@@ -92,22 +61,30 @@ class DynamicState(State):
         self._num_released += num_new_particles
 
     def remove(self, particles):
+        """
+        Remove particles
+
+        :param particles: Boolean index of particles to remove
+        :return:
+        """
         if not np.any(particles):
             return
 
         keep = ~particles
         self._data = self._data.iloc[keep]
 
-    @property
-    def size(self):
-        return len(self._data)
-
     def __getitem__(self, item):
         return self._data[item].values
 
     def __setitem__(self, item, value):
         self._data[item] = value
 
+    def __len__(self):
+        return self.size
+
+    def __contains__(self, item):
+        return item in self._data
+
     def __getattr__(self, item):
         if item not in self:
             raise AttributeError(f'Attribute not defined: {item}')
@@ -124,6 +101,3 @@ class DynamicState(State):
             self._data[item] = value
         else:
             raise AttributeError(f"Attribute not defined: '{item}'")
-
-    def __contains__(self, item):
-        return item in self._data
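
Note: `DynamicState` has been folded into a single concrete `State` class. A small usage sketch based on the methods retained above:

    from ladim.state import State

    state = State()
    state.append({'X': [1.0, 2.0], 'Y': [3.0, 4.0]})
    assert state.size == 2 and state.released == 2
    state['X'] += 10               # __getitem__ returns the values, __setitem__ writes back
    state.remove(state['X'] > 11)  # boolean mask; removes the second particle
    assert len(state) == 1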
ladim/tracker.py CHANGED
@@ -1,21 +1,23 @@
-from .model import Model, Module
 import numpy as np
+import typing
+if typing.TYPE_CHECKING:
+    from .model import Model
 
 
-class Tracker(Module):
-    pass
-
-
-class HorizontalTracker:
+class Tracker:
     """The physical particle tracking kernel"""
+    def __init__(self, integrator, diffusion):
+        self.integrator = integrator
+        self.diffusion = diffusion  # [m2.s-1]
 
-    def __init__(self, method, diffusion) -> None:
+    @staticmethod
+    def from_config(method, diffusion):
         if not diffusion:
             method += "_nodiff"
-        self.integrator = StochasticDifferentialEquationIntegrator.from_keyword(method)
-        self.D = diffusion  # [m2.s-1]
+        integrator = StochasticDifferentialEquationIntegrator.from_keyword(method)
+        return Tracker(integrator, diffusion)
 
-    def update(self, model: Model):
+    def update(self, model: "Model"):
         state = model.state
         grid = model.grid
         forcing = model.forcing
@@ -31,7 +33,7 @@ class HorizontalTracker:
         # Set diffusion function
         def mixing(t, r):
             _ = t
-            stddev = (2 * self.D) ** 0.5
+            stddev = (2 * self.diffusion) ** 0.5
             u_diff = stddev / dx
             return np.broadcast_to(u_diff, r.shape)
 
ladim/utilities.py CHANGED
@@ -30,3 +30,31 @@ def ingrid(x: float, y: float, subgrid: List[int]) -> bool:
 def read_timedelta(conf) -> np.timedelta64:
     time_value, time_unit = conf
     return np.timedelta64(time_value, time_unit)
+
+
+def load_class(name):
+    import importlib.util
+    import sys
+    from pathlib import Path
+
+    pkg, cls = name.rsplit(sep='.', maxsplit=1)
+
+    # Check if "pkg" is an existing file
+    spec = None
+    module_name = None
+    file_name = pkg + '.py'
+    if Path(file_name).exists():
+        # This can return None if there were import errors
+        module_name = pkg
+        spec = importlib.util.spec_from_file_location(module_name, file_name)
+
+    # If pkg can not be interpreted as a file, use regular import
+    if spec is None:
+        return getattr(importlib.import_module(pkg), cls)
+
+    # File import
+    else:
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[module_name] = module
+        spec.loader.exec_module(module)
+        return getattr(module, cls)
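
Note: `load_class` has moved from `ladim.model` to `ladim.utilities`, matching the updated imports in forcing.py, grid.py and ibms. It resolves either an installed dotted path or a plain `.py` file relative to the working directory. A usage sketch, where `my_module.py` is a hypothetical local file:

    from ladim.utilities import load_class

    Tracker = load_class('ladim.tracker.Tracker')  # regular package import
    UserIBM = load_class('my_module.IBM')          # file import of ./my_module.py, if it exists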
ladim-2.0.9.dist-info/METADATA → ladim-2.1.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: ladim
-Version: 2.0.9
+Version: 2.1.5
 Summary: Lagrangian Advection and Diffusion Model
 Home-page: https://github.com/pnsaevik/ladim
 Author: Bjørn Ådlandsvik
@@ -25,6 +25,7 @@ Requires-Dist: pyproj
 Requires-Dist: pyyaml
 Requires-Dist: scipy
 Requires-Dist: xarray
+Dynamic: license-file
 
 LADiM – the Lagrangian Advection and Diffusion Model
 ====================================================
ladim-2.1.5.dist-info/RECORD ADDED
@@ -0,0 +1,32 @@
+ladim/__init__.py,sha256=deBjPlj3Ebki-_8oKDrAdITyzOn-VF38BpeUAPOTJUg,51
+ladim/__main__.py,sha256=I1AOHBQDwQNe3fVmDMyC84JcriqidOEURoeoJSOCTzg,24
+ladim/config.py,sha256=A-yLStNqqDwh_wMCIsmd8cmaXuUvz698IMlYvGZT5tE,5710
+ladim/forcing.py,sha256=Dy0kvhax0YTvyAMnPJUHAJItkZdZfY1mrVY4fT8A21M,3214
+ladim/grid.py,sha256=WAfvYYLiiiO8MnMkqCqKSSrVpyooymffqFWTWfF9qLM,20945
+ladim/main.py,sha256=jsrJY7G37oCzQQDXLrlzc_I_jM7a1a-vmmQx9cuLRms,2906
+ladim/model.py,sha256=fJNjCoK4FL45KAEUQUksRgKinehsiZre0RfKYgog-3k,2377
+ladim/output.py,sha256=AlP3_FGKU_bmSLRyiR3cbbNCwMVkYcuwOjoy0397-CI,8307
+ladim/release.py,sha256=P9KnZminvFPMfAJPizo72d6z8-X5ttM9Qw9p_yPtfpQ,13150
+ladim/sample.py,sha256=n8wRGd_VsW_qyQe1ZoTpmfZcdcwB929vsM8PoKG6JTs,8292
+ladim/solver.py,sha256=bfpb2z5ZU0k8eoFWliMMbafYddL7-AfoTvNONO43apo,785
+ladim/state.py,sha256=xDP3DfuG7NZyYOkFr-KUotrbwPIGUVHhqy9Gisl-85Q,2872
+ladim/tracker.py,sha256=Dpf26jAyu1xAuySRsv3-sOCXJhY3sDvmzUiUyMRUYqU,5118
+ladim/utilities.py,sha256=a3ZK3OuJRfzjEDAfASsuEKItyWC-4GsGUWKEZp1hg0E,1790
+ladim/gridforce/ROMS.py,sha256=VzvKXyyYSdQayCszTZQOh36aXyTaCJ7rxOhGDRjbPhE,27796
+ladim/gridforce/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ladim/gridforce/analytical.py,sha256=qI-1LJdjmnwwanzOdrsDZqwGgo73bT75CB7pMaxbHKM,1094
+ladim/gridforce/zROMS.py,sha256=4bnrmcXiWpCAUch9uqd_0XmyKRh-Ll6sFvIHiTbTOOg,23996
+ladim/ibms/__init__.py,sha256=YUbvHnFXfSJ0lTl6elc-ajyIh1LDqWYlX7Q_MHuNce4,939
+ladim/ibms/light.py,sha256=POltHmKkX8-q3t9wXyfcseCKEq9Bq-kX1WEJYsr1lNQ,2737
+ladim/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ladim-2.1.5.dist-info/licenses/LICENSE,sha256=BgtXyjNr6Ly9nQ7ZLXKpV3r5kWRLnh5MiN0dxp0Bvfc,1085
+postladim/__init__.py,sha256=ND-wa5_GNg01Tp0p_1zA1VD804zbfP3o4Cmd8pWU7OE,113
+postladim/cellcount.py,sha256=nCFu9iJmprubn4YmPB4W0VO02GfEb90Iif7D49w1Kss,2054
+postladim/kde_plot.py,sha256=GvMWzT6VxIeXKh1cnqaGzR-4jGG_WIHGMLPpRMXIpo4,1628
+postladim/particlefile.py,sha256=0aif9wYUJ-VrpQKeCef8wB5VCiBB-gWY6sxNCUYviTA,4889
+postladim/variable.py,sha256=-2aihoppYMMmpSpCqaF31XvpinTMaH3Y01-USDIkbBc,6587
+ladim-2.1.5.dist-info/METADATA,sha256=QxS46RvOa3FGYSgK74fjgEUlUxokvWzNZWqDihhtFHk,1906
+ladim-2.1.5.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ladim-2.1.5.dist-info/entry_points.txt,sha256=JDlNJo87GJaOkH0-BpAzTPLCrZcuPSdSlHNQ4XmnoRg,41
+ladim-2.1.5.dist-info/top_level.txt,sha256=TK8Gl7d6MsrAQvqKG4b6YJCbB4UL46Se3SzsI-sJAuc,16
+ladim-2.1.5.dist-info/RECORD,,
ladim-2.0.9.dist-info/WHEEL → ladim-2.1.5.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.8.2)
+Generator: setuptools (78.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
ladim-2.0.9.dist-info/RECORD REMOVED
@@ -1,32 +0,0 @@
-ladim/__init__.py,sha256=yknbApyGLwkuXI_tlmaf1pWerT0JYcqsUX1925PumLM,51
-ladim/__main__.py,sha256=I1AOHBQDwQNe3fVmDMyC84JcriqidOEURoeoJSOCTzg,24
-ladim/config.py,sha256=l20q8C-TPTM8HkVbdYyDCaTD2jszPQPFwnAEdL8i9vs,5769
-ladim/forcing.py,sha256=880ZtuQkkyZsHKD08ogRC__oGAfu-Y_MrHm7uoZoDGU,3153
-ladim/grid.py,sha256=aZl8HUrY5oJSUKoVEY40reOfAEjClHuNlpzNO1GJK8k,20897
-ladim/main.py,sha256=jsrJY7G37oCzQQDXLrlzc_I_jM7a1a-vmmQx9cuLRms,2906
-ladim/model.py,sha256=zTGZtpXFsVlhT8OqaGFMteMc_nsCJEv3ENehhzKuG60,4221
-ladim/output.py,sha256=1d7p2f3fP5flsRaMdGgGAMU3FaQJg-0OASBAF_cN1II,8317
-ladim/release.py,sha256=lYpuwVYXHntW1Q-jFjkVbveGl7SSIb-EfsJfEZutbXs,8339
-ladim/sample.py,sha256=n8wRGd_VsW_qyQe1ZoTpmfZcdcwB929vsM8PoKG6JTs,8292
-ladim/solver.py,sha256=vOfVUzuQnAylPvtgdP6Ez7_pQGbYDCIYgYfXIbJJ-6M,765
-ladim/state.py,sha256=RkKZQn1G9SbhdA3CTq4jpiN-8YIv1QXDRgI6RZlI7U0,3436
-ladim/tracker.py,sha256=hSlCKlBRyLVEYP40QIGKub6mDYFOApyGhUypLrrP9w8,4977
-ladim/utilities.py,sha256=r7-zShqJhh0cBctDUmtfw-GBOk1eTTYR4S72b0ouiSQ,994
-ladim/gridforce/ROMS.py,sha256=yjEf6KM-nxSn4Hexr-5YctQ0rqjzxMDeL4mmQe3w7Vk,27788
-ladim/gridforce/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ladim/gridforce/analytical.py,sha256=qI-1LJdjmnwwanzOdrsDZqwGgo73bT75CB7pMaxbHKM,1094
-ladim/gridforce/zROMS.py,sha256=4bnrmcXiWpCAUch9uqd_0XmyKRh-Ll6sFvIHiTbTOOg,23996
-ladim/ibms/__init__.py,sha256=LLKhHJgEu-W6cbFjzg2apc-MPoY9wJF2z7S9W2EeWA0,698
-ladim/ibms/light.py,sha256=POltHmKkX8-q3t9wXyfcseCKEq9Bq-kX1WEJYsr1lNQ,2737
-ladim/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-postladim/__init__.py,sha256=ND-wa5_GNg01Tp0p_1zA1VD804zbfP3o4Cmd8pWU7OE,113
-postladim/cellcount.py,sha256=nCFu9iJmprubn4YmPB4W0VO02GfEb90Iif7D49w1Kss,2054
-postladim/kde_plot.py,sha256=GvMWzT6VxIeXKh1cnqaGzR-4jGG_WIHGMLPpRMXIpo4,1628
-postladim/particlefile.py,sha256=0aif9wYUJ-VrpQKeCef8wB5VCiBB-gWY6sxNCUYviTA,4889
-postladim/variable.py,sha256=-2aihoppYMMmpSpCqaF31XvpinTMaH3Y01-USDIkbBc,6587
-ladim-2.0.9.dist-info/LICENSE,sha256=BgtXyjNr6Ly9nQ7ZLXKpV3r5kWRLnh5MiN0dxp0Bvfc,1085
-ladim-2.0.9.dist-info/METADATA,sha256=ftaYR5SRvCDWj_8rb7EW-tk_RbpBmjmOGYOg7UP9sVs,1884
-ladim-2.0.9.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
-ladim-2.0.9.dist-info/entry_points.txt,sha256=JDlNJo87GJaOkH0-BpAzTPLCrZcuPSdSlHNQ4XmnoRg,41
-ladim-2.0.9.dist-info/top_level.txt,sha256=TK8Gl7d6MsrAQvqKG4b6YJCbB4UL46Se3SzsI-sJAuc,16
-ladim-2.0.9.dist-info/RECORD,,