ladim 2.0.1__tar.gz → 2.0.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. {ladim-2.0.1 → ladim-2.0.3}/PKG-INFO +1 -1
  2. {ladim-2.0.1 → ladim-2.0.3}/ladim/__init__.py +1 -1
  3. ladim-2.0.3/ladim/config.py +145 -0
  4. {ladim-2.0.1 → ladim-2.0.3}/ladim/gridforce/ROMS.py +33 -16
  5. {ladim-2.0.1 → ladim-2.0.3}/ladim/gridforce/zROMS.py +30 -16
  6. {ladim-2.0.1 → ladim-2.0.3}/ladim/main.py +17 -14
  7. {ladim-2.0.1 → ladim-2.0.3}/ladim/model.py +23 -1
  8. {ladim-2.0.1 → ladim-2.0.3}/ladim/state.py +8 -0
  9. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/PKG-INFO +1 -1
  10. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/SOURCES.txt +3 -0
  11. ladim-2.0.3/tests/test_config.py +88 -0
  12. ladim-2.0.3/tests/test_forcing.py +10 -0
  13. ladim-2.0.3/tests/test_model.py +33 -0
  14. ladim-2.0.1/ladim/config.py +0 -125
  15. {ladim-2.0.1 → ladim-2.0.3}/LICENSE +0 -0
  16. {ladim-2.0.1 → ladim-2.0.3}/README.md +0 -0
  17. {ladim-2.0.1 → ladim-2.0.3}/ladim/__main__.py +0 -0
  18. {ladim-2.0.1 → ladim-2.0.3}/ladim/forcing.py +0 -0
  19. {ladim-2.0.1 → ladim-2.0.3}/ladim/grid.py +0 -0
  20. {ladim-2.0.1 → ladim-2.0.3}/ladim/gridforce/__init__.py +0 -0
  21. {ladim-2.0.1 → ladim-2.0.3}/ladim/gridforce/analytical.py +0 -0
  22. {ladim-2.0.1 → ladim-2.0.3}/ladim/ibms/__init__.py +0 -0
  23. {ladim-2.0.1 → ladim-2.0.3}/ladim/ibms/light.py +0 -0
  24. {ladim-2.0.1 → ladim-2.0.3}/ladim/output.py +0 -0
  25. {ladim-2.0.1 → ladim-2.0.3}/ladim/plugins/__init__.py +0 -0
  26. {ladim-2.0.1 → ladim-2.0.3}/ladim/release.py +0 -0
  27. {ladim-2.0.1 → ladim-2.0.3}/ladim/sample.py +0 -0
  28. {ladim-2.0.1 → ladim-2.0.3}/ladim/solver.py +0 -0
  29. {ladim-2.0.1 → ladim-2.0.3}/ladim/tracker.py +0 -0
  30. {ladim-2.0.1 → ladim-2.0.3}/ladim/utilities.py +0 -0
  31. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/dependency_links.txt +0 -0
  32. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/entry_points.txt +0 -0
  33. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/requires.txt +0 -0
  34. {ladim-2.0.1 → ladim-2.0.3}/ladim.egg-info/top_level.txt +0 -0
  35. {ladim-2.0.1 → ladim-2.0.3}/postladim/__init__.py +0 -0
  36. {ladim-2.0.1 → ladim-2.0.3}/postladim/cellcount.py +0 -0
  37. {ladim-2.0.1 → ladim-2.0.3}/postladim/kde_plot.py +0 -0
  38. {ladim-2.0.1 → ladim-2.0.3}/postladim/particlefile.py +0 -0
  39. {ladim-2.0.1 → ladim-2.0.3}/postladim/variable.py +0 -0
  40. {ladim-2.0.1 → ladim-2.0.3}/pyproject.toml +0 -0
  41. {ladim-2.0.1 → ladim-2.0.3}/setup.cfg +0 -0
  42. {ladim-2.0.1 → ladim-2.0.3}/tests/test_ladim.py +0 -0
  43. {ladim-2.0.1 → ladim-2.0.3}/tests/test_output.py +0 -0
  44. {ladim-2.0.1 → ladim-2.0.3}/tests/test_release.py +0 -0
  45. {ladim-2.0.1 → ladim-2.0.3}/tests/test_solver.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ladim
3
- Version: 2.0.1
3
+ Version: 2.0.3
4
4
  Summary: Lagrangian Advection and Diffusion Model
5
5
  Home-page: https://github.com/pnsaevik/ladim
6
6
  Author: Bjørn Ådlandsvik
@@ -1,3 +1,3 @@
1
- __version__ = '2.0.1'
1
+ __version__ = '2.0.3'
2
2
 
3
3
  from .main import main, run
@@ -0,0 +1,145 @@
1
+ """
2
+ Functions for parsing configuration parameters.
3
+
4
+ The module contains functions for parsing input configuration
5
+ parameters, appending default values and converting between
6
+ different versions of config file formats.
7
+ """
8
+ import numpy as np
9
+
10
+
11
def configure(module_conf):
    """Parse a LADiM configuration and normalize it to the current format.

    :param module_conf: Either a ready-made configuration mapping or a
        YAML stream/string to be parsed.
    :return: Configuration dict upgraded to the newest config version.
    """
    import yaml

    # Accept either a dict directly or a YAML document
    if isinstance(module_conf, dict):
        config_dict = module_conf
    else:
        config_dict = yaml.safe_load(module_conf)

    # Legacy (version 1) configs are recognized by their
    # 'particle_release' section; otherwise assume the new format.
    if 'version' not in config_dict:
        config_dict['version'] = 1 if 'particle_release' in config_dict else 2

    return _versioned_configure(config_dict)
27
+
28
+
29
+ def _versioned_configure(config_dict):
30
+ if config_dict['version'] == 1:
31
+ config_dict = convert_1_to_2(config_dict)
32
+
33
+ return config_dict
34
+
35
+
36
def dict_get(d, items, default=None):
    """Look up dotted key paths in a nested dict, trying alternatives in order.

    :param d: Nested dictionary to search.
    :param items: A single dotted key path (e.g. ``'a.b.c'``) or a list
        of such paths, tried left to right.
    :param default: Value returned when no path matches.
    :return: The value of the first matching path, or *default*.
    """
    if isinstance(items, str):
        items = [items]

    for item in items:
        node = d
        try:
            # Walk one component at a time; a missing component aborts
            # this alternative and moves on to the next one.
            for token in str(item).split(sep='.'):
                if token in node:
                    node = node[token]
                else:
                    raise KeyError
        except KeyError:
            continue
        return node

    return default
47
+
48
+
49
def dict_get_single(d, item):
    """Return the value at a dotted key path in a nested dict.

    :param d: Nested dictionary to search.
    :param item: Dotted key path, e.g. ``'a.b.c'``.
    :return: The value found at the end of the path.
    :raises KeyError: If any component of the path is missing.
    """
    sub_dict = d
    for t in str(item).split(sep='.'):
        if t not in sub_dict:
            # Include the full requested path in the error so callers
            # (and tracebacks) show which lookup failed.
            raise KeyError(item)
        sub_dict = sub_dict[t]

    return sub_dict
59
+
60
+
61
def convert_1_to_2(c):
    """Convert a version-1 LADiM configuration dict to the version-2 layout.

    Missing sections are tolerated: lookups fall back to defaults, so an
    empty input dict produces a (mostly empty) version-2 skeleton.

    :param c: Version-1 configuration dictionary.
    :return: Equivalent version-2 configuration dictionary.
    """

    out = {}

    # Read timedelta (model time step) and express it as integer seconds
    dt_sec = None
    if 'numerics' in c:
        if 'dt' in c['numerics']:
            dt_value, dt_unit = c['numerics']['dt']
            dt_sec = np.timedelta64(dt_value, dt_unit).astype('timedelta64[s]').astype('int64')

    out['version'] = 2

    # Solver section: simulation time span and module execution order
    out['solver'] = {}
    out['solver']['start'] = dict_get(c, 'time_control.start_time')
    out['solver']['stop'] = dict_get(c, 'time_control.stop_time')
    out['solver']['step'] = dt_sec
    out['solver']['seed'] = dict_get(c, 'numerics.seed')
    out['solver']['order'] = ['release', 'forcing', 'output', 'tracker', 'ibm', 'state']

    # Grid section: first matching file spec wins; falls back to the
    # forcing input file when no dedicated grid file is given
    out['grid'] = {}
    out['grid']['file'] = dict_get(c, [
        'files.grid_file', 'gridforce.grid_file',
        'files.input_file', 'gridforce.input_file'])
    out['grid']['legacy_module'] = dict_get(c, 'gridforce.module', '') + '.Grid'
    out['grid']['start_time'] = np.datetime64(dict_get(c, 'time_control.start_time', '1970'), 's')

    # Forcing section
    out['forcing'] = {}
    out['forcing']['file'] = dict_get(c, ['gridforce.input_file', 'files.input_file'])
    out['forcing']['legacy_module'] = dict_get(c, 'gridforce.module', '') + '.Forcing'
    out['forcing']['start_time'] = np.datetime64(dict_get(c, 'time_control.start_time', '1970'), 's')
    out['forcing']['stop_time'] = np.datetime64(dict_get(c, 'time_control.stop_time', '1970'), 's')
    out['forcing']['dt'] = dt_sec
    out['forcing']['ibm_forcing'] = dict_get(c, 'gridforce.ibm_forcing', [])

    # Output section
    out['output'] = {}
    out['output']['file'] = dict_get(c, 'files.output_file')
    out['output']['frequency'] = dict_get(c, 'output_variables.outper')
    out['output']['variables'] = {}

    # Convert output variable format spec
    outvar_names = dict_get(c, 'output_variables.particle', []).copy()
    outvar_names += dict_get(c, 'output_variables.instance', [])
    for v in outvar_names:
        out['output']['variables'][v] = c['output_variables'][v].copy()
        # Normalize the release-time epoch to the unix epoch
        if v == 'release_time' and 'units' in c['output_variables'][v]:
            out['output']['variables'][v]['units'] = 'seconds since 1970-01-01'
    # Per-particle (as opposed to per-instance) variables are written once
    for v in dict_get(c, 'output_variables.particle', []):
        out['output']['variables'][v]['kind'] = 'initial'

    # Tracker section
    out['tracker'] = {}
    out['tracker']['method'] = dict_get(c, 'numerics.advection')
    out['tracker']['diffusion'] = dict_get(c, 'numerics.diffusion')

    # Read release config
    out['release'] = {}
    out['release']['file'] = dict_get(c, 'files.particle_release_file')
    out['release']['colnames'] = dict_get(c, 'particle_release.variables', [])
    # 'frequency' is only present for continuous releases
    if dict_get(c, 'particle_release.release_type', '') == 'continuous':
        out['release']['frequency'] = dict_get(c, 'particle_release.release_frequency', [0, 's'])
    # Map legacy column format specs ({column: format}) to {format: column}
    out['release']['formats'] = {
        c.get('particle_release', {})[v]: v
        for v in dict_get(c, 'particle_release.variables', [])
        if v in c.get('particle_release', {}).keys()
    }
    # IBM variables not present in the release file default to zero
    out['release']['defaults'] = {
        k: np.float64(0)
        for k in dict_get(c, 'state.ibm_variables', []) + dict_get(c, 'ibm.variables', [])
        if k not in out['release']['colnames']
    }

    # IBM section: legacy IBM modules are wrapped by ladim.ibms.LegacyIBM
    out['ibm'] = {}
    if 'ibm' in c:
        out['ibm']['module'] = 'ladim.ibms.LegacyIBM'
        out['ibm']['legacy_module'] = dict_get(c, ['ibm.ibm_module', 'ibm.module'])
        out['ibm']['conf'] = {}
        out['ibm']['conf']['dt'] = dt_sec
        out['ibm']['conf']['output_instance'] = dict_get(c, 'output_variables.instance', [])
        out['ibm']['conf']['nc_attributes'] = {
            k: v
            for k, v in out['output']['variables'].items()
        }
        out['ibm']['conf']['ibm'] = {k: v for k, v in c['ibm'].items() if k != 'ibm_module'}

    return out
@@ -15,10 +15,12 @@ import glob
15
15
  import logging
16
16
  import numpy as np
17
17
  from netCDF4 import Dataset, num2date
18
-
19
18
  from ladim.sample import sample2D, bilin_inv
20
19
 
21
20
 
21
+ logger = logging.getLogger(__name__)
22
+
23
+
22
24
  class Grid:
23
25
  """Simple ROMS grid object
24
26
 
@@ -34,7 +36,7 @@ class Grid:
34
36
 
35
37
  def __init__(self, config):
36
38
 
37
- logging.info("Initializing ROMS-type grid object")
39
+ logger.info("Initializing ROMS-type grid object")
38
40
 
39
41
  # Grid file
40
42
  if "grid_file" in config["gridforce"]:
@@ -44,14 +46,14 @@ class Grid:
44
46
  files.sort()
45
47
  grid_file = files[0]
46
48
  else:
47
- logging.error("No grid file specified")
49
+ logger.error("No grid file specified")
48
50
  raise SystemExit(1)
49
51
 
50
52
  try:
51
53
  ncid = Dataset(grid_file)
52
54
  ncid.set_auto_mask(False)
53
55
  except OSError:
54
- logging.error("Could not open grid file " + grid_file)
56
+ logger.error("Could not open grid file " + grid_file)
55
57
  raise SystemExit(1)
56
58
 
57
59
  # Subgrid, only considers internal grid cells
@@ -180,6 +182,8 @@ class Grid:
180
182
  """Return the depth of grid cells"""
181
183
  I = X.round().astype(int) - self.i0
182
184
  J = Y.round().astype(int) - self.j0
185
+ I = np.minimum(np.maximum(I, 0), self.H.shape[1] - 1)
186
+ J = np.minimum(np.maximum(J, 0), self.H.shape[0] - 1)
183
187
  return self.H[J, I]
184
188
 
185
189
  def lonlat(self, X, Y, method="bilinear"):
@@ -189,6 +193,8 @@ class Grid:
189
193
  # else: containing grid cell, less accurate
190
194
  I = X.round().astype("int") - self.i0
191
195
  J = Y.round().astype("int") - self.j0
196
+ I = np.minimum(np.maximum(I, 0), self.lon.shape[1] - 1)
197
+ J = np.minimum(np.maximum(J, 0), self.lon.shape[0] - 1)
192
198
  return self.lon[J, I], self.lat[J, I]
193
199
 
194
200
  def ingrid(self, X: np.ndarray, Y: np.ndarray) -> np.ndarray:
@@ -204,6 +210,11 @@ class Grid:
204
210
  """Returns True for points on land"""
205
211
  I = X.round().astype(int) - self.i0
206
212
  J = Y.round().astype(int) - self.j0
213
+
214
+ # Constrain to valid indices
215
+ I = np.minimum(np.maximum(I, 0), self.M.shape[-1] - 1)
216
+ J = np.minimum(np.maximum(J, 0), self.M.shape[-2] - 1)
217
+
207
218
  return self.M[J, I] < 1
208
219
 
209
220
  # Error if point outside
@@ -242,7 +253,7 @@ class Forcing:
242
253
 
243
254
  def __init__(self, config, grid):
244
255
 
245
- logging.info("Initiating forcing")
256
+ logger.info("Initiating forcing")
246
257
 
247
258
  self._grid = grid # Get the grid object, make private?
248
259
  # self.config = config["gridforce"]
@@ -255,9 +266,9 @@ class Forcing:
255
266
  files = self.find_files(config["gridforce"])
256
267
  numfiles = len(files)
257
268
  if numfiles == 0:
258
- logging.error("No input file: {}".format(config["gridforce"]["input_file"]))
269
+ logger.error("No input file: {}".format(config["gridforce"]["input_file"]))
259
270
  raise SystemExit(3)
260
- logging.info("Number of forcing files = {}".format(numfiles))
271
+ logger.info("Number of forcing files = {}".format(numfiles))
261
272
 
262
273
  # ---------------------------
263
274
  # Overview of all the files
@@ -357,6 +368,7 @@ class Forcing:
357
368
  all_frames = [] # All time frames
358
369
  num_frames = {} # Number of time frames in each file
359
370
  for fname in files:
371
+ logger.info(f'Open forcing file {fname}')
360
372
  with Dataset(fname) as nc:
361
373
  new_times = nc.variables["ocean_time"][:]
362
374
  num_frames[fname] = len(new_times)
@@ -370,11 +382,11 @@ class Forcing:
370
382
  if np.any(I):
371
383
  i = I.nonzero()[0][0] + 1 # Index of first out-of-order frame
372
384
  oooframe = str(all_frames[i]).split('.')[0] # Remove microseconds
373
- logging.info(f"Time frame {i} = {oooframe} out of order")
374
- logging.critical("Forcing time frames not strictly sorted")
385
+ logger.info(f"Time frame {i} = {oooframe} out of order")
386
+ logger.critical("Forcing time frames not strictly sorted")
375
387
  raise SystemExit(4)
376
388
 
377
- logging.info(f"Number of available forcing times = {len(all_frames)}")
389
+ logger.info(f"Number of available forcing times = {len(all_frames)}")
378
390
  return all_frames, num_frames
379
391
 
380
392
  @staticmethod
@@ -382,8 +394,8 @@ class Forcing:
382
394
 
383
395
  time0 = all_frames[0]
384
396
  time1 = all_frames[-1]
385
- logging.info(f"First forcing time = {time0}")
386
- logging.info(f"Last forcing time = {time1}")
397
+ logger.info(f"First forcing time = {time0}")
398
+ logger.info(f"Last forcing time = {time1}")
387
399
  start_time = np.datetime64(config["start_time"])
388
400
  dt = np.timedelta64(int(config["dt"]), "s")
389
401
 
@@ -391,10 +403,10 @@ class Forcing:
391
403
  # ------------------------------------------------------
392
404
 
393
405
  if time0 > start_time:
394
- logging.error("No forcing at start time")
406
+ logger.error("No forcing at start time")
395
407
  raise SystemExit(3)
396
408
  if time1 < config["stop_time"]:
397
- logging.error("No forcing at stop time")
409
+ logger.error("No forcing at stop time")
398
410
  raise SystemExit(3)
399
411
 
400
412
  # Make a list steps of the forcing time steps
@@ -427,7 +439,7 @@ class Forcing:
427
439
  interpolate_velocity_in_time = True
428
440
  interpolate_ibm_forcing_in_time = False
429
441
 
430
- logging.debug("Updating forcing, time step = {}".format(t))
442
+ logger.debug("Updating forcing, time step = {}".format(t))
431
443
  if t in self.steps: # No time interpolation
432
444
  self.U = self.Unew
433
445
  self.V = self.Vnew
@@ -486,7 +498,7 @@ class Forcing:
486
498
 
487
499
  # Handle file opening/closing
488
500
  # Always read velocity before other fields
489
- logging.info("Reading velocity for time step = {}".format(n))
501
+ logger.info("Reading velocity for time step = {}".format(n))
490
502
 
491
503
  # If finished a file or first read (self._nc == "")
492
504
  if not self._nc: # First read
@@ -799,6 +811,11 @@ def sample3D(F, X, Y, K, A, method="bilinear"):
799
811
  # else: method == 'nearest'
800
812
  I = X.round().astype("int")
801
813
  J = Y.round().astype("int")
814
+
815
+ # Constrain to valid indices
816
+ I = np.minimum(np.maximum(I, 0), F.shape[-1] - 1)
817
+ J = np.minimum(np.maximum(J, 0), F.shape[-2] - 1)
818
+
802
819
  return F[K, J, I]
803
820
 
804
821
 
@@ -18,6 +18,9 @@ from netCDF4 import Dataset, num2date
18
18
  from ladim.sample import sample2D
19
19
 
20
20
 
21
+ logger = logging.getLogger(__name__)
22
+
23
+
21
24
  class Grid:
22
25
  """Simple ROMS grid object
23
26
 
@@ -33,13 +36,24 @@ class Grid:
33
36
 
34
37
  def __init__(self, config):
35
38
 
36
- logging.info("Initializing zROMS grid object")
39
+ logger.info("Initializing zROMS grid object")
40
+
41
+ # Grid file
42
+ if "grid_file" in config["gridforce"]:
43
+ grid_file = config["gridforce"]["grid_file"]
44
+ elif "input_file" in config["gridforce"]:
45
+ files = glob.glob(config["gridforce"]["input_file"])
46
+ files.sort()
47
+ grid_file = files[0]
48
+ else:
49
+ logger.error("No grid file specified")
50
+ raise SystemExit(1)
51
+
37
52
  try:
38
- ncid = Dataset(config["gridforce"]["grid_file"])
53
+ ncid = Dataset(grid_file)
54
+ ncid.set_auto_mask(False)
39
55
  except OSError:
40
- logging.error(
41
- "Grid file {} not found".format(config["gridforce"]["grid_file"])
42
- )
56
+ logger.error("Could not open grid file " + grid_file)
43
57
  raise SystemExit(1)
44
58
 
45
59
  # Subgrid, only considers internal grid cells
@@ -188,7 +202,7 @@ class Forcing:
188
202
 
189
203
  def __init__(self, config, grid):
190
204
 
191
- logging.info("Initiating forcing")
205
+ logger.info("Initiating forcing")
192
206
 
193
207
  self._grid = grid # Get the grid object, make private?
194
208
 
@@ -199,9 +213,9 @@ class Forcing:
199
213
  files.sort()
200
214
  numfiles = len(files)
201
215
  if numfiles == 0:
202
- logging.error("No input file: {}".format(config["gridforce"]["input_file"]))
216
+ logger.error("No input file: {}".format(config["gridforce"]["input_file"]))
203
217
  raise SystemExit(3)
204
- logging.info("Number of forcing files = {}".format(numfiles))
218
+ logger.info("Number of forcing files = {}".format(numfiles))
205
219
 
206
220
  # ----------------------------------------
207
221
  # Open first file for some general info
@@ -244,20 +258,20 @@ class Forcing:
244
258
  num_frames = [] # Available time frames in each file
245
259
  # change_times = [] # Times for change of file
246
260
  for fname in files:
247
- print(fname)
261
+ logging.info(f'Load {fname}')
248
262
  with Dataset(fname) as nc:
249
263
  # new_times = nc.variables['ocean_time'][:]
250
264
  new_times = nc.variables["time"][:]
251
265
  times.extend(new_times)
252
266
  num_frames.append(len(new_times))
253
- logging.info("Number of available forcing times = {:d}".format(len(times)))
267
+ logger.info("Number of available forcing times = {:d}".format(len(times)))
254
268
 
255
269
  # Find first/last forcing times
256
270
  # -----------------------------
257
271
  time0 = num2date(times[0], time_units)
258
272
  time1 = num2date(times[-1], time_units)
259
- logging.info("time0 = {}".format(str(time0)))
260
- logging.info("time1 = {}".format(str(time1)))
273
+ logger.info("time0 = {}".format(str(time0)))
274
+ logger.info("time1 = {}".format(str(time1)))
261
275
  # print(time0)
262
276
  # print(time1)
263
277
  start_time = np.datetime64(config["start_time"])
@@ -269,10 +283,10 @@ class Forcing:
269
283
  # Use logging module for this
270
284
 
271
285
  if time0 > start_time:
272
- logging.error("No forcing at start time")
286
+ logger.error("No forcing at start time")
273
287
  raise SystemExit(3)
274
288
  if time1 < config["stop_time"]:
275
- logging.error("No forcing at stop time")
289
+ logger.error("No forcing at stop time")
276
290
  raise SystemExit(3)
277
291
 
278
292
  # Make a list steps of the forcing time steps
@@ -362,7 +376,7 @@ class Forcing:
362
376
  interpolate_velocity_in_time = True
363
377
  interpolate_ibm_forcing_in_time = False
364
378
 
365
- logging.debug("Updating forcing, time step = {}".format(t))
379
+ logger.debug("Updating forcing, time step = {}".format(t))
366
380
  if t in self.steps: # No time interpolation
367
381
  self.U = self.Unew
368
382
  self.V = self.Vnew
@@ -399,7 +413,7 @@ class Forcing:
399
413
 
400
414
  # Handle file opening/closing
401
415
  # Always read velocity before other fields
402
- logging.debug("Reading velocity for time step = {}".format(n))
416
+ logger.debug("Reading velocity for time step = {}".format(n))
403
417
  first = True
404
418
  if first: # Open file initiallt
405
419
  self._nc = Dataset(self._files[self.file_idx[n]])
@@ -12,13 +12,14 @@ Lagrangian Advection and Diffusion Model
12
12
  # ---------------------------------
13
13
 
14
14
  import logging
15
-
16
15
  import ladim
17
-
18
16
  from .config import configure
19
17
  from .model import Model
20
18
 
21
19
 
20
+ logger = logging.getLogger(__name__)
21
+
22
+
22
23
  def main(config_stream, loglevel=logging.INFO):
23
24
  """Main function for LADiM"""
24
25
 
@@ -46,7 +47,9 @@ def run():
46
47
 
47
48
  logging.basicConfig(
48
49
  level=logging.INFO,
49
- format='%(levelname)s:%(module)s - %(message)s')
50
+ format='%(asctime)s %(levelname)s:%(module)s - %(message)s',
51
+ datefmt='%Y-%m-%d %H:%M:%S',
52
+ )
50
53
 
51
54
  # ====================
52
55
  # Parse command line
@@ -67,23 +70,23 @@ def run():
67
70
 
68
71
  args = parser.parse_args()
69
72
 
70
- logging.info(" ================================================")
71
- logging.info(" === Lagrangian Advection and Diffusion Model ===")
72
- logging.info(" ================================================\n")
73
+ logger.info(" ================================================")
74
+ logger.info(" === Lagrangian Advection and Diffusion Model ===")
75
+ logger.info(" ================================================\n")
73
76
 
74
- logging.info(f"ladim path: {ladim.__file__.strip('__init.py__')}")
75
- logging.info(f"ladim version: {ladim.__version__}\n")
76
- logging.info(f"python version: {sys.version.split()[0]}\n")
77
+ logger.info(f"ladim path: {ladim.__file__.strip('__init.py__')}")
78
+ logger.info(f"ladim version: {ladim.__version__}\n")
79
+ logger.info(f"python version: {sys.version.split()[0]}\n")
77
80
 
78
- logging.info(f" Configuration file: {args.config_file}")
79
- logging.info(f" loglevel = {logging.getLevelName(args.loglevel)}")
81
+ logger.info(f" Configuration file: {args.config_file}")
82
+ logger.info(f" loglevel = {logging.getLevelName(args.loglevel)}")
80
83
 
81
84
  # =============
82
85
  # Sanity check
83
86
  # =============
84
87
 
85
88
  if not Path(args.config_file).exists():
86
- logging.critical(f'Configuration file {args.config_file} not found')
89
+ logger.critical(f'Configuration file {args.config_file} not found')
87
90
  raise SystemExit(1)
88
91
 
89
92
  # ===================
@@ -92,7 +95,7 @@ def run():
92
95
 
93
96
  # Start message
94
97
  now = datetime.datetime.now().replace(microsecond=0)
95
- logging.info(f'LADiM simulation starting, wall time={now}')
98
+ logger.info(f'LADiM simulation starting, wall time={now}')
96
99
 
97
100
  fp = open(args.config_file, encoding='utf8')
98
101
  ladim.main(config_stream=fp, loglevel=args.loglevel)
@@ -100,4 +103,4 @@ def run():
100
103
  # Reset logging and print final message
101
104
  logging.getLogger().setLevel(logging.INFO)
102
105
  now = datetime.datetime.now().replace(microsecond=0)
103
- logging.info(f'LADiM simulation finished, wall time={now}')
106
+ logger.info(f'LADiM simulation finished, wall time={now}')
@@ -1,4 +1,7 @@
1
1
  import importlib
2
+ import importlib.util
3
+ import sys
4
+ from pathlib import Path
2
5
 
3
6
  from typing import TYPE_CHECKING
4
7
  if TYPE_CHECKING:
@@ -101,7 +104,26 @@ class Model:
101
104
 
102
105
  def load_class(name):
103
106
  pkg, cls = name.rsplit(sep='.', maxsplit=1)
104
- return getattr(importlib.import_module(pkg), cls)
107
+
108
+ # Check if "pkg" is an existing file
109
+ spec = None
110
+ module_name = None
111
+ file_name = pkg + '.py'
112
+ if Path(file_name).exists():
113
+ # This can return None if there were import errors
114
+ module_name = pkg
115
+ spec = importlib.util.spec_from_file_location(module_name, file_name)
116
+
117
+ # If pkg can not be interpreted as a file, use regular import
118
+ if spec is None:
119
+ return getattr(importlib.import_module(pkg), cls)
120
+
121
+ # File import
122
+ else:
123
+ module = importlib.util.module_from_spec(spec)
124
+ sys.modules[module_name] = module
125
+ spec.loader.exec_module(module)
126
+ return getattr(module, cls)
105
127
 
106
128
 
107
129
  class Module:
@@ -122,6 +122,14 @@ class DynamicState(State):
122
122
  raise AttributeError(f'Attribute not defined: {item}')
123
123
  return self[item]
124
124
 
125
+ def __setattr__(self, item, value):
126
+ if item in list(self.__dict__.keys()) + ['_data', '_model', '_num_released', '_varnames']:
127
+ super().__setattr__(item, value)
128
+ elif item in self._data:
129
+ self._data[item] = value
130
+ else:
131
+ raise AttributeError(f"Attribute not defined: '{item}'")
132
+
125
133
  def __contains__(self, item):
126
134
  return item in self._data
127
135
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ladim
3
- Version: 2.0.1
3
+ Version: 2.0.3
4
4
  Summary: Lagrangian Advection and Diffusion Model
5
5
  Home-page: https://github.com/pnsaevik/ladim
6
6
  Author: Bjørn Ådlandsvik
@@ -34,7 +34,10 @@ postladim/cellcount.py
34
34
  postladim/kde_plot.py
35
35
  postladim/particlefile.py
36
36
  postladim/variable.py
37
+ tests/test_config.py
38
+ tests/test_forcing.py
37
39
  tests/test_ladim.py
40
+ tests/test_model.py
38
41
  tests/test_output.py
39
42
  tests/test_release.py
40
43
  tests/test_solver.py
@@ -0,0 +1,88 @@
1
+ from ladim import config
2
+ from pathlib import Path
3
+ import yaml
4
+ import numpy as np
5
+ import datetime
6
+
7
+
8
class Test_dict_get:
    """Tests for config.dict_get nested/dotted dictionary lookup."""

    def test_returns_correct_when_existing(self):
        nested = dict(a=1, b=dict(c=2, d=3))
        assert config.dict_get(nested, 'a') == 1
        assert config.dict_get(nested, 'b') == dict(c=2, d=3)
        assert config.dict_get(nested, 'b.c') == 2
        assert config.dict_get(nested, 'b.d') == 3

    def test_returns_default_when_nonexisting(self):
        nested = dict(a=1, b=dict(c=2, d=3))
        # Explicit default
        assert config.dict_get(nested, 'A', 23) == 23
        # Implicit default is None
        assert config.dict_get(nested, 'b.C') is None

    def test_can_try_alternatives(self):
        nested = dict(a=1, b=dict(c=2, d=3))
        # The first existing alternative wins; the default is unused
        assert config.dict_get(nested, ['A', 'a'], 23) == 1
        assert config.dict_get(nested, ['b.C', 'b.c']) == 2
        # No alternative matches: fall back to the (None) default
        assert config.dict_get(nested, ['b.C', 'b.D']) is None
+
27
+
28
class Test_convert_1_to_2:
    """Tests for converting version-1 config files to version-2 dicts."""

    def test_matches_snapshot(self):
        # Load a real version-1 config from the sample data directory
        fname_in = Path(__file__).parent / 'sample_data/ex1/ladim.yaml'
        with open(fname_in, encoding='utf-8') as fp:
            dict_in = yaml.safe_load(fp)

        dict_out = config.convert_1_to_2(dict_in)

        # Full snapshot of the expected version-2 output
        assert dict_out == {
            'forcing': {
                'dt': 60,
                'file': '../forcing*.nc',
                'ibm_forcing': [],
                'legacy_module': 'ladim.gridforce.ROMS.Forcing',
                'start_time': np.datetime64('2015-09-07T01:00:00'),
                'stop_time': np.datetime64('2015-09-07T01:05:00')},
            'grid': {
                'file': '../forcing*.nc',
                'legacy_module': 'ladim.gridforce.ROMS.Grid',
                'start_time': np.datetime64('2015-09-07T01:00:00')},
            'ibm': {},
            'output': {
                'file': 'out.nc',
                'frequency': [60, 's'],
                'variables': {
                    'X': {
                        'long_name': 'particle X-coordinate', 'ncformat': 'f4'},
                    'Y': {
                        'long_name': 'particle Y-coordinate', 'ncformat': 'f4'},
                    'Z': {
                        'long_name': 'particle depth',
                        'ncformat': 'f4',
                        'positive': 'down',
                        'standard_name': 'depth_below_surface',
                        'units': 'm'},
                    'pid': {
                        'long_name': 'particle identifier', 'ncformat': 'i4'},
                    'release_time': {
                        'kind': 'initial',
                        'long_name': 'particle release time',
                        'ncformat': 'i4',
                        'units': 'seconds since '
                                 '1970-01-01'}}},
            'release': {
                'colnames': ['release_time', 'X', 'Y', 'Z', 'group_id'],
                'defaults': {},
                'file': 'particles.rls',
                'formats': {'time': 'release_time'},
                'frequency': [1, 'm']},
            'solver': {
                'order': ['release', 'forcing', 'output', 'tracker', 'ibm', 'state'],
                'seed': 0,
                'start': datetime.datetime(2015, 9, 7, 1, 0),
                'step': 60,
                'stop': datetime.datetime(2015, 9, 7, 1, 5)},
            'tracker': {'diffusion': 0.1, 'method': 'RK4'},
            'version': 2
        }

    def test_accepts_empty_dict(self):
        # Conversion must not raise when every section is missing
        config.convert_1_to_2(dict())
@@ -0,0 +1,10 @@
1
+ from ladim.gridforce import ROMS
2
+ import numpy as np
3
+
4
+
5
class Test_z2s:
    """Tests for depth-to-s-level interpolation in the ROMS gridforce."""

    def test_returns_interpolated_s_level(self):
        # Four rho-level depths at a single horizontal position
        zrho = np.array([-5, -4, -3, -2]).reshape((4, 1, 1))
        depths = np.array([6, 5, 3.5, 2, 0])
        xpos = np.zeros(5)
        ypos = np.zeros(5)

        k, a = ROMS.z2s(zrho, xpos, ypos, depths)

        assert k.tolist() == [1, 1, 2, 3, 3]
        assert a.tolist() == [1.0, 1.0, 0.5, 0.0, 0.0]
@@ -0,0 +1,33 @@
1
+
2
+ from ladim import model
3
+ import pytest
4
+ import os
5
+
6
+
7
@pytest.fixture()
def tmp_path_with_chdir(tmp_path):
    # Run the test from inside the temporary directory, restoring the
    # previous working directory afterwards.
    previous_dir = os.getcwd()
    os.chdir(tmp_path)
    yield tmp_path
    os.chdir(previous_dir)
13
+
14
+
15
+ class Test_load_class:
16
+ def test_can_load_fully_qualified_class(self):
17
+ numpy_dtype_class = model.load_class('numpy.dtype')
18
+ numpy_dtype_i4 = numpy_dtype_class('i4')
19
+ assert numpy_dtype_i4.itemsize == 4
20
+
21
+ def test_can_load_class_in_current_directory(self, tmp_path_with_chdir):
22
+ # Set up a python file in the temp dir which contains a class
23
+ module_path = tmp_path_with_chdir / 'my_module.py'
24
+ module_path.write_text(
25
+ 'class MyClass:\n'
26
+ ' def timestwo(self, a):\n'
27
+ ' return a * 2\n'
28
+ )
29
+
30
+ # Run and test code
31
+ MyClass = model.load_class('my_module.MyClass')
32
+ obj = MyClass()
33
+ assert obj.timestwo(4) == 8
@@ -1,125 +0,0 @@
1
- """
2
- Functions for parsing configuration parameters.
3
-
4
- The module contains functions for parsing input configuration
5
- parameters, appending default values and converting between
6
- different versions of config file formats.
7
- """
8
- import numpy as np
9
-
10
-
11
- def configure(module_conf):
12
- import yaml
13
-
14
- # Handle variations of input config type
15
- if isinstance(module_conf, dict):
16
- config_dict = module_conf
17
- else:
18
- config_dict = yaml.safe_load(module_conf)
19
-
20
- if 'version' not in config_dict:
21
- if 'particle_release' in config_dict:
22
- config_dict['version'] = 1
23
- else:
24
- config_dict['version'] = 2
25
-
26
- return _versioned_configure(config_dict)
27
-
28
-
29
- def _versioned_configure(config_dict):
30
- if config_dict['version'] == 1:
31
- config_dict = _convert_1_to_2(config_dict)
32
-
33
- return config_dict
34
-
35
-
36
- def _convert_1_to_2(c):
37
- # Read timedelta
38
- dt_value, dt_unit = c['numerics']['dt']
39
- dt_sec = np.timedelta64(dt_value, dt_unit).astype('timedelta64[s]').astype('int64')
40
-
41
- # Read output variables
42
- outvars = dict()
43
- outvar_names = c['output_variables'].get('particle', []) + c['output_variables'].get('instance', [])
44
- for v in outvar_names:
45
- outvars[v] = c['output_variables'][v].copy()
46
- for v in c['output_variables'].get('particle', []):
47
- outvars[v]['kind'] = 'initial'
48
- if 'release_time' in outvars and 'units' in outvars['release_time']:
49
- outvars['release_time']['units'] = 'seconds since 1970-01-01'
50
-
51
- # Read release config
52
- relconf = dict(
53
- file=c['files']['particle_release_file'],
54
- frequency=c['particle_release'].get('release_frequency', [0, 's']),
55
- colnames=c['particle_release']['variables'],
56
- formats={
57
- c['particle_release'][v]: v
58
- for v in c['particle_release']['variables']
59
- if v in c['particle_release'].keys()
60
- },
61
- )
62
- if c['particle_release'].get('release_type', '') != 'continuous':
63
- del relconf['frequency']
64
- ibmvars = c.get('state', dict()).get('ibm_variables', [])
65
- ibmvars += c.get('ibm', dict()).get('variables', [])
66
- relconf['defaults'] = {
67
- k: np.float64(0)
68
- for k in ibmvars
69
- if k not in relconf['colnames']
70
- }
71
-
72
- # Read ibm config
73
- ibmconf_legacy = c.get('ibm', dict()).copy()
74
- if 'module' in ibmconf_legacy:
75
- ibmconf_legacy['ibm_module'] = ibmconf_legacy.pop('module')
76
- ibmconf = dict()
77
- if 'ibm_module' in ibmconf_legacy:
78
- ibmconf['module'] = 'ladim.ibms.LegacyIBM'
79
- ibmconf['legacy_module'] = ibmconf_legacy['ibm_module']
80
- ibmconf['conf'] = dict(
81
- dt=dt_sec,
82
- output_instance=c.get('output_variables', {}).get('instance', []),
83
- nc_attributes={k: v for k, v in outvars.items()}
84
- )
85
- ibmconf['conf']['ibm'] = {
86
- k: v
87
- for k, v in ibmconf_legacy.items()
88
- if k != 'ibm_module'
89
- }
90
-
91
- config_dict = dict(
92
- version=2,
93
- solver=dict(
94
- start=c['time_control']['start_time'],
95
- stop=c['time_control']['stop_time'],
96
- step=dt_sec,
97
- seed=c['numerics'].get('seed', None),
98
- order=['release', 'forcing', 'output', 'tracker', 'ibm', 'state'],
99
- ),
100
- grid=dict(
101
- file=c['gridforce']['input_file'],
102
- legacy_module=c['gridforce']['module'] + '.Grid',
103
- start_time=np.datetime64(c['time_control']['start_time'], 's'),
104
- ),
105
- forcing=dict(
106
- file=c['gridforce']['input_file'],
107
- legacy_module=c['gridforce']['module'] + '.Forcing',
108
- start_time=np.datetime64(c['time_control']['start_time'], 's'),
109
- stop_time=np.datetime64(c['time_control']['stop_time'], 's'),
110
- dt=dt_sec,
111
- ibm_forcing=c['gridforce'].get('ibm_forcing', []),
112
- ),
113
- release=relconf,
114
- output=dict(
115
- file=c['files']['output_file'],
116
- frequency=c['output_variables']['outper'],
117
- variables=outvars,
118
- ),
119
- tracker=dict(
120
- method=c['numerics']['advection'],
121
- diffusion=c['numerics']['diffusion'],
122
- ),
123
- ibm=ibmconf,
124
- )
125
- return config_dict
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes