ladim 2.0.1__py3-none-any.whl → 2.0.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
ladim/__init__.py CHANGED
@@ -1,3 +1,3 @@
- __version__ = '2.0.1'
+ __version__ = '2.0.3'

  from .main import main, run
ladim/config.py CHANGED
@@ -28,98 +28,118 @@ def configure(module_conf):

  def _versioned_configure(config_dict):
      if config_dict['version'] == 1:
-         config_dict = _convert_1_to_2(config_dict)
+         config_dict = convert_1_to_2(config_dict)

      return config_dict


- def _convert_1_to_2(c):
+ def dict_get(d, items, default=None):
+     if isinstance(items, str):
+         items = [items]
+
+     for item in items:
+         try:
+             return dict_get_single(d, item)
+         except KeyError:
+             pass
+
+     return default
+
+
+ def dict_get_single(d, item):
+     tokens = str(item).split(sep='.')
+     sub_dict = d
+     for t in tokens:
+         if t in sub_dict:
+             sub_dict = sub_dict[t]
+         else:
+             raise KeyError
+
+     return sub_dict
+
+
+ def convert_1_to_2(c):
+
+     out = {}
+
      # Read timedelta
-     dt_value, dt_unit = c['numerics']['dt']
-     dt_sec = np.timedelta64(dt_value, dt_unit).astype('timedelta64[s]').astype('int64')
+     dt_sec = None
+     if 'numerics' in c:
+         if 'dt' in c['numerics']:
+             dt_value, dt_unit = c['numerics']['dt']
+             dt_sec = np.timedelta64(dt_value, dt_unit).astype('timedelta64[s]').astype('int64')
+
+     out['version'] = 2
+
+     out['solver'] = {}
+     out['solver']['start'] = dict_get(c, 'time_control.start_time')
+     out['solver']['stop'] = dict_get(c, 'time_control.stop_time')
+     out['solver']['step'] = dt_sec
+     out['solver']['seed'] = dict_get(c, 'numerics.seed')
+     out['solver']['order'] = ['release', 'forcing', 'output', 'tracker', 'ibm', 'state']
+
+     out['grid'] = {}
+     out['grid']['file'] = dict_get(c, [
+         'files.grid_file', 'gridforce.grid_file',
+         'files.input_file', 'gridforce.input_file'])
+     out['grid']['legacy_module'] = dict_get(c, 'gridforce.module', '') + '.Grid'
+     out['grid']['start_time'] = np.datetime64(dict_get(c, 'time_control.start_time', '1970'), 's')
+
+     out['forcing'] = {}
+     out['forcing']['file'] = dict_get(c, ['gridforce.input_file', 'files.input_file'])
+     out['forcing']['legacy_module'] = dict_get(c, 'gridforce.module', '') + '.Forcing'
+     out['forcing']['start_time'] = np.datetime64(dict_get(c, 'time_control.start_time', '1970'), 's')
+     out['forcing']['stop_time'] = np.datetime64(dict_get(c, 'time_control.stop_time', '1970'), 's')
+     out['forcing']['dt'] = dt_sec
+     out['forcing']['ibm_forcing'] = dict_get(c, 'gridforce.ibm_forcing', [])

-     # Read output variables
-     outvars = dict()
-     outvar_names = c['output_variables'].get('particle', []) + c['output_variables'].get('instance', [])
+     out['output'] = {}
+     out['output']['file'] = dict_get(c, 'files.output_file')
+     out['output']['frequency'] = dict_get(c, 'output_variables.outper')
+     out['output']['variables'] = {}
+
+     # Convert output variable format spec
+     outvar_names = dict_get(c, 'output_variables.particle', []).copy()
+     outvar_names += dict_get(c, 'output_variables.instance', [])
      for v in outvar_names:
-         outvars[v] = c['output_variables'][v].copy()
-     for v in c['output_variables'].get('particle', []):
-         outvars[v]['kind'] = 'initial'
-     if 'release_time' in outvars and 'units' in outvars['release_time']:
-         outvars['release_time']['units'] = 'seconds since 1970-01-01'
+         out['output']['variables'][v] = c['output_variables'][v].copy()
+         if v == 'release_time' and 'units' in c['output_variables'][v]:
+             out['output']['variables'][v]['units'] = 'seconds since 1970-01-01'
+     for v in dict_get(c, 'output_variables.particle', []):
+         out['output']['variables'][v]['kind'] = 'initial'
+
+     out['tracker'] = {}
+     out['tracker']['method'] = dict_get(c, 'numerics.advection')
+     out['tracker']['diffusion'] = dict_get(c, 'numerics.diffusion')

      # Read release config
-     relconf = dict(
-         file=c['files']['particle_release_file'],
-         frequency=c['particle_release'].get('release_frequency', [0, 's']),
-         colnames=c['particle_release']['variables'],
-         formats={
-             c['particle_release'][v]: v
-             for v in c['particle_release']['variables']
-             if v in c['particle_release'].keys()
-         },
-     )
-     if c['particle_release'].get('release_type', '') != 'continuous':
-         del relconf['frequency']
-     ibmvars = c.get('state', dict()).get('ibm_variables', [])
-     ibmvars += c.get('ibm', dict()).get('variables', [])
-     relconf['defaults'] = {
+     out['release'] = {}
+     out['release']['file'] = dict_get(c, 'files.particle_release_file')
+     out['release']['colnames'] = dict_get(c, 'particle_release.variables', [])
+     if dict_get(c, 'particle_release.release_type', '') == 'continuous':
+         out['release']['frequency'] = dict_get(c, 'particle_release.release_frequency', [0, 's'])
+     out['release']['formats'] = {
+         c.get('particle_release', {})[v]: v
+         for v in dict_get(c, 'particle_release.variables', [])
+         if v in c.get('particle_release', {}).keys()
+     }
+     out['release']['defaults'] = {
          k: np.float64(0)
-         for k in ibmvars
-         if k not in relconf['colnames']
+         for k in dict_get(c, 'state.ibm_variables', []) + dict_get(c, 'ibm.variables', [])
+         if k not in out['release']['colnames']
      }

-     # Read ibm config
-     ibmconf_legacy = c.get('ibm', dict()).copy()
-     if 'module' in ibmconf_legacy:
-         ibmconf_legacy['ibm_module'] = ibmconf_legacy.pop('module')
-     ibmconf = dict()
-     if 'ibm_module' in ibmconf_legacy:
-         ibmconf['module'] = 'ladim.ibms.LegacyIBM'
-         ibmconf['legacy_module'] = ibmconf_legacy['ibm_module']
-         ibmconf['conf'] = dict(
-             dt=dt_sec,
-             output_instance=c.get('output_variables', {}).get('instance', []),
-             nc_attributes={k: v for k, v in outvars.items()}
-         )
-         ibmconf['conf']['ibm'] = {
+     out['ibm'] = {}
+     if 'ibm' in c:
+         out['ibm']['module'] = 'ladim.ibms.LegacyIBM'
+         out['ibm']['legacy_module'] = dict_get(c, ['ibm.ibm_module', 'ibm.module'])
+         out['ibm']['conf'] = {}
+         out['ibm']['conf']['dt'] = dt_sec
+         out['ibm']['conf']['output_instance'] = dict_get(c, 'output_variables.instance', [])
+         out['ibm']['conf']['nc_attributes'] = {
              k: v
-             for k, v in ibmconf_legacy.items()
-             if k != 'ibm_module'
+             for k, v in out['output']['variables'].items()
          }
+         out['ibm']['conf']['ibm'] = {k: v for k, v in c['ibm'].items() if k != 'ibm_module'}

-     config_dict = dict(
-         version=2,
-         solver=dict(
-             start=c['time_control']['start_time'],
-             stop=c['time_control']['stop_time'],
-             step=dt_sec,
-             seed=c['numerics'].get('seed', None),
-             order=['release', 'forcing', 'output', 'tracker', 'ibm', 'state'],
-         ),
-         grid=dict(
-             file=c['gridforce']['input_file'],
-             legacy_module=c['gridforce']['module'] + '.Grid',
-             start_time=np.datetime64(c['time_control']['start_time'], 's'),
-         ),
-         forcing=dict(
-             file=c['gridforce']['input_file'],
-             legacy_module=c['gridforce']['module'] + '.Forcing',
-             start_time=np.datetime64(c['time_control']['start_time'], 's'),
-             stop_time=np.datetime64(c['time_control']['stop_time'], 's'),
-             dt=dt_sec,
-             ibm_forcing=c['gridforce'].get('ibm_forcing', []),
-         ),
-         release=relconf,
-         output=dict(
-             file=c['files']['output_file'],
-             frequency=c['output_variables']['outper'],
-             variables=outvars,
-         ),
-         tracker=dict(
-             method=c['numerics']['advection'],
-             diffusion=c['numerics']['diffusion'],
-         ),
-         ibm=ibmconf,
-     )
-     return config_dict
+     return out
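Note: the rewritten converter replaces the one-shot dict construction with incremental assignments driven by dict_get, a small helper that resolves dot-separated key paths and falls back through a list of alternatives, so partially specified version-1 files no longer raise KeyError. A usage sketch of the helper (assuming ladim 2.0.3 is installed; the sample configuration dictionary is made up):

    # Usage sketch of the dotted-path lookup added in 2.0.3
    # (assumes ladim >= 2.0.3 is installed; the sample config is made up)
    from ladim.config import dict_get

    legacy_conf = {
        'time_control': {'start_time': '2020-01-01'},
        'gridforce': {'input_file': 'forcing_*.nc'},
    }

    print(dict_get(legacy_conf, 'time_control.start_time'))    # 2020-01-01
    # Falls back through a list of candidate paths; the first hit wins:
    print(dict_get(legacy_conf, ['files.grid_file', 'gridforce.input_file']))   # forcing_*.nc
    # Missing keys return the default instead of raising:
    print(dict_get(legacy_conf, 'numerics.seed'))              # None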
ladim/gridforce/ROMS.py CHANGED
@@ -15,10 +15,12 @@ import glob
  import logging
  import numpy as np
  from netCDF4 import Dataset, num2date
- 
  from ladim.sample import sample2D, bilin_inv


+ logger = logging.getLogger(__name__)
+
+
  class Grid:
      """Simple ROMS grid object

@@ -34,7 +36,7 @@ class Grid:

      def __init__(self, config):

-         logging.info("Initializing ROMS-type grid object")
+         logger.info("Initializing ROMS-type grid object")

          # Grid file
          if "grid_file" in config["gridforce"]:
@@ -44,14 +46,14 @@ class Grid:
              files.sort()
              grid_file = files[0]
          else:
-             logging.error("No grid file specified")
+             logger.error("No grid file specified")
              raise SystemExit(1)

          try:
              ncid = Dataset(grid_file)
              ncid.set_auto_mask(False)
          except OSError:
-             logging.error("Could not open grid file " + grid_file)
+             logger.error("Could not open grid file " + grid_file)
              raise SystemExit(1)

          # Subgrid, only considers internal grid cells
@@ -180,6 +182,8 @@ class Grid:
          """Return the depth of grid cells"""
          I = X.round().astype(int) - self.i0
          J = Y.round().astype(int) - self.j0
+         I = np.minimum(np.maximum(I, 0), self.H.shape[1] - 1)
+         J = np.minimum(np.maximum(J, 0), self.H.shape[0] - 1)
          return self.H[J, I]

      def lonlat(self, X, Y, method="bilinear"):
@@ -189,6 +193,8 @@
          # else: containing grid cell, less accurate
          I = X.round().astype("int") - self.i0
          J = Y.round().astype("int") - self.j0
+         I = np.minimum(np.maximum(I, 0), self.lon.shape[1] - 1)
+         J = np.minimum(np.maximum(J, 0), self.lon.shape[0] - 1)
          return self.lon[J, I], self.lat[J, I]

      def ingrid(self, X: np.ndarray, Y: np.ndarray) -> np.ndarray:
@@ -204,6 +210,11 @@
          """Returns True for points on land"""
          I = X.round().astype(int) - self.i0
          J = Y.round().astype(int) - self.j0
+
+         # Constrain to valid indices
+         I = np.minimum(np.maximum(I, 0), self.M.shape[-1] - 1)
+         J = np.minimum(np.maximum(J, 0), self.M.shape[-2] - 1)
+
          return self.M[J, I] < 1

          # Error if point outside
@@ -242,7 +253,7 @@ class Forcing:

      def __init__(self, config, grid):

-         logging.info("Initiating forcing")
+         logger.info("Initiating forcing")

          self._grid = grid  # Get the grid object, make private?
          # self.config = config["gridforce"]
@@ -255,9 +266,9 @@ class Forcing:
          files = self.find_files(config["gridforce"])
          numfiles = len(files)
          if numfiles == 0:
-             logging.error("No input file: {}".format(config["gridforce"]["input_file"]))
+             logger.error("No input file: {}".format(config["gridforce"]["input_file"]))
              raise SystemExit(3)
-         logging.info("Number of forcing files = {}".format(numfiles))
+         logger.info("Number of forcing files = {}".format(numfiles))

          # ---------------------------
          # Overview of all the files
@@ -357,6 +368,7 @@ class Forcing:
          all_frames = []  # All time frames
          num_frames = {}  # Number of time frames in each file
          for fname in files:
+             logger.info(f'Open forcing file {fname}')
              with Dataset(fname) as nc:
                  new_times = nc.variables["ocean_time"][:]
                  num_frames[fname] = len(new_times)
@@ -370,11 +382,11 @@
          if np.any(I):
              i = I.nonzero()[0][0] + 1  # Index of first out-of-order frame
              oooframe = str(all_frames[i]).split('.')[0]  # Remove microseconds
-             logging.info(f"Time frame {i} = {oooframe} out of order")
-             logging.critical("Forcing time frames not strictly sorted")
+             logger.info(f"Time frame {i} = {oooframe} out of order")
+             logger.critical("Forcing time frames not strictly sorted")
              raise SystemExit(4)

-         logging.info(f"Number of available forcing times = {len(all_frames)}")
+         logger.info(f"Number of available forcing times = {len(all_frames)}")
          return all_frames, num_frames

      @staticmethod
@@ -382,8 +394,8 @@

          time0 = all_frames[0]
          time1 = all_frames[-1]
-         logging.info(f"First forcing time = {time0}")
-         logging.info(f"Last forcing time = {time1}")
+         logger.info(f"First forcing time = {time0}")
+         logger.info(f"Last forcing time = {time1}")
          start_time = np.datetime64(config["start_time"])
          dt = np.timedelta64(int(config["dt"]), "s")

@@ -391,10 +403,10 @@
          # ------------------------------------------------------

          if time0 > start_time:
-             logging.error("No forcing at start time")
+             logger.error("No forcing at start time")
              raise SystemExit(3)
          if time1 < config["stop_time"]:
-             logging.error("No forcing at stop time")
+             logger.error("No forcing at stop time")
              raise SystemExit(3)

          # Make a list steps of the forcing time steps
@@ -427,7 +439,7 @@
          interpolate_velocity_in_time = True
          interpolate_ibm_forcing_in_time = False

-         logging.debug("Updating forcing, time step = {}".format(t))
+         logger.debug("Updating forcing, time step = {}".format(t))
          if t in self.steps:  # No time interpolation
              self.U = self.Unew
              self.V = self.Vnew
@@ -486,7 +498,7 @@

          # Handle file opening/closing
          # Always read velocity before other fields
-         logging.info("Reading velocity for time step = {}".format(n))
+         logger.info("Reading velocity for time step = {}".format(n))

          # If finished a file or first read (self._nc == "")
          if not self._nc:  # First read
@@ -799,6 +811,11 @@ def sample3D(F, X, Y, K, A, method="bilinear"):
      # else: method == 'nearest'
      I = X.round().astype("int")
      J = Y.round().astype("int")
+
+     # Constrain to valid indices
+     I = np.minimum(np.maximum(I, 0), F.shape[-1] - 1)
+     J = np.minimum(np.maximum(J, 0), F.shape[-2] - 1)
+
      return F[K, J, I]

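Note: besides switching to a module-level logger, the depth, lon/lat and land-mask lookups in Grid, as well as sample3D, now clamp the rounded grid indices to the array bounds before indexing, so particles just outside the (sub)grid hit the nearest edge cell instead of raising IndexError. The np.minimum/np.maximum pair is equivalent to np.clip; a minimal standalone sketch of the pattern (the array and particle positions below are made up):

    import numpy as np

    H = np.arange(20.0).reshape(4, 5)      # made-up depth field, shape (eta, xi)
    X = np.array([-0.4, 2.3, 9.7])         # particle grid coordinates, partly off-grid
    Y = np.array([1.2, 3.6, -2.0])

    I = X.round().astype(int)
    J = Y.round().astype(int)

    # Constrain to valid indices, as in the hunks above
    I = np.minimum(np.maximum(I, 0), H.shape[1] - 1)
    J = np.minimum(np.maximum(J, 0), H.shape[0] - 1)

    print(H[J, I])   # no IndexError; off-grid particles are assigned the nearest edge cell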
ladim/gridforce/zROMS.py CHANGED
@@ -18,6 +18,9 @@ from netCDF4 import Dataset, num2date
  from ladim.sample import sample2D


+ logger = logging.getLogger(__name__)
+
+
  class Grid:
      """Simple ROMS grid object

@@ -33,13 +36,24 @@ class Grid:

      def __init__(self, config):

-         logging.info("Initializing zROMS grid object")
+         logger.info("Initializing zROMS grid object")
+
+         # Grid file
+         if "grid_file" in config["gridforce"]:
+             grid_file = config["gridforce"]["grid_file"]
+         elif "input_file" in config["gridforce"]:
+             files = glob.glob(config["gridforce"]["input_file"])
+             files.sort()
+             grid_file = files[0]
+         else:
+             logger.error("No grid file specified")
+             raise SystemExit(1)
+
          try:
-             ncid = Dataset(config["gridforce"]["grid_file"])
+             ncid = Dataset(grid_file)
+             ncid.set_auto_mask(False)
          except OSError:
-             logging.error(
-                 "Grid file {} not found".format(config["gridforce"]["grid_file"])
-             )
+             logger.error("Could not open grid file " + grid_file)
              raise SystemExit(1)

          # Subgrid, only considers internal grid cells
@@ -188,7 +202,7 @@ class Forcing:

      def __init__(self, config, grid):

-         logging.info("Initiating forcing")
+         logger.info("Initiating forcing")

          self._grid = grid  # Get the grid object, make private?

@@ -199,9 +213,9 @@ class Forcing:
          files.sort()
          numfiles = len(files)
          if numfiles == 0:
-             logging.error("No input file: {}".format(config["gridforce"]["input_file"]))
+             logger.error("No input file: {}".format(config["gridforce"]["input_file"]))
              raise SystemExit(3)
-         logging.info("Number of forcing files = {}".format(numfiles))
+         logger.info("Number of forcing files = {}".format(numfiles))

          # ----------------------------------------
          # Open first file for some general info
@@ -244,20 +258,20 @@ class Forcing:
          num_frames = []  # Available time frames in each file
          # change_times = []  # Times for change of file
          for fname in files:
-             print(fname)
+             logging.info(f'Load {fname}')
              with Dataset(fname) as nc:
                  # new_times = nc.variables['ocean_time'][:]
                  new_times = nc.variables["time"][:]
                  times.extend(new_times)
                  num_frames.append(len(new_times))
-         logging.info("Number of available forcing times = {:d}".format(len(times)))
+         logger.info("Number of available forcing times = {:d}".format(len(times)))

          # Find first/last forcing times
          # -----------------------------
          time0 = num2date(times[0], time_units)
          time1 = num2date(times[-1], time_units)
-         logging.info("time0 = {}".format(str(time0)))
-         logging.info("time1 = {}".format(str(time1)))
+         logger.info("time0 = {}".format(str(time0)))
+         logger.info("time1 = {}".format(str(time1)))
          # print(time0)
          # print(time1)
          start_time = np.datetime64(config["start_time"])
@@ -269,10 +283,10 @@
          # Use logging module for this

          if time0 > start_time:
-             logging.error("No forcing at start time")
+             logger.error("No forcing at start time")
              raise SystemExit(3)
          if time1 < config["stop_time"]:
-             logging.error("No forcing at stop time")
+             logger.error("No forcing at stop time")
              raise SystemExit(3)

          # Make a list steps of the forcing time steps
@@ -362,7 +376,7 @@
          interpolate_velocity_in_time = True
          interpolate_ibm_forcing_in_time = False

-         logging.debug("Updating forcing, time step = {}".format(t))
+         logger.debug("Updating forcing, time step = {}".format(t))
          if t in self.steps:  # No time interpolation
              self.U = self.Unew
              self.V = self.Vnew
@@ -399,7 +413,7 @@

          # Handle file opening/closing
          # Always read velocity before other fields
-         logging.debug("Reading velocity for time step = {}".format(n))
+         logger.debug("Reading velocity for time step = {}".format(n))
          first = True
          if first:  # Open file initiallt
              self._nc = Dataset(self._files[self.file_idx[n]])
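Note: as in ROMS.py and main.py, this module now logs through a logger bound to the module name instead of the root logger, which lets records be filtered and formatted per module. A minimal sketch of the pattern (the helper function and file name below are illustrative only, not part of the package):

    import logging

    logger = logging.getLogger(__name__)   # e.g. 'ladim.gridforce.zROMS' when imported as a module

    def open_forcing(filename):
        # Hypothetical helper: messages go through the module logger, not the root logger
        logger.info("Open forcing file %s", filename)

    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO, format='%(levelname)s:%(name)s - %(message)s')
        open_forcing("ocean_avg_0001.nc")   # made-up file name
        # -> INFO:__main__ - Open forcing file ocean_avg_0001.nc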
ladim/main.py CHANGED
@@ -12,13 +12,14 @@ Lagrangian Advection and Diffusion Model
  # ---------------------------------

  import logging
- 
  import ladim
- 
  from .config import configure
  from .model import Model


+ logger = logging.getLogger(__name__)
+
+
  def main(config_stream, loglevel=logging.INFO):
      """Main function for LADiM"""

@@ -46,7 +47,9 @@ def run():

      logging.basicConfig(
          level=logging.INFO,
-         format='%(levelname)s:%(module)s - %(message)s')
+         format='%(asctime)s %(levelname)s:%(module)s - %(message)s',
+         datefmt='%Y-%m-%d %H:%M:%S',
+     )

      # ====================
      # Parse command line
@@ -67,23 +70,23 @@ def run():

      args = parser.parse_args()

-     logging.info(" ================================================")
-     logging.info(" === Lagrangian Advection and Diffusion Model ===")
-     logging.info(" ================================================\n")
+     logger.info(" ================================================")
+     logger.info(" === Lagrangian Advection and Diffusion Model ===")
+     logger.info(" ================================================\n")

-     logging.info(f"ladim path: {ladim.__file__.strip('__init.py__')}")
-     logging.info(f"ladim version: {ladim.__version__}\n")
-     logging.info(f"python version: {sys.version.split()[0]}\n")
+     logger.info(f"ladim path: {ladim.__file__.strip('__init.py__')}")
+     logger.info(f"ladim version: {ladim.__version__}\n")
+     logger.info(f"python version: {sys.version.split()[0]}\n")

-     logging.info(f" Configuration file: {args.config_file}")
-     logging.info(f" loglevel = {logging.getLevelName(args.loglevel)}")
+     logger.info(f" Configuration file: {args.config_file}")
+     logger.info(f" loglevel = {logging.getLevelName(args.loglevel)}")

      # =============
      # Sanity check
      # =============

      if not Path(args.config_file).exists():
-         logging.critical(f'Configuration file {args.config_file} not found')
+         logger.critical(f'Configuration file {args.config_file} not found')
          raise SystemExit(1)

      # ===================
@@ -92,7 +95,7 @@ def run():

      # Start message
      now = datetime.datetime.now().replace(microsecond=0)
-     logging.info(f'LADiM simulation starting, wall time={now}')
+     logger.info(f'LADiM simulation starting, wall time={now}')

      fp = open(args.config_file, encoding='utf8')
      ladim.main(config_stream=fp, loglevel=args.loglevel)
@@ -100,4 +103,4 @@ def run():
      # Reset logging and print final message
      logging.getLogger().setLevel(logging.INFO)
      now = datetime.datetime.now().replace(microsecond=0)
-     logging.info(f'LADiM simulation finished, wall time={now}')
+     logger.info(f'LADiM simulation finished, wall time={now}')
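Note: the root-logger setup in run() gains timestamps. A short sketch of what the new basicConfig call produces (the log message and sample output below are made up):

    import logging

    # Same handler setup as the new run(): timestamped records on the root logger
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(levelname)s:%(module)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
    )
    logging.getLogger(__name__).info('LADiM simulation starting')
    # prints something like: 2024-01-01 12:00:00 INFO:demo - LADiM simulation starting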
ladim/model.py CHANGED
@@ -1,4 +1,7 @@
  import importlib
+ import importlib.util
+ import sys
+ from pathlib import Path

  from typing import TYPE_CHECKING
  if TYPE_CHECKING:
@@ -101,7 +104,26 @@ class Model:

  def load_class(name):
      pkg, cls = name.rsplit(sep='.', maxsplit=1)
-     return getattr(importlib.import_module(pkg), cls)
+
+     # Check if "pkg" is an existing file
+     spec = None
+     module_name = None
+     file_name = pkg + '.py'
+     if Path(file_name).exists():
+         # This can return None if there were import errors
+         module_name = pkg
+         spec = importlib.util.spec_from_file_location(module_name, file_name)
+
+     # If pkg can not be interpreted as a file, use regular import
+     if spec is None:
+         return getattr(importlib.import_module(pkg), cls)
+
+     # File import
+     else:
+         module = importlib.util.module_from_spec(spec)
+         sys.modules[module_name] = module
+         spec.loader.exec_module(module)
+         return getattr(module, cls)


  class Module:
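Note: load_class can now resolve a class from a plain .py file as well as from an installed module: if '<prefix>.py' exists in the working directory, the module is loaded from that file with importlib.util and registered in sys.modules; otherwise the regular dotted import is used. A usage sketch (assuming ladim 2.0.3 is installed and that load_class is importable from ladim.model, as the hunk above suggests; my_ibm.py is a hypothetical local file):

    # Usage sketch of the extended class loader
    from ladim.model import load_class

    # Dotted module path: falls through to a regular import plus attribute lookup
    OrderedDict = load_class('collections.OrderedDict')

    # Local file: if './my_ibm.py' exists (hypothetical example), the module is
    # loaded from that file via importlib.util and 'MyIBM' is looked up on it:
    #
    #     MyIBM = load_class('my_ibm.MyIBM')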
ladim/state.py CHANGED
@@ -122,6 +122,14 @@ class DynamicState(State):
              raise AttributeError(f'Attribute not defined: {item}')
          return self[item]

+     def __setattr__(self, item, value):
+         if item in list(self.__dict__.keys()) + ['_data', '_model', '_num_released', '_varnames']:
+             super().__setattr__(item, value)
+         elif item in self._data:
+             self._data[item] = value
+         else:
+             raise AttributeError(f"Attribute not defined: '{item}'")
+
      def __contains__(self, item):
          return item in self._data

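Note: DynamicState already exposed particle variables as attributes through __getattr__; 2.0.3 adds the matching __setattr__, so assigning to a known variable writes into the underlying _data mapping, internal attributes are set normally, and unknown names raise AttributeError. A toy stand-in that mimics the new rules (not the real class, which takes its variables from the model configuration):

    import numpy as np

    class MiniState:
        """Toy stand-in for DynamicState, mimicking the new __setattr__ rules."""
        def __init__(self, varnames):
            super().__setattr__('_data', {v: np.zeros(0) for v in varnames})

        def __setattr__(self, item, value):
            if item in list(self.__dict__.keys()) + ['_data']:
                super().__setattr__(item, value)     # internal attribute
            elif item in self._data:
                self._data[item] = value             # known particle variable
            else:
                raise AttributeError(f"Attribute not defined: '{item}'")

    s = MiniState(['X', 'Y', 'Z'])
    s.Z = np.array([5.0])            # known variable -> stored in s._data['Z']
    try:
        s.temperature = 8.0          # unknown name -> AttributeError
    except AttributeError as err:
        print(err)                   # Attribute not defined: 'temperature'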
ladim-2.0.1.dist-info/METADATA → ladim-2.0.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ladim
- Version: 2.0.1
+ Version: 2.0.3
  Summary: Lagrangian Advection and Diffusion Model
  Home-page: https://github.com/pnsaevik/ladim
  Author: Bjørn Ådlandsvik
ladim-2.0.1.dist-info/RECORD → ladim-2.0.3.dist-info/RECORD CHANGED
@@ -1,21 +1,21 @@
- ladim/__init__.py,sha256=gItkgENsZYTMR2SxeAgoMMvJ9uTjFAJA29jZoVWvM2k,51
+ ladim/__init__.py,sha256=Vn8lFql5FOG5tGQ1TDaKybIZLoYlLHVXvLcitN1QO04,51
  ladim/__main__.py,sha256=8f07EMfxQllDZSgpak5ECyYHnfQFy8LaHl2xdC-aO9c,23
- ladim/config.py,sha256=6b0ikBrBnq_sSElgO2YcVtJHjQU0nyZsVyPV3q9fy5I,4233
+ ladim/config.py,sha256=AAmQflh_u8T-0Qudij_cGpuc_WHOKouoLTBtMZw_Bh0,5075
  ladim/forcing.py,sha256=f4PpSwyilSScXeNyorTWLMgVTiat9htSLkCwAkRlJVM,3048
  ladim/grid.py,sha256=m6bQrGJ3cux7rqC8pbRXD86cOI-VQKF-XjP9m1jCIcY,2221
- ladim/main.py,sha256=wO91-nLd1gvF3V20XK5qRvvOIV4xoTOKiWFcbwV2oag,2812
- ladim/model.py,sha256=jpjq_ZSh7ULpwi3_RqDb-p5SG8WcdgCPaBkSpnNWblU,3137
+ ladim/main.py,sha256=6_blu3PYnDXaYdPxfZoukWsjN0o9vh7O8_-W2-aguAI,2894
+ ladim/model.py,sha256=iXClvieChhipCSZ-dDrmnjqwS4cuM53VpJv7oaJyQ88,3794
  ladim/output.py,sha256=Rz7iujvS7Z3LoABiJduQqyb3zPswNqhhFsywr3MLsBY,8373
  ladim/release.py,sha256=1j__9Gj0BD0CqVCM2KLZhio1Ia-hz1gbUIhTsa0J3Rg,8451
  ladim/sample.py,sha256=n8wRGd_VsW_qyQe1ZoTpmfZcdcwB929vsM8PoKG6JTs,8292
  ladim/solver.py,sha256=sZvYgOxzJ-EItI-IB2y8_z8Tf-SJAQSrmydlhDRa7ZQ,755
- ladim/state.py,sha256=5ICIiujsV3KOAUYagGLK7YdmhcItgJmtntZeR11nIpw,3781
+ ladim/state.py,sha256=4XNIIx5sGjlqkZ6bg-dGbqzp8ujFNkHHFL2D9qCQA2w,4119
  ladim/tracker.py,sha256=VVX6T5CqiU6nGSCgLlSCC8w0UYhW273OGFE7ApPjdyI,5091
  ladim/utilities.py,sha256=r7-zShqJhh0cBctDUmtfw-GBOk1eTTYR4S72b0ouiSQ,994
- ladim/gridforce/ROMS.py,sha256=yrMr1GiDgr7VG9V630nHFdeajPY4WHLu653ZRwAk1aw,26888
+ ladim/gridforce/ROMS.py,sha256=DF5CSR2iJsPWAmTOrqYavARVtqokQbtAWLSCDWb_9f0,27525
  ladim/gridforce/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ladim/gridforce/analytical.py,sha256=qI-1LJdjmnwwanzOdrsDZqwGgo73bT75CB7pMaxbHKM,1094
- ladim/gridforce/zROMS.py,sha256=MVA6PQuY1ukvs2E20sWY4kr5-QieeQHTrA5ruxCqbUM,22826
+ ladim/gridforce/zROMS.py,sha256=217lDAHY0H7vbez6iLgIHQxK6MrNaZlAtWo6MteKVxU,23225
  ladim/ibms/__init__.py,sha256=GOG75jZDmNEiLr8brxrKqIlqVj-pNR7pnPP8FUKE6hU,565
  ladim/ibms/light.py,sha256=POltHmKkX8-q3t9wXyfcseCKEq9Bq-kX1WEJYsr1lNQ,2737
  ladim/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -24,9 +24,9 @@ postladim/cellcount.py,sha256=nCFu9iJmprubn4YmPB4W0VO02GfEb90Iif7D49w1Kss,2054
  postladim/kde_plot.py,sha256=GvMWzT6VxIeXKh1cnqaGzR-4jGG_WIHGMLPpRMXIpo4,1628
  postladim/particlefile.py,sha256=0aif9wYUJ-VrpQKeCef8wB5VCiBB-gWY6sxNCUYviTA,4889
  postladim/variable.py,sha256=-2aihoppYMMmpSpCqaF31XvpinTMaH3Y01-USDIkbBc,6587
- ladim-2.0.1.dist-info/LICENSE,sha256=BgtXyjNr6Ly9nQ7ZLXKpV3r5kWRLnh5MiN0dxp0Bvfc,1085
- ladim-2.0.1.dist-info/METADATA,sha256=-183UhMhVbBOfOcjonoP1P8l2q500OF8vexlV1SDAEg,1841
- ladim-2.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- ladim-2.0.1.dist-info/entry_points.txt,sha256=JDlNJo87GJaOkH0-BpAzTPLCrZcuPSdSlHNQ4XmnoRg,41
- ladim-2.0.1.dist-info/top_level.txt,sha256=TK8Gl7d6MsrAQvqKG4b6YJCbB4UL46Se3SzsI-sJAuc,16
- ladim-2.0.1.dist-info/RECORD,,
+ ladim-2.0.3.dist-info/LICENSE,sha256=BgtXyjNr6Ly9nQ7ZLXKpV3r5kWRLnh5MiN0dxp0Bvfc,1085
+ ladim-2.0.3.dist-info/METADATA,sha256=QjbEu8Z5DysRolgZ7RuuvAT4CZ0S_8bWPP49ByNIQIE,1841
+ ladim-2.0.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ ladim-2.0.3.dist-info/entry_points.txt,sha256=JDlNJo87GJaOkH0-BpAzTPLCrZcuPSdSlHNQ4XmnoRg,41
+ ladim-2.0.3.dist-info/top_level.txt,sha256=TK8Gl7d6MsrAQvqKG4b6YJCbB4UL46Se3SzsI-sJAuc,16
+ ladim-2.0.3.dist-info/RECORD,,
ladim-2.0.1.dist-info/WHEEL → ladim-2.0.3.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (75.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
