zoomy-core 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. decorators/decorators.py +25 -0
  2. fvm/__init__.py +0 -0
  3. fvm/flux.py +52 -0
  4. fvm/nonconservative_flux.py +97 -0
  5. fvm/ode.py +55 -0
  6. fvm/solver_numpy.py +297 -0
  7. fvm/timestepping.py +13 -0
  8. mesh/__init__.py +0 -0
  9. mesh/mesh.py +1239 -0
  10. mesh/mesh_extrude.py +168 -0
  11. mesh/mesh_util.py +487 -0
  12. misc/__init__.py +0 -0
  13. misc/custom_types.py +6 -0
  14. misc/interpolation.py +140 -0
  15. misc/io.py +448 -0
  16. misc/logger_config.py +18 -0
  17. misc/misc.py +218 -0
  18. model/__init__.py +0 -0
  19. model/analysis.py +147 -0
  20. model/basefunction.py +113 -0
  21. model/basemodel.py +513 -0
  22. model/boundary_conditions.py +193 -0
  23. model/initial_conditions.py +171 -0
  24. model/model.py +65 -0
  25. model/models/GN.py +70 -0
  26. model/models/advection.py +53 -0
  27. model/models/basisfunctions.py +181 -0
  28. model/models/basismatrices.py +381 -0
  29. model/models/coupled_constrained.py +60 -0
  30. model/models/poisson.py +41 -0
  31. model/models/shallow_moments.py +757 -0
  32. model/models/shallow_moments_sediment.py +378 -0
  33. model/models/shallow_moments_topo.py +423 -0
  34. model/models/shallow_moments_variants.py +1509 -0
  35. model/models/shallow_water.py +266 -0
  36. model/models/shallow_water_topo.py +111 -0
  37. model/models/shear_shallow_flow.py +594 -0
  38. model/models/sme_turbulent.py +613 -0
  39. model/models/vam.py +455 -0
  40. postprocessing/__init__.py +0 -0
  41. postprocessing/plotting.py +244 -0
  42. postprocessing/postprocessing.py +75 -0
  43. preprocessing/__init__.py +0 -0
  44. preprocessing/openfoam_moments.py +453 -0
  45. transformation/__init__.py +0 -0
  46. transformation/helpers.py +25 -0
  47. transformation/to_amrex.py +241 -0
  48. transformation/to_c.py +185 -0
  49. transformation/to_jax.py +14 -0
  50. transformation/to_numpy.py +118 -0
  51. transformation/to_openfoam.py +258 -0
  52. transformation/to_ufl.py +67 -0
  53. zoomy_core-0.1.14.dist-info/METADATA +52 -0
  54. zoomy_core-0.1.14.dist-info/RECORD +57 -0
  55. zoomy_core-0.1.14.dist-info/WHEEL +5 -0
  56. zoomy_core-0.1.14.dist-info/licenses/LICENSE +674 -0
  57. zoomy_core-0.1.14.dist-info/top_level.txt +8 -0
misc/interpolation.py ADDED
@@ -0,0 +1,140 @@
+ import numpy as np
+ from sympy import integrate, diff
+ from sympy.abc import x
+ from sympy import lambdify
+
+ import zoomy_core.mesh.mesh_util as mesh_util
+ # from zoomy_core.model.models.shallow_moments import Basis
+
+
+ def _find_bounding_element(mesh, position):
+     """
+     Strategy: the faces of the elements are outward facing. If we compute the
+     intersection of the normal with the point, resulting in
+     alpha * normal = minimal_distance, then alpha needs to be negative for all
+     faces. See https://en.wikipedia.org/wiki/Hesse_normal_form
+     """
+     mesh_type = mesh.type
+     for i_elem, vertices in enumerate(mesh.element_vertices):
+         faces = mesh_util._face_order(vertices, mesh_type)
+         face_centers = [
+             mesh_util.center(mesh.vertex_coordinates, np.array(face)) for face in faces
+         ]
+         vector_origin_to_plane = [
+             face_center - position for face_center in face_centers
+         ]
+         face_normals = mesh.element_face_normals[i_elem]
+
+         if _is_point_inside_bounding_faces(face_normals, vector_origin_to_plane):
+             return i_elem
+
+     # outside of domain
+     raise ValueError(f"Position {position} lies outside of the mesh domain.")
+
+
+ def _is_point_inside_bounding_faces(outward_face_normals, vectors_OP):
+     for n, p in zip(outward_face_normals, vectors_OP):
+         if np.dot(n, p) < 0:
+             return False
+     return True
+
+
+ def to_new_mesh(fields, mesh_old, mesh_new, interp="const", map_fields=None):
+     assert interp == "const"
+
+     fields_new = np.zeros_like(fields)
+
+     for i_elem in range(mesh_new.n_elements):
+         element_center = mesh_new.element_center[i_elem]
+         i_elem_old = _find_bounding_element(mesh_old, element_center)
+         fields_new[i_elem] = fields[i_elem_old]
+     return fields_new
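+
+ # Usage sketch (hypothetical meshes). Note that `np.zeros_like(fields)` above
+ # implies both meshes carry the same number of elements, e.g. the same grid
+ # after a deformation:
+ #
+ #     Q_old = np.ones((mesh_old.n_elements, 3))
+ #     Q_new = to_new_mesh(Q_old, mesh_old, mesh_new, interp="const")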
+
+
+ # # compute gradients based on FD using scattered pointwise data
+ # def compute_gradient_field_2d(points, fields):
+ #     def in_hull(points, probe):
+ #         n_points = points.shape[0]
+ #         n_dim = points.shape[1]
+ #         c = np.zeros(n_points)
+ #         A = np.r_[points.T, np.ones((1, n_points))]
+ #         b = np.r_[probe, np.ones(1)]
+ #         lp = linprog(c, A_eq=A, b_eq=b)
+ #         return lp.success
+
+ #     assert points.shape[1] == 2
+ #     grad = np.zeros((fields.shape[0], 2, fields.shape[1]))
+ #     eps_x = (points[:, 0].max() - points[:, 0].min()) / 100.0
+ #     eps_y = (points[:, 1].max() - points[:, 1].min()) / 100.0
+
+ #     # generate evaluation 'stencil' for central differences
+ #     xi_0 = np.array(points)
+ #     xi_xp = np.array(points)
+ #     xi_xp[:, 0] += eps_x
+ #     xi_xm = np.array(points)
+ #     xi_xm[:, 0] -= eps_x
+ #     xi_yp = np.array(points)
+ #     xi_yp[:, 1] += eps_y
+ #     xi_ym = np.array(points)
+ #     xi_ym[:, 1] -= eps_y
+ #     factors_x = 2.0 * np.ones((points.shape[0]))
+ #     factors_y = 2.0 * np.ones((points.shape[0]))
+ #     # correct boundary points with single sided differences
+ #     for i in range(xi_xp.shape[0]):
+ #         if not in_hull(points, xi_xp[i]):
+ #             xi_xp[i, 0] -= eps_x
+ #             factors_x[i] = 1.0
+ #         if not in_hull(points, xi_xm[i]):
+ #             xi_xm[i, 0] += eps_x
+ #             factors_x[i] = 1.0
+ #         if not in_hull(points, xi_yp[i]):
+ #             xi_yp[i, 1] -= eps_y
+ #             factors_y[i] = 1.0
+ #         if not in_hull(points, xi_ym[i]):
+ #             xi_ym[i, 1] += eps_y
+ #             factors_y[i] = 1.0
+
+ #     for i_field, values in enumerate(fields):
+ #         f = griddata(points, values, xi_0)
+ #         f_xp = griddata(points, values, xi_xp)
+ #         f_xm = griddata(points, values, xi_xm)
+ #         f_yp = griddata(points, values, xi_yp)
+ #         f_ym = griddata(points, values, xi_ym)
+
+ #         dfdx = (f_xp - f_xm) / (factors_x * eps_x + 10 ** (-10))
+ #         dfdy = (f_yp - f_ym) / (factors_y * eps_y + 10 ** (-10))
+
+ #         grad[i_field, 0, :] = dfdx
+ #         grad[i_field, 1, :] = dfdy
+
+ #     assert (np.isnan(grad) == False).all()
+ #     assert (np.isfinite(grad) == True).all()
+ #     return grad
+
+
+ # # ps, vs: values at the boundary points
+ # # p0, v0: value at the cell center
+ # def compute_gradient(ps, vs, p0, v0, limiter=lambda r: 1.0):
+ #     points = np.zeros((ps.shape[0] + 1, 2))
+ #     points[:-1, :] = ps[:, :2]
+ #     points[-1, :] = p0[:2]
+ #     values = np.zeros((vs.shape[0] + 1, vs.shape[1]))
+ #     values[:-1, :] = vs
+ #     values[-1, :] = v0
+
+ #     f = LinearNDInterpolator(points, values)
+ #     eps_x = (points[:, 0].max() - points[:, 0].min()) / 100.0
+ #     eps_y = (points[:, 1].max() - points[:, 1].min()) / 100.0
+ #     x0 = p0[0]
+ #     y0 = p0[1]
+
+ #     dfdx = (f(x0 + eps_x, y0) - f(x0 - eps_x, y0)) / (2 * eps_x + 10 ** (-10))
+ #     rx = (f(x0, y0) - f(x0 - eps_x, y0)) / (f(x0 + eps_x, y0) - f(x0, y0) + 10 ** (-10))
+ #     phix = limiter(rx)
+ #     dfdy = (f(x0, y0 + eps_y) - f(x0, y0 - eps_y)) / (2 * eps_y + 10 ** (-10))
+ #     ry = (f(x0, y0) - f(x0, y0 - eps_y)) / (f(x0, y0 + eps_y) - f(x0, y0) + 10 ** (-10))
+ #     phiy = limiter(ry)
+
+ #     grad = np.array([phix * dfdx, phiy * dfdy]).T
+ #     assert (np.isnan(grad) == False).all()
+ #     assert (np.isfinite(grad) == True).all()
+ #     return grad
misc/io.py ADDED
@@ -0,0 +1,448 @@
+ import os
+ import numpy as np
+ import json
+ import shutil
+
+ try:
+     import meshio
+
+     _HAVE_MESHIO = True
+ except ImportError:
+     _HAVE_MESHIO = False
+
+ try:
+     import h5py
+
+     _HAVE_H5PY = True
+ except ImportError:
+     _HAVE_H5PY = False
+
+ # import zoomy_core.mesh.fvm_mesh as fvm_mesh
+ from zoomy_core.mesh.mesh import Mesh
+ import zoomy_core.mesh.mesh_util as mesh_util
+ from zoomy_core.misc.misc import Zstruct, Settings
+ from zoomy_core.misc import misc as misc
+ from zoomy_core.misc.logger_config import logger
+
+
+ def init_output_directory(path, clean):
+     main_dir = misc.get_main_directory()
+
+     path = os.path.join(main_dir, path)
+     os.makedirs(path, exist_ok=True)
+     if clean:
+         filelist = [f for f in os.listdir(path)]
+         for f in filelist:
+             if os.path.isdir(os.path.join(path, f)):
+                 shutil.rmtree(os.path.join(path, f))
+             else:
+                 os.remove(os.path.join(path, f))
+
+
+ def get_hdf5_type(value):
+     out = type(value)
+     if isinstance(value, str):
+         out = h5py.string_dtype()
+     return out
+
+
+ def write_dict_to_hdf5(group, d):
+     for key, value in d.items():
+         if isinstance(value, dict):
+             subgroup = group.create_group(key)
+             write_dict_to_hdf5(subgroup, value)
+         elif isinstance(value, (str, int, float, bool)):
+             group.create_dataset(key, data=value, dtype=get_hdf5_type(value))
+         elif isinstance(value, (list, tuple)):
+             group.create_dataset(key, data=value)
+         elif isinstance(value, np.ndarray):
+             group.create_dataset(key, data=value)
+         elif hasattr(value, "as_dict"):
+             subgroup = group.create_group(key)
+             write_dict_to_hdf5(subgroup, value.as_dict())
+         else:
+             logger.warning(f"Skipping unsupported type for key: {key} -> {type(value)}")
+
+
+ def load_hdf5_to_dict(group):
+     d = {}
+     for key, value in group.items():
+         if isinstance(value, h5py.Group):
+             d[key] = load_hdf5_to_dict(value)
+         elif isinstance(value, h5py.Dataset):
+             if value.dtype == h5py.string_dtype():
+                 d[key] = value[()].decode("utf-8")
+             else:
+                 d[key] = value[()]
+         else:
+             logger.warning(f"Skipping unsupported type for key: {key} -> {type(value)}")
+
+     return d
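+
+ # Round-trip sketch for the two helpers above (hypothetical file name):
+ #
+ #     with h5py.File("example.h5", "w") as f:
+ #         write_dict_to_hdf5(f, {"a": 1.0, "nested": {"b": "text", "c": [1, 2]}})
+ #     with h5py.File("example.h5", "r") as f:
+ #         d = load_hdf5_to_dict(f)  # {'a': 1.0, 'nested': {'b': 'text', 'c': array([1, 2])}}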
+
+
+ def save_settings(settings):
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, settings.output.directory)
+     with h5py.File(os.path.join(filepath, "settings.h5"), "w") as f:
+         write_dict_to_hdf5(f, settings.as_dict(recursive=True))
+
+
+ def load_settings(filepath):
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, filepath)
+     with h5py.File(os.path.join(filepath, "settings.h5"), "r") as f:
+         d = load_hdf5_to_dict(f)
+
+     settings = Settings.from_dict(d)
+     return settings
+
+
+ def load_settings2(filepath):
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, filepath)
+     with h5py.File(os.path.join(filepath, "settings.h5"), "r") as f:
+         model = f["model"]
+         solver = f["solver"]
+         output = f["output"]
+
+         d_model = {}
+         if "parameters" in model:
+             parameters = {k: v[()] for k, v in model["parameters"].items()}
+             parameters = Zstruct(**parameters)
+             for k in model.keys():
+                 if k != "parameters":
+                     v = model[k][()]
+                     if isinstance(v, (str, int, float, bool)):
+                         d_model[k] = v
+                     else:
+                         raise ValueError(
+                             f"Unsupported type for model attribute {k}: {type(v)}"
+                         )
+             d_model["parameters"] = parameters
+             model = Zstruct(**d_model)
+         d_solver = {}
+         for k in solver.keys():
+             v = solver[k][()]
+             if isinstance(v, (str, int, float, bool)):
+                 d_solver[k] = v
+             else:
+                 raise ValueError(
+                     f"Unsupported type for solver attribute {k}: {type(v)}"
+                 )
+         solver = Zstruct(**d_solver)
+
+         d_output = {}
+         for k in output.keys():
+             v = output[k][()]
+             if isinstance(v, (str, int, float, bool)):
+                 d_output[k] = v
+             else:
+                 raise ValueError(
+                     f"Unsupported type for output attribute {k}: {type(v)}"
+                 )
+         output = Zstruct(**d_output)
+
+         settings = Settings(model=model, solver=solver, output=output)
+
+         # parameters = {k: v[()] for k, v in f["parameters"].items()}
+         # name = f["name"][()]
+         # output_dir = f["output_dir"][()]
+         # output_snapshots = f["output_snapshots"][()]
+         # output_write_all = f["output_write_all"][()]
+         # output_clean_dir = f["output_clean_dir"][()]
+         # truncate_last_time_step = f["truncate_last_time_step"][()]
+         callbacks = f["callbacks"][()]
+     return settings
+
+
+ def clean_files(filepath, filename=".vtk"):
+     main_dir = misc.get_main_directory()
+
+     abs_filepath = os.path.join(main_dir, filepath)
+     if os.path.exists(abs_filepath):
+         for file in os.listdir(abs_filepath):
+             if file.endswith(filename):
+                 os.remove(os.path.join(abs_filepath, file))
+
+
+ def _save_fields_to_hdf5(filepath, i_snapshot, time, Q, Qaux=None, overwrite=True):
+     i_snap = int(i_snapshot)
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, filepath)
+     with h5py.File(filepath, "a") as f:
+         if i_snap == 0 and "fields" not in f.keys():
+             fields = f.create_group("fields")
+         else:
+             fields = f["fields"]
+         group_name = "iteration_" + str(i_snap)
+         if group_name in fields:
+             if overwrite:
+                 del fields[group_name]
+             else:
+                 raise ValueError(f"Group {group_name} already exists in {filepath}")
+         attrs = fields.create_group(group_name)
+         attrs.create_dataset("time", data=time, dtype=float)
+         attrs.create_dataset("Q", data=Q)
+         if Qaux is not None:
+             attrs.create_dataset("Qaux", data=Qaux)
+     return i_snapshot + 1.0
+
+
+ def get_save_fields_simple(_filepath, write_all, overwrite=True):
+     def _save_hdf5(i_snapshot, time, Q, Qaux):
+         i_snap = int(i_snapshot)
+         main_dir = misc.get_main_directory()
+
+         filepath = os.path.join(main_dir, _filepath)
+
+         with h5py.File(filepath, "a") as f:
+             if i_snap == 0 and "fields" not in f.keys():
+                 fields = f.create_group("fields")
+             else:
+                 fields = f["fields"]
+             group_name = "iteration_" + str(i_snap)
+             if group_name in fields:
+                 if overwrite:
+                     del fields[group_name]
+                 else:
+                     raise ValueError(f"Group {group_name} already exists in {filepath}")
+             attrs = fields.create_group(group_name)
+             attrs.create_dataset("time", data=time, dtype=float)
+             attrs.create_dataset("Q", data=Q)
+             if Qaux is not None:
+                 attrs.create_dataset("Qaux", data=Qaux)
+         return i_snapshot + 1.0
+
+     return _save_hdf5
+
+
+ def _save_hdf5(_filepath, i_snapshot, time, Q, Qaux, overwrite=True):
+     i_snap = int(i_snapshot)
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, _filepath)
+
+     with h5py.File(filepath, "a") as f:
+         if i_snap == 0 and "fields" not in f.keys():
+             fields = f.create_group("fields")
+         else:
+             fields = f["fields"]
+         group_name = "iteration_" + str(i_snap)
+         if group_name in fields:
+             if overwrite:
+                 del fields[group_name]
+             else:
+                 raise ValueError(f"Group {group_name} already exists in {filepath}")
+         attrs = fields.create_group(group_name)
+         attrs.create_dataset("time", data=time, dtype=float)
+         attrs.create_dataset("Q", data=Q)
+         if Qaux is not None:
+             attrs.create_dataset("Qaux", data=Qaux)
+     return i_snapshot + 1.0
+
+
+ def get_save_fields(_filepath, write_all=False, overwrite=True):
+     if _HAVE_H5PY:
+
+         def save(time, next_write_at, i_snapshot, Q, Qaux):
+             if write_all or time >= next_write_at:
+                 return _save_hdf5(
+                     _filepath, i_snapshot, time, Q, Qaux, overwrite=overwrite
+                 )
+             else:
+                 return i_snapshot
+
+     else:
+
+         def save(time, next_write_at, i_snapshot, Q, Qaux):
+             if write_all or time >= next_write_at:
+                 return i_snapshot + 1
+             else:
+                 return i_snapshot
+
+     return save
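+
+ # Usage sketch for the writer closure (hypothetical path and arrays):
+ #
+ #     save = get_save_fields("output/sim.h5", write_all=False)
+ #     i_snapshot = 0.0
+ #     i_snapshot = save(time=0.0, next_write_at=0.0, i_snapshot=i_snapshot, Q=Q, Qaux=Qaux)
+ #
+ # Each call that writes appends a group "iteration_<n>" holding time, Q and
+ # Qaux; without h5py installed the closure only advances the snapshot counter.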
+
+
+ def save_fields_test(a):
+     filepath, time, next_write_at, i_snapshot, Q, Qaux, write_all = a
+     if not write_all and time < next_write_at:
+         return i_snapshot
+
+     _save_fields_to_hdf5(filepath, i_snapshot, time, Q, Qaux)
+     return i_snapshot + 1
+
+
+ def load_mesh_from_hdf5(filepath):
+     mesh = Mesh.from_hdf5(filepath)
+     return mesh
+
+
+ def load_fields_from_hdf5(filepath, i_snapshot=-1):
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, filepath)
+     with h5py.File(filepath, "r") as f:
+         fields = f["fields"]
+         if i_snapshot == -1:
+             i_snapshot = len(fields.keys()) - 1
+         group = fields[f"iteration_{i_snapshot}"]
+         time = group["time"][()]
+         Q = group["Q"][()]
+         Qaux = group["Qaux"][()]
+     return Q, Qaux, time
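+
+ # Read-back sketch (hypothetical path); the default i_snapshot=-1 selects the
+ # last written snapshot:
+ #
+ #     Q, Qaux, time = load_fields_from_hdf5("output/sim.h5")
+ #     Q0, Qaux0, t0 = load_fields_from_hdf5("output/sim.h5", i_snapshot=0)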
+
+
+ def load_timeline_of_fields_from_hdf5(filepath):
+     main_dir = misc.get_main_directory()
+
+     filepath = os.path.join(main_dir, filepath)
+     l_time = []
+     l_Q = []
+     l_Qaux = []
+     mesh = Mesh.from_hdf5(filepath)
+     with h5py.File(filepath, "r") as f:
+         fields = f["fields"]
+         n_snapshots = len(fields.keys())
+         for i in range(n_snapshots):
+             group = fields[f"iteration_{i}"]
+             time = group["time"][()]
+             Q = group["Q"][()]
+             Qaux = group["Qaux"][()]
+             l_time.append(time)
+             l_Q.append(Q)
+             l_Qaux.append(Qaux)
+     return mesh.cell_centers[0], np.array(l_Q), np.array(l_Qaux), np.array(l_time)
+
+
+ def _write_to_vtk_from_vertices_edges(
+     filepath,
+     mesh_type,
+     vertex_coordinates,
+     cell_vertices,
+     fields=None,
+     field_names=None,
+     point_fields=None,
+     point_field_names=None,
+ ):
+     if not _HAVE_MESHIO:
+         raise RuntimeError(
+             "_write_to_vtk_from_vertices_edges requires meshio, which is not available."
+         )
+     assert mesh_type in ("triangle", "quad", "wface", "hexahedron", "line", "tetra")
+     d_fields = {}
+     n_inner_elements = cell_vertices.shape[0]
+     if fields is not None:
+         if field_names is None:
+             field_names = [str(i) for i in range(fields.shape[0])]
+         for i_fields, _ in enumerate(fields):
+             d_fields[field_names[i_fields]] = [fields[i_fields, :n_inner_elements]]
+     point_d_fields = {}
+     if point_fields is not None:
+         if point_field_names is None:
+             point_field_names = [str(i) for i in range(point_fields.shape[0])]
+         for i_fields, _ in enumerate(point_fields):
+             point_d_fields[point_field_names[i_fields]] = point_fields[i_fields]
+     meshout = meshio.Mesh(
+         vertex_coordinates,
+         [(mesh_util.convert_mesh_type_to_meshio_mesh_type(mesh_type), cell_vertices)],
+         cell_data=d_fields,
+         point_data=point_d_fields,
+     )
+     path, filename = os.path.split(filepath)
+     filename_base, filename_ext = os.path.splitext(filename)
+     os.makedirs(path, exist_ok=True)
+     meshout.write(filepath + ".vtk")
+
+
+ def generate_vtk(
+     filepath: str,
+     field_names=None,
+     aux_field_names=None,
+     skip_aux=False,
+     filename="out",
+     warp=False,
+ ):
+     main_dir = misc.get_main_directory()
+     abs_filepath = os.path.join(main_dir, filepath)
+     path = os.path.dirname(abs_filepath)
+     full_filepath_out = os.path.join(path, filename)
+     # abs_filepath = os.path.join(main_dir, filepath)
+     # with h5py.File(os.path.join(filepath, 'mesh'), "r") as file_mesh, h5py.File(os.path.join(filepath, 'fields'), "r") as file_fields:
+     file = h5py.File(os.path.join(main_dir, filepath), "r")
+     file_fields = file["fields"]
+     mesh = Mesh.from_hdf5(abs_filepath)
+     snapshots = list(file_fields.keys())
+     # init timestamp file
+     vtk_timestamp_file = {"file-series-version": "1.0", "files": []}
+
+     def get_iteration_from_datasetname(name):
+         return int(name.split("_")[1])
+
+     # write out vtk files for each timestamp
+     for snapshot in snapshots:
+         time = file_fields[snapshot]["time"][()]
+         Q = file_fields[snapshot]["Q"][()]
+
+         if not skip_aux:
+             Qaux = file_fields[snapshot]["Qaux"][()]
+         else:
+             Qaux = np.empty((Q.shape[0], 0))
+         output_vtk = f"{filename}.{get_iteration_from_datasetname(snapshot)}"
+
+         # TODO callout to compute pointwise data?
+         point_fields = None
+         point_field_names = None
+
+         if field_names is None:
+             field_names = [str(i) for i in range(Q.shape[0])]
+         if aux_field_names is None:
+             aux_field_names = ["aux_{}".format(i) for i in range(Qaux.shape[0])]
+
+         fields = np.concatenate((Q, Qaux), axis=0)
+         # do not accumulate into field_names across iterations
+         all_field_names = field_names + aux_field_names
+
+         vertex_coordinates_3d = np.zeros((mesh.vertex_coordinates.shape[1], 3))
+         vertex_coordinates_3d[:, : mesh.dimension] = mesh.vertex_coordinates.T
+
+         _write_to_vtk_from_vertices_edges(
+             os.path.join(path, output_vtk),
+             mesh.type,
+             vertex_coordinates_3d,
+             mesh.cell_vertices.T,
+             fields=fields,
+             field_names=all_field_names,
+             point_fields=point_fields,
+             point_field_names=point_field_names,
+         )
+
+         vtk_timestamp_file["files"].append(
+             {
+                 "name": output_vtk + ".vtk",
+                 "time": time,
+             }
+         )
+     # finalize vtk
+     with open(os.path.join(path, f"{filename}.vtk.series"), "w") as f:
+         json.dump(vtk_timestamp_file, f)
+
+     file.close()
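+
+ # Conversion sketch (hypothetical path): requires h5py and meshio, and turns
+ # the HDF5 output into ParaView-readable out.0.vtk, out.1.vtk, ... plus an
+ # out.vtk.series index file:
+ #
+ #     generate_vtk("output/sim.h5", filename="out")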
misc/logger_config.py ADDED
@@ -0,0 +1,18 @@
+
+ import os
+ import sys
+ from loguru import logger
+
+ # Remove any default handlers
+ logger.remove()
+ # Check the ZoomyLog setting
+ zoomy_log_mode = os.getenv("ZoomyLog", "Default")
+
+ zoomy_log_level = os.getenv("ZoomyLogLevel", "INFO")
+
+ main_dir = os.getenv("ZOOMY_DIR", os.getcwd())
+
+ if zoomy_log_mode == "Default":
+     logger.add(sys.stderr, level=zoomy_log_level)
+ else:
+     logger.add(
+         os.path.join(main_dir, "logs/log.log"),
+         rotation="1 MB",
+         retention="10 days",
+         compression="zip",
+     )
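+
+ # Configuration sketch via environment variables (any ZoomyLog value other
+ # than "Default" switches to the rotating file sink under $ZOOMY_DIR/logs;
+ # run_simulation.py is a hypothetical entry point):
+ #
+ #     ZoomyLogLevel=DEBUG python run_simulation.py
+ #     ZoomyLog=file ZOOMY_DIR=/tmp/zoomy python run_simulation.py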