zoomy-core 0.1.2__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zoomy_core/mesh/mesh.py +85 -43
- zoomy_core/misc/io.py +72 -35
- {zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/METADATA +2 -1
- {zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/RECORD +7 -11
- zoomy_core/mesh/gmsh_loader.py +0 -301
- zoomy_core/misc/gui.py +0 -61
- zoomy_core/model/models/old_smm copy.py +0 -867
- zoomy_core/model/models/swe_old.py +0 -1018
- {zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/WHEEL +0 -0
- {zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/top_level.txt +0 -0
zoomy_core/mesh/mesh.py
CHANGED
```diff
@@ -2,6 +2,7 @@ import os
 
 try:
     import h5py
+
     _HAVE_H5PY = True
 except ImportError:
     _HAVE_H5PY = False
@@ -9,15 +10,22 @@ except ImportError:
 
 try:
     from petsc4py import PETSc
+
     _HAVE_PETSC = True
 except ImportError:
     _HAVE_PETSC = False
-
+
+
+try:
+    import meshio
+
+    _HAVE_MESHIO = True
+except:
+    _HAVE_MESHIO = False
 from copy import deepcopy
 from itertools import product
 from typing import Union
 
-import meshio
 import numpy as np
 from attr import define
 
@@ -62,7 +70,6 @@ def scale_lsq_derivative(mon_indices):
     return scale_factors
 
 
-
 def find_derivative_indices(full_monomials_arr, requested_derivs_arr):
     """
     Args:
@@ -91,7 +98,6 @@ def find_derivative_indices(full_monomials_arr, requested_derivs_arr):
     return indices
 
 
-
 def compute_derivatives(u, mesh, derivatives_multi_index=None):
     A_glob = mesh.lsq_gradQ  # shape (n_cells, n_monomials, n_neighbors)
     neighbors = mesh.lsq_neighbors  # list of neighbors per cell
@@ -108,7 +114,8 @@ def compute_derivatives(u, mesh, derivatives_multi_index=None):
         delta_u = u_neighbors - u_i
         return (scale_factors * (A_loc.T @ delta_u)).T  # shape (n_monomials,)
 
-    out = np.zeros((A_glob.shape[0], len(derivatives_multi_index)), dtype=float)
+    out = np.zeros((A_glob.shape[0], len(
+        derivatives_multi_index)), dtype=float)
     for i in range(A_glob.shape[0]):
         A_loc = A_glob[i]
         neighbor_idx = neighbors[i]
@@ -193,8 +200,10 @@ def least_squares_reconstruction_local(
 
     # Expand further if still under-resolved
     while n_nbr < max_neighbors:
-        extended_neighbors = expand_neighbors(neighbors_list, current_neighbors)
-        extended_neighbors = list(set(extended_neighbors) - {i_c})  # Remove self
+        extended_neighbors = expand_neighbors(
+            neighbors_list, current_neighbors)
+        extended_neighbors = list(
+            set(extended_neighbors) - {i_c})  # Remove self
         # Keep only new neighbors not already present
         new_neighbors = [
             n for n in extended_neighbors if n not in current_neighbors
@@ -220,7 +229,8 @@ def least_squares_reconstruction_local(
     for j, neighbor in enumerate(trimmed_neighbors):
         dX[j, :] = cell_centers[neighbor] - cell_centers[i_c]
 
-    V = build_vandermonde(dX, mon_indices)  # shape (max_neighbors, n_monomials)
+    # shape (max_neighbors, n_monomials)
+    V = build_vandermonde(dX, mon_indices)
 
     # Compute weights
     weights = compute_gaussian_weights(dX)  # shape (n_neighbors,)
@@ -238,6 +248,10 @@ def least_squares_reconstruction_local(
 
 
 def get_physical_boundary_labels(filepath):
+    if not _HAVE_MESHIO:
+        raise RuntimeError(
+            "_write_to_vtk_from_vertices_edges requires meshio, which is not available."
+        )
     mesh = meshio.read(filepath)
     boundary_dict = {key: value[0] for key, value in mesh.field_data.items()}
     return boundary_dict
@@ -327,7 +341,8 @@ def _boundary_dict_indices(d):
 def _get_neighberhood(dm, cell, cStart=0):
     neighbors = (
         np.array(
-            [dm.getSupport(f)[dm.getSupport(f) != cell][0] for f in dm.getCone(cell)],
+            [dm.getSupport(f)[dm.getSupport(f) != cell][0]
+                for f in dm.getCone(cell)],
             dtype=int,
         )
         - cStart
@@ -338,7 +353,8 @@ def _get_neighberhood(dm, cell, cStart=0):
 def _fill_neighborhood(dm, neighbors, max_neighbors, cStart=0):
     new_potential_neighbors = []
     for cell in neighbors:
-        new_potential_neighbors += list(_get_neighberhood(dm, cell, cStart=cStart))
+        new_potential_neighbors += list(_get_neighberhood(dm,
+                                        cell, cStart=cStart))
     new_potential_neighbors = np.array(new_potential_neighbors)
     new_neighbors = np.setdiff1d(new_potential_neighbors, neighbors)
     neighbors = np.concatenate((neighbors, new_neighbors))[:max_neighbors]
@@ -435,7 +451,8 @@ class Mesh:
             for d in range(self.dimension)
         ]
         # reverse the order of lexsort such that the most important is first IS NOT NEEDED, since lexsort starts sorting by the last entry in the list
-        sort_order_significance = np.lexsort([significance_per_dimension])
+        sort_order_significance = np.lexsort(
+            [significance_per_dimension])
 
         from_cells_sort_order = np.lexsort(
             [from_coords[d, :] for d in sort_order_significance]
@@ -485,7 +502,8 @@ class Mesh:
         cell_centers[:n_inner_cells, 0] = np.arange(xL + dx / 2, xR, dx)
         cell_centers[n_inner_cells, 0] = xL - dx / 2
         cell_centers[n_inner_cells + 1, 0] = xR + dx / 2
-        cell_neighbors = (n_cells + 1) * np.ones((n_cells, n_faces_per_cell), dtype=int)
+        cell_neighbors = (n_cells + 1) * \
+            np.ones((n_cells, n_faces_per_cell), dtype=int)
 
         cell_faces = np.empty((n_inner_cells, n_faces_per_cell), dtype=int)
         cell_faces[:, 0] = list(range(0, n_faces - 1))
@@ -511,8 +529,10 @@ class Mesh:
             face_normals[i_face, 0] = 1.0
 
         boundary_face_cells = np.array([0, n_inner_cells - 1], dtype=int)
-        boundary_face_ghosts = np.array([n_inner_cells, n_inner_cells + 1], dtype=int)
-        boundary_face_function_numbers = np.empty((n_boundary_faces), dtype=int)
+        boundary_face_ghosts = np.array(
+            [n_inner_cells, n_inner_cells + 1], dtype=int)
+        boundary_face_function_numbers = np.empty(
+            (n_boundary_faces), dtype=int)
         boundary_face_function_numbers[0] = 0
         boundary_face_function_numbers[1] = 1
         boundary_face_physical_tags = np.array([0, 1], dtype=int)
@@ -520,8 +540,8 @@ class Mesh:
 
         face_cells = np.empty((n_faces, 2), dtype=int)
         # face_cell_face_index = (n_faces + 1)*np.ones((n_faces, 2), dtype=int)
-        face_cells[1:n_faces - 1, 0] = list(range(0, n_inner_cells - 1))
-        face_cells[1:n_faces - 1, 1] = list(range(1, n_inner_cells))
+        face_cells[1: n_faces - 1, 0] = list(range(0, n_inner_cells - 1))
+        face_cells[1: n_faces - 1, 1] = list(range(1, n_inner_cells))
         # face_cell_face_index[1:n_faces-1, 0] = 1
         # face_cell_face_index[1:n_faces-1, 1] = 0
         face_cells[0, 0] = n_inner_cells
@@ -558,12 +578,12 @@ class Mesh:
         lsq_scale_factors = scale_lsq_derivative(lsq_monomial_multi_index)
 
         n_face_neighbors = 2
-        face_neighbors = (n_cells + 1) * np.ones((n_faces, n_face_neighbors), dtype=int)
+        face_neighbors = (n_cells + 1) * \
+            np.ones((n_faces, n_face_neighbors), dtype=int)
 
         for i_f, neighbors in enumerate(face_cells):
             face_neighbors[i_f] = neighbors
 
-
         z_ordering = np.array([-1], dtype=float)
 
         # return cls(dimension, 'line', n_cells, n_cells + 1, 2, n_faces_per_element, vertex_coordinates, element_vertices, element_face_areas, element_centers, element_volume, element_inradius, element_face_normals, element_n_neighbors, element_neighbors, element_neighbors_face_index, boundary_face_vertices, boundary_face_corresponding_element, boundary_face_element_face_index, boundary_face_tag, boundary_tag_names)
@@ -679,14 +699,16 @@ class Mesh:
         n_inner_cells = cEnd - cStart
         n_faces = egEnd - egStart
         n_vertices = vEnd - vStart
-        cell_vertices = np.zeros((n_inner_cells, n_vertices_per_cell), dtype=int)
+        cell_vertices = np.zeros(
+            (n_inner_cells, n_vertices_per_cell), dtype=int)
         cell_faces = np.zeros((n_inner_cells, n_faces_per_cell), dtype=int)
         cell_centers = np.zeros((n_cells, 3), dtype=float)
         # I create cell_volumes of size n_cells because then I can avoid an if clause in the numerical flux computation. The values will be delted after using apply_boundary_conditions anyways
         cell_volumes = np.ones((n_cells), dtype=float)
         cell_inradius = compute_cell_inradius(dm)
         for i_c, c in enumerate(range(cStart, cEnd)):
-            cell_volume, cell_center, cell_normal = dm.computeCellGeometryFVM(c)
+            cell_volume, cell_center, cell_normal = dm.computeCellGeometryFVM(
+                c)
             transitive_closure_points, transitive_closure_orientation = (
                 dm.getTransitiveClosure(c, useCone=True)
             )
@@ -731,7 +753,8 @@ class Mesh:
 
         for e in range(egStart, egEnd):
             label = gdm.getLabelValue("Face Sets", e)
-            face_volume, face_center, face_normal = gdm.computeCellGeometryFVM(e)
+            face_volume, face_center, face_normal = gdm.computeCellGeometryFVM(
+                e)
             face_vertices = get_face_vertices(dim, gdm, vgStart, e)
             face_vertices_coords = vertex_coordinates[face_vertices]
             _face_cells = gdm.getSupport(e)
@@ -801,7 +824,8 @@ class Mesh:
         # NON_VECTORIZED CASE
         polynomial_degree = 1
         n_neighbors = n_faces_per_cell * polynomial_degree
-        cell_neighbors = (n_cells + 1) * np.ones((n_cells, n_neighbors), dtype=int)
+        cell_neighbors = (n_cells + 1) * \
+            np.ones((n_cells, n_neighbors), dtype=int)
 
         for i_c, c in enumerate(range(cgStart, cgEnd)):
             # GET NEIGHBORHOOD
@@ -826,7 +850,8 @@ class Mesh:
         lsq_scale_factors = scale_lsq_derivative(lsq_monomial_multi_index)
 
         n_face_neighbors = (2 * (n_faces_per_cell + 1) - 2) * polynomial_degree
-        face_neighbors = (n_cells + 1) * np.ones((n_faces, n_face_neighbors), dtype=int)
+        face_neighbors = (n_cells + 1) * \
+            np.ones((n_faces, n_face_neighbors), dtype=int)
 
         for i_f, f in enumerate(range(egStart, egEnd)):
             # GET NEIGHBORHOOD
@@ -835,7 +860,6 @@ class Mesh:
             )
             face_neighbors[i_f, :] = neighbors
 
-
         face_volumes = np.array(face_volumes, dtype=float)
         _face_centers = np.array(face_centers, dtype=float)
         face_centers = np.zeros((n_faces, 3), dtype=float)
@@ -845,18 +869,21 @@ class Mesh:
 
         face_cells = np.array(face_cells, dtype=int)
         # face_cell_face_index = np.array(face_cell_face_index, dtype=int)
-        boundary_face_function_numbers = _boundary_dict_indices(boundary_face_cells)
+        boundary_face_function_numbers = _boundary_dict_indices(
+            boundary_face_cells)
 
         # get rid of empty keys in the boundary_dict (e.g. no surface values in 2d)
         boundary_dict_inverted = {v: k for k, v in boundary_dict.items()}
-        boundary_dict_reduced = {k: boundary_dict_inverted[k] for k in allowed_keys}
+        boundary_dict_reduced = {
+            k: boundary_dict_inverted[k] for k in allowed_keys}
 
         # sort the dict by the values
         sorted_keys = np.array(list(boundary_dict_reduced.keys()), dtype=int)
        sorted_keys.sort()
         boundary_dict = {k: boundary_dict_reduced[k] for k in sorted_keys}
         boundary_face_cells = {k: boundary_face_cells[k] for k in sorted_keys}
-        boundary_face_ghosts = {k: boundary_face_ghosts[k] for k in sorted_keys}
+        boundary_face_ghosts = {
+            k: boundary_face_ghosts[k] for k in sorted_keys}
         boundary_face_face_indices = {
             k: boundary_face_face_indices[k] for k in sorted_keys
         }
@@ -937,7 +964,8 @@ class Mesh:
         n_boundary_faces = msh.n_boundary_faces * n_layers + 2 * msh.n_cells
         n_faces_per_cell = mesh_util._get_faces_per_element(mesh_type)
         n_faces = n_inner_cells * n_faces_per_cell
-        vertex_coordinates = extrude.extrude_points(msh.vertex_coordinates.T, Z).T
+        vertex_coordinates = extrude.extrude_points(
+            msh.vertex_coordinates.T, Z).T
         cell_vertices = extrude.extrude_element_vertices(
             msh.cell_vertices.T, msh.n_vertices, n_layers
         ).T
@@ -946,16 +974,16 @@ class Mesh:
         cell_volumes = np.empty((n_cells), dtype=float)
         cell_inradius = np.empty((n_cells), dtype=float)
         cell_face_areas = np.empty((n_cells, n_faces_per_cell), dtype=float)
-        cell_face_normals = np.zeros((n_cells, n_faces_per_cell, 3), dtype=float)
+        cell_face_normals = np.zeros(
+            (n_cells, n_faces_per_cell, 3), dtype=float)
         cell_n_neighbors = np.empty((n_cells), dtype=int)
         cell_neighbors = np.empty((n_cells, n_faces_per_cell), dtype=int)
-        cell_neighbors_face_index = np.empty((n_cells, n_faces_per_cell), dtype=int)
+        cell_neighbors_face_index = np.empty(
+            (n_cells, n_faces_per_cell), dtype=int)
         for i_elem, elem in enumerate(cell_vertices.T):
-
             cell_centers[i_elem, :dimension] = mesh_util.center(
                 vertex_coordinates.T, elem
             )
-
 
         # truncate normals and positions from 3d to dimendion-d
         vertex_coordinates = vertex_coordinates.T[:, :dimension].T
@@ -966,8 +994,10 @@ class Mesh:
         # empty fields
         cell_faces = np.empty((n_inner_cells, n_faces_per_cell), dtype=int)
         boundary_face_cells = np.array([0, n_inner_cells - 1], dtype=int)
-        boundary_face_ghosts = np.array([n_inner_cells, n_inner_cells + 1], dtype=int)
-        boundary_face_function_numbers = np.empty((n_boundary_faces), dtype=int)
+        boundary_face_ghosts = np.array(
+            [n_inner_cells, n_inner_cells + 1], dtype=int)
+        boundary_face_function_numbers = np.empty(
+            (n_boundary_faces), dtype=int)
         boundary_face_physical_tags = np.array([0, 1], dtype=int)
         boundary_face_face_indices = np.array([0, n_faces - 1], dtype=int)
         face_cells = np.empty((n_faces, 2), dtype=int)
@@ -980,7 +1010,8 @@ class Mesh:
 
         # hard coded guess
         n_face_neighbors = 0
-        face_neighbors = (n_cells + 1) * np.ones((n_faces, n_face_neighbors), dtype=int)
+        face_neighbors = (n_cells + 1) * \
+            np.ones((n_faces, n_face_neighbors), dtype=int)
         lsq_gradQ = np.zeros((n_cells, dimension, 0), dtype=float)
         lsq_neighbors = np.zeros(1)
         lsq_monomial_multi_index = np.zeros(1)
@@ -1024,7 +1055,7 @@ class Mesh:
         )
 
     def write_to_hdf5(self, filepath: str):
-        if
+        if not _HAVE_H5PY:
             raise RuntimeError(
                 "Mesh.write_to_hdf5() requires h5py, which is not available."
             )
@@ -1039,15 +1070,18 @@ class Mesh:
             mesh.create_dataset("n_vertices", data=self.n_vertices)
             mesh.create_dataset("n_boundary_faces", data=self.n_boundary_faces)
             mesh.create_dataset("n_faces_per_cell", data=self.n_faces_per_cell)
-            mesh.create_dataset("vertex_coordinates", data=self.vertex_coordinates)
+            mesh.create_dataset("vertex_coordinates",
+                                data=self.vertex_coordinates)
             mesh.create_dataset("cell_vertices", data=self.cell_vertices)
             mesh.create_dataset("cell_faces", data=self.cell_faces)
             mesh.create_dataset("cell_volumes", data=self.cell_volumes)
             mesh.create_dataset("cell_centers", data=self.cell_centers)
             mesh.create_dataset("cell_inradius", data=self.cell_inradius)
             mesh.create_dataset("cell_neighbors", data=self.cell_neighbors)
-            mesh.create_dataset("boundary_face_cells", data=self.boundary_face_cells)
-            mesh.create_dataset("boundary_face_ghosts", data=self.boundary_face_ghosts)
+            mesh.create_dataset("boundary_face_cells",
+                                data=self.boundary_face_cells)
+            mesh.create_dataset("boundary_face_ghosts",
+                                data=self.boundary_face_ghosts)
             mesh.create_dataset(
                 "boundary_face_function_numbers",
                 data=self.boundary_face_function_numbers,
@@ -1071,14 +1105,17 @@ class Mesh:
             )
             mesh.create_dataset(
                 "boundary_conditions_sorted_names",
-                data=np.array(self.boundary_conditions_sorted_names, dtype="S"),
+                data=np.array(
+                    self.boundary_conditions_sorted_names, dtype="S"),
             )
             mesh.create_dataset("lsq_gradQ", data=np.array(self.lsq_gradQ))
-            mesh.create_dataset("lsq_neighbors", data=np.array(self.lsq_neighbors))
+            mesh.create_dataset(
+                "lsq_neighbors", data=np.array(self.lsq_neighbors))
             mesh.create_dataset(
                 "lsq_monomial_multi_index", data=(self.lsq_monomial_multi_index)
             )
-            mesh.create_dataset("lsq_scale_factors", data=(self.lsq_scale_factors))
+            mesh.create_dataset("lsq_scale_factors",
+                                data=(self.lsq_scale_factors))
            mesh.create_dataset("z_ordering", data=np.array(self.z_ordering))
 
     @classmethod
@@ -1119,7 +1156,8 @@ class Mesh:
             file["mesh"]["face_neighbors"][()],
             file["mesh"]["boundary_conditions_sorted_physical_tags"][()],
             np.array(
-                file["mesh"]["boundary_conditions_sorted_names"][()], dtype="str"
+                file["mesh"]["boundary_conditions_sorted_names"][()
+                                                                 ], dtype="str"
             ),
             file["mesh"]["lsq_gradQ"][()],
             file["mesh"]["lsq_neighbors"][()],
@@ -1136,6 +1174,10 @@ class Mesh:
         field_names: Union[list[str], None] = None,
         point_data: dict = {},
     ):
+        if not _HAVE_MESHIO:
+            raise RuntimeError(
+                "_write_to_vtk_from_vertices_edges requires meshio, which is not available."
+            )
         d_fields = {}
         vertex_coords_3d = np.zeros((3, self.vertex_coordinates.shape[1]))
         vertex_coords_3d[: self.vertex_coordinates.shape[0], :] = (
```
zoomy_core/misc/io.py
CHANGED
```diff
@@ -1,11 +1,18 @@
 import os
 import numpy as np
-import meshio
 import json
 import shutil
 
+try:
+    import meshio
+
+    _HAVE_MESHIO = True
+except:
+    _HAVE_MESHIO = False
+
 try:
     import h5py
+
     _HAVE_H5PY = True
 except ImportError:
     _HAVE_H5PY = False
@@ -17,7 +24,6 @@ from library.zoomy_core.misc.misc import Zstruct, Settings
 from library.zoomy_core.misc.logger_config import logger
 
 
-
 def init_output_directory(path, clean):
     main_dir = os.getenv("ZOOMY_DIR")
     path = os.path.join(main_dir, path)
@@ -30,12 +36,14 @@ def init_output_directory(path, clean):
         else:
             os.remove(os.path.join(path, f))
 
+
 def get_hdf5_type(value):
     out = type(value)
     if isinstance(value, str):
         out = h5py.string_dtype()
     return out
 
+
 def write_dict_to_hdf5(group, d):
     for key, value in d.items():
         if isinstance(value, dict):
@@ -47,12 +55,14 @@ def write_dict_to_hdf5(group, d):
             group.create_dataset(key, data=value)
         elif isinstance(value, type(np.ndarray)):
             group.create_dataset(key, data=value)
-        elif hasattr(value, "as_dict"):
+        elif hasattr(value, "as_dict"):
             subgroup = group.create_group(key)
             write_dict_to_hdf5(subgroup, value.as_dict())
         else:
-            logger.warning(f"Skipping unsupported type for key: {key} -> {type(value)}")
-
+            logger.warning(f"Skipping unsupported type for key: {
+                           key} -> {type(value)}")
+
+
 def load_hdf5_to_dict(group):
     d = {}
     for key, value in group.items():
@@ -60,11 +70,12 @@ def load_hdf5_to_dict(group):
             d[key] = load_hdf5_to_dict(value)
         elif isinstance(value, h5py.Dataset):
             if value.dtype == h5py.string_dtype():
-                d[key] = value[()].decode(
-            else:
+                d[key] = value[()].decode("utf-8")
+            else:
                 d[key] = value[()]
         else:
-            logger.warning(f"Skipping unsupported type for key: {key} -> {type(value)}")
+            logger.warning(f"Skipping unsupported type for key: {
+                           key} -> {type(value)}")
 
     return d
 
@@ -74,16 +85,18 @@ def save_settings(settings):
     filepath = os.path.join(main_dir, settings.output.directory)
     with h5py.File(os.path.join(filepath, "settings.h5"), "w") as f:
         write_dict_to_hdf5(f, settings.as_dict(recursive=True))
-
+
+
 def load_settings(filepath):
     main_dir = os.getenv("ZOOMY_DIR")
     filepath = os.path.join(main_dir, filepath)
     with h5py.File(os.path.join(filepath, "settings.h5"), "r") as f:
         d = load_hdf5_to_dict(f)
-
-    settings = Settings.from_dict(d)
+
+    settings = Settings.from_dict(d)
     return settings
-
+
+
 def load_settings2(filepath):
     main_dir = os.getenv("ZOOMY_DIR")
     filepath = os.path.join(main_dir, filepath)
@@ -91,19 +104,21 @@ def load_settings2(filepath):
         model = f["model"]
         solver = f["solver"]
         output = f["output"]
-
+
         d_model = {}
-        if
+        if "parameters" in model:
             parameters = {k: v[()] for k, v in model["parameters"].items()}
             parameters = Zstruct(**parameters)
         for k in model.keys():
-            if k !=
+            if k != "parameters":
                 v = model[k][()]
                 if isinstance(v, (str, int, float, bool)):
                     d_model[k] = v
                 else:
-                    raise ValueError(f"Unsupported type for model attribute {k}: {type(v)}")
-
+                    raise ValueError(
+                        f"Unsupported type for model attribute {k}: {type(v)}"
+                    )
+        d_model["parameters"] = parameters
         model = Zstruct(**d_model)
         d_solver = {}
         for k in solver.keys():
@@ -111,21 +126,24 @@ def load_settings2(filepath):
             if isinstance(v, (str, int, float, bool)):
                 d_solver[k] = v
             else:
-                raise ValueError(f"Unsupported type for solver attribute {k}: {type(v)}")
+                raise ValueError(
+                    f"Unsupported type for solver attribute {k}: {type(v)}"
+                )
         solver = Zstruct(**d_solver)
-
+
         d_output = {}
         for k in output.keys():
             v = output[k][()]
             if isinstance(v, (str, int, float, bool)):
                 d_output[k] = v
             else:
-                raise ValueError(f"Unsupported type for output attribute {k}: {type(v)}")
+                raise ValueError(
+                    f"Unsupported type for output attribute {k}: {type(v)}"
+                )
         output = Zstruct(**d_output)
-
+
         settings = Settings(model=model, solver=solver, output=output)
-
-
+
         # parameters = {k: v[()] for k, v in f["parameters"].items()}
         # name = f["name"][()]
         # output_dir = f["output_dir"][()]
@@ -160,7 +178,8 @@ def _save_fields_to_hdf5(filepath, i_snapshot, time, Q, Qaux=None, overwrite=True
         if overwrite:
             del fields[group_name]
         else:
-            raise ValueError(f"Group {group_name} already exists in {filepath}")
+            raise ValueError(
+                f"Group {group_name} already exists in {filepath}")
     attrs = fields.create_group(group_name)
     attrs.create_dataset("time", data=time, dtype=float)
     attrs.create_dataset("Q", data=Q)
@@ -168,6 +187,7 @@ def _save_fields_to_hdf5(filepath, i_snapshot, time, Q, Qaux=None, overwrite=True
         attrs.create_dataset("Qaux", data=Qaux)
     return i_snapshot + 1.0
 
+
 def get_save_fields_simple(_filepath, write_all, overwrite=True):
     def _save_hdf5(i_snapshot, time, Q, Qaux):
         i_snap = int(i_snapshot)
@@ -184,15 +204,18 @@ def get_save_fields_simple(_filepath, write_all, overwrite=True):
             if overwrite:
                 del fields[group_name]
             else:
-                raise ValueError(f"Group {group_name} already exists in {filepath}")
+                raise ValueError(
+                    f"Group {group_name} already exists in {filepath}")
         attrs = fields.create_group(group_name)
         attrs.create_dataset("time", data=time, dtype=float)
         attrs.create_dataset("Q", data=Q)
         if Qaux is not None:
             attrs.create_dataset("Qaux", data=Qaux)
         return i_snapshot + 1.0
+
     return _save_hdf5
 
+
 def _save_hdf5(_filepath, i_snapshot, time, Q, Qaux, overwrite=True):
     i_snap = int(i_snapshot)
     main_dir = os.getenv("ZOOMY_DIR")
@@ -208,7 +231,8 @@ def _save_hdf5(_filepath, i_snapshot, time, Q, Qaux, overwrite=True):
     if overwrite:
         del fields[group_name]
     else:
-        raise ValueError(f"Group {group_name} already exists in {filepath}")
+        raise ValueError(
+            f"Group {group_name} already exists in {filepath}")
     attrs = fields.create_group(group_name)
     attrs.create_dataset("time", data=time, dtype=float)
     attrs.create_dataset("Q", data=Q)
@@ -216,19 +240,25 @@ def _save_hdf5(_filepath, i_snapshot, time, Q, Qaux, overwrite=True):
         attrs.create_dataset("Qaux", data=Qaux)
     return i_snapshot + 1.0
 
+
 def get_save_fields(_filepath, write_all=False, overwrite=True):
     if _HAVE_H5PY:
-        def save(time, next_write_at, i_snapshot, Q, Qaux):
+
+        def save(time, next_write_at, i_snapshot, Q, Qaux):
             if write_all or time >= next_write_at:
-                return _save_hdf5(_filepath, i_snapshot, time, Q, Qaux, overwrite=overwrite)
+                return _save_hdf5(
+                    _filepath, i_snapshot, time, Q, Qaux, overwrite=overwrite
+                )
             else:
                 return i_snapshot
     else:
-        def save(time, next_write_at, i_snapshot, Q, Qaux):
+
+        def save(time, next_write_at, i_snapshot, Q, Qaux):
             if write_all or time >= next_write_at:
                 return i_snapshot + 1
             else:
                 return i_snapshot
+
     return save
 
 
@@ -240,6 +270,7 @@ def save_fields_test(a):
         _save_fields_to_hdf5(filepath, i_snapshot, time, Q, Qaux)
         return i_snapshot + 1
 
+
 def load_mesh_from_hdf5(filepath):
     mesh = Mesh.from_hdf5(filepath)
     return mesh
@@ -292,6 +323,10 @@ def _write_to_vtk_from_vertices_edges(
     point_fields=None,
     point_field_names=None,
 ):
+    if not _HAVE_MESHIO:
+        raise RuntimeError(
+            "_write_to_vtk_from_vertices_edges requires meshio, which is not available."
+        )
     assert (
         mesh_type == "triangle"
         or mesh_type == "quad"
@@ -306,13 +341,15 @@ def _write_to_vtk_from_vertices_edges(
     if field_names is None:
         field_names = [str(i) for i in range(fields.shape[0])]
     for i_fields, _ in enumerate(fields):
-        d_fields[field_names[i_fields]] = [fields[i_fields, :n_inner_elements]]
+        d_fields[field_names[i_fields]] = [
+            fields[i_fields, :n_inner_elements]]
     point_d_fields = {}
     if point_fields is not None:
         if point_field_names is None:
             point_field_names = [str(i) for i in range(point_fields.shape[0])]
         for i_fields, _ in enumerate(point_fields):
-            point_d_fields[point_field_names[i_fields]] = point_fields[i_fields]
+            point_d_fields[point_field_names[i_fields]
+                           ] = point_fields[i_fields]
     meshout = meshio.Mesh(
         vertex_coordinates,
         [(mesh_util.convert_mesh_type_to_meshio_mesh_type(mesh_type), cell_vertices)],
@@ -331,7 +368,7 @@ def generate_vtk(
     aux_field_names=None,
     skip_aux=False,
     filename="out",
-    warp=False
+    warp=False,
 ):
     main_dir = os.getenv("ZOOMY_DIR")
     abs_filepath = os.path.join(main_dir, filepath)
@@ -367,7 +404,8 @@ def generate_vtk(
     if field_names is None:
         field_names = [str(i) for i in range(Q.shape[0])]
     if aux_field_names is None:
-        aux_field_names = ["aux_{}".format(str(i)) for i in range(Qaux.shape[0])]
+        aux_field_names = ["aux_{}".format(
+            str(i)) for i in range(Qaux.shape[0])]
 
     fields = np.concatenate((Q, Qaux), axis=0)
     field_names = field_names + aux_field_names
@@ -375,7 +413,6 @@ def generate_vtk(
     vertex_coordinates_3d = np.zeros((mesh.vertex_coordinates.shape[1], 3))
     vertex_coordinates_3d[:, : mesh.dimension] = mesh.vertex_coordinates.T
 
-
     _write_to_vtk_from_vertices_edges(
         os.path.join(path, output_vtk),
         mesh.type,
@@ -397,5 +434,5 @@ def generate_vtk(
     # finalize vtk
     with open(os.path.join(path, f"{full_filepath_out}.vtk.series"), "w") as f:
         json.dump(vtk_timestamp_file, f)
-
+
     file.close()
```
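In io.py the same optional-import treatment is applied to meshio, and the existing `_HAVE_H5PY` flag now selects between two `save` closures in `get_save_fields`: with h5py the snapshot is written via `_save_hdf5`, without it the closure only advances the snapshot counter. Because both closures share the signature `save(time, next_write_at, i_snapshot, Q, Qaux)`, a driver loop needs no h5py-specific branching. The sketch below shows hypothetical usage; the import path, output path, time-stepping variables, and array shapes are made up for the example and are not part of the diff.

```python
import numpy as np

from zoomy_core.misc.io import get_save_fields  # import path assumed

# Factory from the io.py diff: returns an HDF5 writer if h5py is installed,
# otherwise a no-op that still advances the snapshot index.
save = get_save_fields("simulations/demo", write_all=False, overwrite=True)

t, dt, t_end = 0.0, 0.01, 1.0
write_every = 0.25          # desired output interval (illustrative)
next_write_at = 0.0
i_snapshot = 0.0
Q = np.zeros((3, 100))      # conserved fields, shape (n_fields, n_cells), illustrative
Qaux = np.zeros((1, 100))   # auxiliary fields, illustrative

while t < t_end:
    # ... advance Q and Qaux by one time step here ...
    new_snapshot = save(t, next_write_at, i_snapshot, Q, Qaux)
    if new_snapshot != i_snapshot:   # a snapshot slot was consumed (written or skipped)
        next_write_at += write_every
    i_snapshot = new_snapshot
    t += dt
```

The design keeps the scheduling logic identical on minimal installs: the fallback saver returns `i_snapshot + 1` when a write would have been due, so the caller's bookkeeping does not diverge.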
{zoomy_core-0.1.2.dist-info → zoomy_core-0.1.3.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: zoomy_core
-Version: 0.1.2
+Version: 0.1.3
 Summary: A simulation software for dimensionally-reduced free surface flows.
 Author-email: Ingo Steldermann <steldermann@mbd.rwth-aachen.de>
 License: GNU
@@ -20,6 +20,7 @@ Requires-Dist: attrs
 Requires-Dist: sympy>=1.13.3
 Requires-Dist: numpy
 Requires-Dist: scipy
+Requires-Dist: loguru
 Provides-Extra: gui
 Requires-Dist: pyvista; extra == "gui"
 Requires-Dist: trame; extra == "gui"
```