fiqus 2025.2.0__py3-none-any.whl → 2025.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fiqus/MainFiQuS.py +4 -9
- fiqus/data/DataConductor.py +350 -301
- fiqus/data/DataFiQuS.py +42 -115
- fiqus/data/DataFiQuSCCT.py +150 -150
- fiqus/data/DataFiQuSConductor.py +97 -84
- fiqus/data/DataFiQuSConductorAC_Strand.py +701 -565
- fiqus/data/DataModelCommon.py +439 -0
- fiqus/data/DataMultipole.py +0 -13
- fiqus/data/DataRoxieParser.py +7 -0
- fiqus/data/DataWindingsCCT.py +37 -37
- fiqus/data/RegionsModelFiQuS.py +61 -104
- fiqus/geom_generators/GeometryCCT.py +904 -905
- fiqus/geom_generators/GeometryConductorAC_Strand.py +1863 -1391
- fiqus/geom_generators/GeometryMultipole.py +5 -4
- fiqus/geom_generators/GeometryPancake3D.py +1 -1
- fiqus/getdp_runners/RunGetdpCCT.py +13 -4
- fiqus/getdp_runners/RunGetdpConductorAC_Strand.py +341 -201
- fiqus/getdp_runners/RunGetdpPancake3D.py +2 -2
- fiqus/mains/MainConductorAC_Strand.py +141 -133
- fiqus/mains/MainMultipole.py +6 -5
- fiqus/mains/MainPancake3D.py +3 -4
- fiqus/mesh_generators/MeshCCT.py +209 -209
- fiqus/mesh_generators/MeshConductorAC_Strand.py +709 -656
- fiqus/mesh_generators/MeshMultipole.py +43 -46
- fiqus/parsers/ParserDAT.py +16 -16
- fiqus/parsers/ParserGetDPOnSection.py +212 -212
- fiqus/parsers/ParserGetDPTimeTable.py +134 -134
- fiqus/parsers/ParserMSH.py +53 -53
- fiqus/parsers/ParserPOS.py +214 -214
- fiqus/parsers/ParserRES.py +142 -142
- fiqus/plotters/PlotPythonCCT.py +133 -133
- fiqus/plotters/PlotPythonConductorAC.py +1079 -855
- fiqus/plotters/PlotPythonMultipole.py +18 -18
- fiqus/post_processors/PostProcessCCT.py +444 -440
- fiqus/post_processors/PostProcessConductorAC.py +997 -49
- fiqus/post_processors/PostProcessMultipole.py +19 -19
- fiqus/pre_processors/PreProcessCCT.py +175 -175
- fiqus/pro_material_functions/ironBHcurves.pro +246 -246
- fiqus/pro_templates/combined/CCT_template.pro +275 -274
- fiqus/pro_templates/combined/ConductorAC_template.pro +1474 -1025
- fiqus/pro_templates/combined/Multipole_template.pro +5 -5
- fiqus/utils/Utils.py +12 -7
- {fiqus-2025.2.0.dist-info → fiqus-2025.10.0.dist-info}/METADATA +65 -63
- fiqus-2025.10.0.dist-info/RECORD +86 -0
- {fiqus-2025.2.0.dist-info → fiqus-2025.10.0.dist-info}/WHEEL +1 -1
- tests/test_geometry_generators.py +4 -0
- tests/test_mesh_generators.py +5 -0
- tests/test_solvers.py +41 -4
- tests/utils/fiqus_test_classes.py +15 -6
- tests/utils/generate_reference_files_ConductorAC.py +57 -57
- tests/utils/helpers.py +97 -97
- fiqus-2025.2.0.dist-info/RECORD +0 -85
- {fiqus-2025.2.0.dist-info → fiqus-2025.10.0.dist-info}/LICENSE.txt +0 -0
- {fiqus-2025.2.0.dist-info → fiqus-2025.10.0.dist-info}/top_level.txt +0 -0
|
@@ -1,134 +1,134 @@
|
|
|
1
|
-
import re
|
|
2
|
-
import math
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
class ParserGetDPTimeTable:
|
|
6
|
-
"""
|
|
7
|
-
This class parses GetDP's TimeTable format output files.
|
|
8
|
-
"""
|
|
9
|
-
|
|
10
|
-
def __init__(self, filePath):
|
|
11
|
-
self.time_values = []
|
|
12
|
-
self.values = []
|
|
13
|
-
# Parse data:
|
|
14
|
-
with open(filePath) as file:
|
|
15
|
-
# If the first line starts with #, we skip it.
|
|
16
|
-
first_line = file.readline()
|
|
17
|
-
if not first_line.startswith("#"):
|
|
18
|
-
number_of_entries = len(
|
|
19
|
-
re.findall(r"(-?\d+\.?\d*e?[-+]*\d*)", first_line)
|
|
20
|
-
)
|
|
21
|
-
# readline() moves the cursor to the next line, so we need to go back to
|
|
22
|
-
# the beginning of the file.
|
|
23
|
-
file.seek(0)
|
|
24
|
-
else:
|
|
25
|
-
second_line = file.readline()
|
|
26
|
-
number_of_entries = len(
|
|
27
|
-
re.findall(r"(-?\d+\.?\d*e?[-+]*\d*)", second_line)
|
|
28
|
-
)
|
|
29
|
-
# Seek to the second line
|
|
30
|
-
file.seek(len(first_line) + 1)
|
|
31
|
-
|
|
32
|
-
data = file.read()
|
|
33
|
-
|
|
34
|
-
entries = re.findall(r"(-?\d+\.?\d*e?[-+]*\d*)", data)
|
|
35
|
-
if number_of_entries == 2:
|
|
36
|
-
# Global scalar value:
|
|
37
|
-
time_index = 0
|
|
38
|
-
value_index = 1
|
|
39
|
-
self.data_type = "scalar"
|
|
40
|
-
elif number_of_entries == 6:
|
|
41
|
-
# Local scalar value probed at a point:
|
|
42
|
-
time_index = 1
|
|
43
|
-
value_index = 5
|
|
44
|
-
self.data_type = "scalar"
|
|
45
|
-
elif number_of_entries == 8:
|
|
46
|
-
# Local vector value probed at a point:
|
|
47
|
-
time_index = 1
|
|
48
|
-
value_index = [5, 6, 7]
|
|
49
|
-
self.data_type = "vector"
|
|
50
|
-
elif number_of_entries == 14:
|
|
51
|
-
# Local tensor value probed at a point:
|
|
52
|
-
time_index = 1
|
|
53
|
-
value_index = [[5, 6, 7], [8, 9, 10], [11, 12, 13]]
|
|
54
|
-
self.data_type = "tensor"
|
|
55
|
-
else:
|
|
56
|
-
raise ValueError(f"{filePath} contains an unexpected type of data.")
|
|
57
|
-
|
|
58
|
-
# Pack entries for each line:
|
|
59
|
-
entries = [
|
|
60
|
-
entries[i : i + number_of_entries]
|
|
61
|
-
for i in range(0, len(entries), number_of_entries)
|
|
62
|
-
]
|
|
63
|
-
|
|
64
|
-
for entry in entries:
|
|
65
|
-
if self.data_type == "scalar":
|
|
66
|
-
self.time_values.append(float(entry[time_index]))
|
|
67
|
-
self.values.append(float(entry[value_index]))
|
|
68
|
-
elif self.data_type == "vector":
|
|
69
|
-
self.time_values.append(float(entry[time_index]))
|
|
70
|
-
self.values.append(
|
|
71
|
-
(
|
|
72
|
-
float(entry[value_index[0]]),
|
|
73
|
-
float(entry[value_index[1]]),
|
|
74
|
-
float(entry[value_index[2]]),
|
|
75
|
-
)
|
|
76
|
-
)
|
|
77
|
-
elif self.data_type == "tensor":
|
|
78
|
-
self.time_values.append(float(entry[time_index]))
|
|
79
|
-
self.values.append(
|
|
80
|
-
[
|
|
81
|
-
[
|
|
82
|
-
float(entry[value_index[0][0]]),
|
|
83
|
-
float(entry[value_index[0][1]]),
|
|
84
|
-
float(entry[value_index[0][2]]),
|
|
85
|
-
],
|
|
86
|
-
[
|
|
87
|
-
float(entry[value_index[1][0]]),
|
|
88
|
-
float(entry[value_index[1][1]]),
|
|
89
|
-
float(entry[value_index[1][2]]),
|
|
90
|
-
],
|
|
91
|
-
[
|
|
92
|
-
float(entry[value_index[2][0]]),
|
|
93
|
-
float(entry[value_index[2][1]]),
|
|
94
|
-
float(entry[value_index[2][2]]),
|
|
95
|
-
],
|
|
96
|
-
]
|
|
97
|
-
)
|
|
98
|
-
|
|
99
|
-
def get_equivalent_scalar_values(self):
|
|
100
|
-
"""
|
|
101
|
-
Returns the same scalar if self.data_type is scalar.
|
|
102
|
-
Returns the magnitude of the vectors if self.data_type is vector.
|
|
103
|
-
Returns the von misses equivalents of the tensors if self.data_type is tensor.
|
|
104
|
-
"""
|
|
105
|
-
|
|
106
|
-
if self.data_type == "scalar":
|
|
107
|
-
return self.values
|
|
108
|
-
elif self.data_type == "vector":
|
|
109
|
-
magnitudes = [
|
|
110
|
-
math.sqrt(v[0] ** 2 + v[1] ** 2 + v[2] ** 2)
|
|
111
|
-
for v in self.values
|
|
112
|
-
]
|
|
113
|
-
return magnitudes
|
|
114
|
-
elif self.data_type == "tensor":
|
|
115
|
-
von_misses_equivalents = [
|
|
116
|
-
math.sqrt(
|
|
117
|
-
0.5
|
|
118
|
-
* (
|
|
119
|
-
(v[0][0] - v[1][1]) ** 2
|
|
120
|
-
+ (v[1][1] - v[2][2]) ** 2
|
|
121
|
-
+ (v[2][2] - v[0][0]) ** 2
|
|
122
|
-
+ 6
|
|
123
|
-
* (
|
|
124
|
-
((v[0][1] + v[1][0]) / 2) ** 2
|
|
125
|
-
+ ((v[1][2] + v[2][1]) / 2) ** 2
|
|
126
|
-
+ ((v[0][2] + v[2][0]) / 2) ** 2
|
|
127
|
-
)
|
|
128
|
-
)
|
|
129
|
-
)
|
|
130
|
-
for v in self.values
|
|
131
|
-
]
|
|
132
|
-
return von_misses_equivalents
|
|
133
|
-
else:
|
|
134
|
-
raise RuntimeError("Data type not recognized.")
|
|
1
|
+
import re
|
|
2
|
+
import math
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class ParserGetDPTimeTable:
    """
    Parses GetDP's TimeTable format output files.

    After construction:
      - ``self.time_values`` holds the time stamps (floats),
      - ``self.values`` holds the corresponding values (floats, 3-tuples, or
        3x3 nested lists),
      - ``self.data_type`` is one of ``"scalar"``, ``"vector"`` or ``"tensor"``.
    """

    # Matches signed integers/floats with an optional exponent part.
    # Compiled once instead of re-built on every findall call.
    _NUMBER_PATTERN = re.compile(r"(-?\d+\.?\d*e?[-+]*\d*)")

    def __init__(self, filePath):
        """
        Parse the TimeTable file at ``filePath``.

        :param filePath: full path to a GetDP TimeTable output file.
        :raises ValueError: if the number of columns per line does not match
            any of the known TimeTable layouts (2, 6, 8 or 14 columns).
        """
        self.time_values = []
        self.values = []
        # Parse data:
        with open(filePath) as file:
            # An optional first line starting with "#" is a comment header;
            # the column count must be taken from the first data line.
            first_line = file.readline()
            if not first_line.startswith("#"):
                number_of_entries = len(self._NUMBER_PATTERN.findall(first_line))
                # The first line is data, so it is part of the payload.
                data = first_line + file.read()
            else:
                second_line = file.readline()
                number_of_entries = len(self._NUMBER_PATTERN.findall(second_line))
                # The cursor is already past the second line, so prepend it to
                # the rest of the file instead of seeking back. (The previous
                # implementation used file.seek(len(first_line) + 1), which
                # over-seeks by one character -- readline() keeps the newline,
                # so the second line starts at offset len(first_line) -- and
                # silently dropped the first character of the first data line.)
                data = second_line + file.read()

            entries = self._NUMBER_PATTERN.findall(data)
            if number_of_entries == 2:
                # Global scalar value: time, value
                time_index = 0
                value_index = 1
                self.data_type = "scalar"
            elif number_of_entries == 6:
                # Local scalar value probed at a point:
                time_index = 1
                value_index = 5
                self.data_type = "scalar"
            elif number_of_entries == 8:
                # Local vector value probed at a point:
                time_index = 1
                value_index = [5, 6, 7]
                self.data_type = "vector"
            elif number_of_entries == 14:
                # Local tensor value probed at a point (row-major 3x3):
                time_index = 1
                value_index = [[5, 6, 7], [8, 9, 10], [11, 12, 13]]
                self.data_type = "tensor"
            else:
                raise ValueError(f"{filePath} contains an unexpected type of data.")

            # Pack the flat list of numbers into one sub-list per line:
            entries = [
                entries[i : i + number_of_entries]
                for i in range(0, len(entries), number_of_entries)
            ]

            for entry in entries:
                self.time_values.append(float(entry[time_index]))
                if self.data_type == "scalar":
                    self.values.append(float(entry[value_index]))
                elif self.data_type == "vector":
                    self.values.append(tuple(float(entry[i]) for i in value_index))
                else:  # tensor
                    self.values.append(
                        [[float(entry[i]) for i in row] for row in value_index]
                    )

    def get_equivalent_scalar_values(self):
        """
        Return scalar equivalents of the parsed values.

        - scalar data: the values themselves,
        - vector data: the Euclidean magnitudes of the vectors,
        - tensor data: the von Mises equivalents of the tensors
          (symmetrized off-diagonal terms).

        :raises RuntimeError: if ``self.data_type`` is not recognized.
        """
        if self.data_type == "scalar":
            return self.values
        elif self.data_type == "vector":
            return [
                math.sqrt(v[0] ** 2 + v[1] ** 2 + v[2] ** 2)
                for v in self.values
            ]
        elif self.data_type == "tensor":
            return [
                math.sqrt(
                    0.5
                    * (
                        (v[0][0] - v[1][1]) ** 2
                        + (v[1][1] - v[2][2]) ** 2
                        + (v[2][2] - v[0][0]) ** 2
                        + 6
                        * (
                            ((v[0][1] + v[1][0]) / 2) ** 2
                            + ((v[1][2] + v[2][1]) / 2) ** 2
                            + ((v[0][2] + v[2][0]) / 2) ** 2
                        )
                    )
                )
                for v in self.values
            ]
        else:
            raise RuntimeError("Data type not recognized.")
|
fiqus/parsers/ParserMSH.py
CHANGED
|
@@ -1,53 +1,53 @@
|
|
|
1
|
-
import gmsh
|
|
2
|
-
import statistics
|
|
3
|
-
|
|
4
|
-
class ParserMSH:
|
|
5
|
-
def __init__(self, mesh_file_path):
|
|
6
|
-
"""
|
|
7
|
-
Read msh file and returns mesh format and physical names as class attributes.
|
|
8
|
-
:param mesh_file_path: Full path to .msh file, including file name and extension.
|
|
9
|
-
"""
|
|
10
|
-
self.mesh_file_path = mesh_file_path
|
|
11
|
-
|
|
12
|
-
self._mesh_format_markers = {'s': '$MeshFormat', 'e': '$EndMeshFormat'}
|
|
13
|
-
self._physical_name_markers = {'s': 'PhysicalNames', 'e': '$EndPhysicalNames'}
|
|
14
|
-
|
|
15
|
-
with open(mesh_file_path) as f:
|
|
16
|
-
self._contents = f.read()
|
|
17
|
-
|
|
18
|
-
def __get_content(self, markers_dict):
|
|
19
|
-
"""
|
|
20
|
-
Gets text string between two markers specified in markers_dict
|
|
21
|
-
"""
|
|
22
|
-
return self._contents[self._contents.find(markers_dict['s']) + len(markers_dict['s']):self._contents.find(markers_dict['e'])]
|
|
23
|
-
|
|
24
|
-
def get_average_mesh_quality(self):
|
|
25
|
-
"""
|
|
26
|
-
Gets the lowest mesh quality from the mesh file
|
|
27
|
-
"""
|
|
28
|
-
gmsh.initialize()
|
|
29
|
-
gmsh.open(self.mesh_file_path)
|
|
30
|
-
|
|
31
|
-
# SICN not implemented in 1D!
|
|
32
|
-
allElementsDim2 = gmsh.model.mesh.getElements(dim=2)[1]
|
|
33
|
-
allElementsDim3 = gmsh.model.mesh.getElements(dim=3)[1]
|
|
34
|
-
allElements = list(allElementsDim2[0]) + (list(allElementsDim3[0]) if allElementsDim3 else [])
|
|
35
|
-
lowestQuality = statistics.fmean(gmsh.model.mesh.getElementQualities(allElements))
|
|
36
|
-
|
|
37
|
-
gmsh.finalize()
|
|
38
|
-
|
|
39
|
-
return lowestQuality
|
|
40
|
-
|
|
41
|
-
@property
|
|
42
|
-
def mesh_format(self):
|
|
43
|
-
"""
|
|
44
|
-
Parse mesh_generators field and assign it to the class attribute
|
|
45
|
-
"""
|
|
46
|
-
return self.__get_content(self._mesh_format_markers)
|
|
47
|
-
|
|
48
|
-
@property
|
|
49
|
-
def physical_names(self):
|
|
50
|
-
"""
|
|
51
|
-
Parse physical_names field and assign it to the class attribute
|
|
52
|
-
"""
|
|
53
|
-
return self.__get_content(self._physical_name_markers)
|
|
1
|
+
import gmsh
|
|
2
|
+
import statistics
|
|
3
|
+
|
|
4
|
+
class ParserMSH:
    def __init__(self, mesh_file_path):
        """
        Read a .msh file and expose its mesh format and physical names as
        class attributes.

        :param mesh_file_path: Full path to .msh file, including file name and extension.
        """
        self.mesh_file_path = mesh_file_path

        # Start ('s') and end ('e') markers delimiting sections of the file.
        self._mesh_format_markers = {'s': '$MeshFormat', 'e': '$EndMeshFormat'}
        # NOTE(review): the start marker omits the leading '$'
        # ('PhysicalNames' vs '$PhysicalNames'); kept as-is to preserve the
        # exact substring returned by `physical_names` -- confirm intent.
        self._physical_name_markers = {'s': 'PhysicalNames', 'e': '$EndPhysicalNames'}

        with open(mesh_file_path) as f:
            self._contents = f.read()

    def __get_content(self, markers_dict):
        """
        Return the text between the first occurrence of the start marker
        ``markers_dict['s']`` and the end marker ``markers_dict['e']``
        (both markers excluded).
        """
        start = self._contents.find(markers_dict['s']) + len(markers_dict['s'])
        end = self._contents.find(markers_dict['e'])
        return self._contents[start:end]

    def get_average_mesh_quality(self):
        """
        Return the average (arithmetic mean) element quality of all 2D and
        3D elements in the mesh file, as reported by gmsh.
        """
        gmsh.initialize()
        gmsh.open(self.mesh_file_path)

        # SICN not implemented in 1D!
        allElementsDim2 = gmsh.model.mesh.getElements(dim=2)[1]
        allElementsDim3 = gmsh.model.mesh.getElements(dim=3)[1]
        # Guard both dimensions: either list can be empty for a purely
        # 2D or purely 3D mesh (the original only guarded the 3D case).
        allElements = (list(allElementsDim2[0]) if allElementsDim2 else []) + (
            list(allElementsDim3[0]) if allElementsDim3 else []
        )
        # fmean computes the mean quality (the old local name "lowestQuality"
        # and its docstring were misleading).
        averageQuality = statistics.fmean(gmsh.model.mesh.getElementQualities(allElements))

        gmsh.finalize()

        return averageQuality

    @property
    def mesh_format(self):
        """
        The raw text of the $MeshFormat section of the file.
        """
        return self.__get_content(self._mesh_format_markers)

    @property
    def physical_names(self):
        """
        The raw text of the $PhysicalNames section of the file.
        """
        return self.__get_content(self._physical_name_markers)
|