fiqus 2024.6.0__py3-none-any.whl → 2024.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69):
  1. fiqus/MainFiQuS.py +290 -134
  2. fiqus/data/DataConductor.py +301 -301
  3. fiqus/data/DataFiQuS.py +128 -84
  4. fiqus/data/DataFiQuSCCT.py +150 -150
  5. fiqus/data/DataFiQuSConductor.py +84 -84
  6. fiqus/data/DataFiQuSConductorAC_Strand.py +565 -565
  7. fiqus/data/DataFiQuSMultipole.py +716 -42
  8. fiqus/data/DataFiQuSPancake3D.py +737 -278
  9. fiqus/data/DataMultipole.py +180 -15
  10. fiqus/data/DataRoxieParser.py +90 -51
  11. fiqus/data/DataSettings.py +121 -0
  12. fiqus/data/DataWindingsCCT.py +37 -37
  13. fiqus/data/RegionsModelFiQuS.py +18 -6
  14. fiqus/geom_generators/GeometryCCT.py +905 -905
  15. fiqus/geom_generators/GeometryConductorAC_Strand.py +1391 -1391
  16. fiqus/geom_generators/GeometryMultipole.py +1827 -227
  17. fiqus/geom_generators/GeometryPancake3D.py +316 -117
  18. fiqus/geom_generators/GeometryPancake3DUtils.py +549 -0
  19. fiqus/getdp_runners/RunGetdpCCT.py +4 -4
  20. fiqus/getdp_runners/RunGetdpConductorAC_Strand.py +201 -201
  21. fiqus/getdp_runners/RunGetdpMultipole.py +115 -42
  22. fiqus/getdp_runners/RunGetdpPancake3D.py +28 -6
  23. fiqus/mains/MainCCT.py +2 -2
  24. fiqus/mains/MainConductorAC_Strand.py +132 -132
  25. fiqus/mains/MainMultipole.py +113 -62
  26. fiqus/mains/MainPancake3D.py +63 -23
  27. fiqus/mesh_generators/MeshCCT.py +209 -209
  28. fiqus/mesh_generators/MeshConductorAC_Strand.py +656 -656
  29. fiqus/mesh_generators/MeshMultipole.py +1243 -181
  30. fiqus/mesh_generators/MeshPancake3D.py +275 -192
  31. fiqus/parsers/ParserCOND.py +825 -0
  32. fiqus/parsers/ParserDAT.py +16 -16
  33. fiqus/parsers/ParserGetDPOnSection.py +212 -212
  34. fiqus/parsers/ParserGetDPTimeTable.py +134 -134
  35. fiqus/parsers/ParserMSH.py +53 -53
  36. fiqus/parsers/ParserPOS.py +214 -214
  37. fiqus/parsers/ParserRES.py +142 -142
  38. fiqus/plotters/PlotPythonCCT.py +133 -133
  39. fiqus/plotters/PlotPythonConductorAC.py +855 -840
  40. fiqus/plotters/PlotPythonMultipole.py +18 -18
  41. fiqus/post_processors/PostProcessCCT.py +440 -440
  42. fiqus/post_processors/PostProcessConductorAC.py +49 -49
  43. fiqus/post_processors/PostProcessMultipole.py +353 -229
  44. fiqus/post_processors/PostProcessPancake3D.py +8 -13
  45. fiqus/pre_processors/PreProcessCCT.py +175 -175
  46. fiqus/pro_assemblers/ProAssembler.py +14 -6
  47. fiqus/pro_material_functions/ironBHcurves.pro +246 -246
  48. fiqus/pro_templates/combined/CCT_template.pro +274 -274
  49. fiqus/pro_templates/combined/ConductorAC_template.pro +1025 -1025
  50. fiqus/pro_templates/combined/Multipole_template.pro +1694 -126
  51. fiqus/pro_templates/combined/Pancake3D_template.pro +2294 -1103
  52. fiqus/pro_templates/combined/TSA_materials.pro +162 -0
  53. fiqus/pro_templates/combined/materials.pro +36 -18
  54. fiqus/utils/Utils.py +508 -110
  55. fiqus/utils/update_data_settings.py +33 -0
  56. fiqus-2024.12.0.dist-info/METADATA +130 -0
  57. fiqus-2024.12.0.dist-info/RECORD +84 -0
  58. {fiqus-2024.6.0.dist-info → fiqus-2024.12.0.dist-info}/WHEEL +1 -1
  59. tests/test_FiQuS.py +1 -1
  60. tests/test_geometry_generators.py +101 -2
  61. tests/test_mesh_generators.py +154 -1
  62. tests/test_solvers.py +115 -21
  63. tests/utils/fiqus_test_classes.py +85 -21
  64. tests/utils/generate_reference_files_ConductorAC.py +57 -57
  65. tests/utils/generate_reference_files_Pancake3D.py +4 -5
  66. tests/utils/helpers.py +97 -97
  67. fiqus-2024.6.0.dist-info/METADATA +0 -103
  68. fiqus-2024.6.0.dist-info/RECORD +0 -79
  69. {fiqus-2024.6.0.dist-info → fiqus-2024.12.0.dist-info}/top_level.txt +0 -0
fiqus/utils/Utils.py CHANGED
@@ -1,27 +1,36 @@
1
1
  import sys
2
2
  import os
3
+ import getpass
4
+ import platform
3
5
  import shutil
6
+ import logging
7
+ import re
8
+
4
9
  import numpy as np
5
10
  from pathlib import Path
6
11
  from time import sleep
7
12
  import multiprocessing
13
+
14
+ import pandas as pd
8
15
  import ruamel.yaml
9
- import warnings
10
16
  import gmsh
11
- import logging
12
- import re
17
+ import json
18
+
19
+ from fiqus.data.DataSettings import DataSettings
20
+ from fiqus.data.DataFiQuS import FDM
13
21
 
14
22
  logger = logging.getLogger(__name__)
15
23
 
24
+
16
25
  class LoggingFormatter(logging.Formatter):
17
26
  """
18
27
  Logging formatter class
19
28
  """
20
- grey = "\x1b[38;20m" # debug level
21
- white = "\x1b[37;20m" # info level
22
- yellow = "\x1b[33;20m" # warning level
23
- red = "\x1b[31;20m" # error level
24
- bold_red = "\x1b[31;1m" # critical level
29
+ grey = "\x1b[38;20m" # debug level
30
+ white = "\x1b[37;20m" # info level
31
+ yellow = "\x1b[33;20m" # warning level
32
+ red = "\x1b[31;20m" # error level
33
+ bold_red = "\x1b[31;1m" # critical level
25
34
 
26
35
  reset = "\x1b[0m"
27
36
  format = '%(asctime)s | %(levelname)s | %(message)s'
@@ -48,21 +57,134 @@ class FilesAndFolders:
48
57
  yaml_str = yaml.load(stream)
49
58
  if "magnet" in yaml_str:
50
59
  yaml_str["magnet"]["input_file_path"] = str(full_file_path)
51
-
60
+
52
61
  return data_class(**yaml_str)
53
62
 
54
63
  @staticmethod
55
- def write_data_to_yaml(full_file_path, dict_of_data_class):
64
+ def write_data_to_yaml(full_file_path, dict_of_data_class, list_exceptions=[]):
56
65
  def my_represent_none(self, data):
66
+ """
67
+ Change data representation from empty string to "null" string
68
+ """
57
69
  return self.represent_scalar('tag:yaml.org,2002:null', 'null')
58
70
 
71
+ def flist(x):
72
+ """
73
+ Define a commented sequence to allow writing a list in a single row
74
+ """
75
+ retval = ruamel.yaml.comments.CommentedSeq(x)
76
+ retval.fa.set_flow_style() # fa -> format attribute
77
+ return retval
78
+
79
+ def list_single_row_recursively(data_dict: dict, exceptions: list):
80
+ """
81
+ Write lists in a single row
82
+ :param data_dict: Dictionary to edit
83
+ :param exceptions: List of strings defining keys that will not be written
84
+ in a single row
85
+ :return:
86
+ """
87
+ for key, value in data_dict.items():
88
+ if isinstance(value, list) and (key not in exceptions):
89
+ data_dict[key] = flist(value)
90
+ elif isinstance(value, np.ndarray):
91
+ data_dict[key] = flist(value.tolist())
92
+ elif isinstance(value, dict):
93
+ data_dict[key] = list_single_row_recursively(value, exceptions)
94
+
95
+ return data_dict
96
+
59
97
  yaml = ruamel.yaml.YAML()
60
98
  yaml.default_flow_style = False
61
99
  yaml.emitter.alt_null = 'Null'
62
100
  yaml.representer.add_representer(type(None), my_represent_none)
101
+ dict_of_data_class = list_single_row_recursively(dict_of_data_class, exceptions=list_exceptions)
63
102
  with open(full_file_path, 'w') as yaml_file:
64
103
  yaml.dump(dict_of_data_class, yaml_file)
65
104
 
105
    @staticmethod
    def write_data_model_to_yaml(full_file_path, data_model, with_comments=True, by_alias=True):
        """
        Write a pydantic data model to a YAML file, optionally annotating every
        field with its pydantic description as an end-of-line YAML comment.

        :param full_file_path: path of the YAML file to write
        :param data_model: pydantic model instance to serialize
        :param with_comments: if True, add field descriptions as YAML comments
        :param by_alias: if True, use pydantic field aliases as the YAML keys
        """
        if data_model:
            # Set up YAML instance settings:
            yamlInstance = ruamel.yaml.YAML()

            # Convert the model_data to a ruamel.yaml object/dictionary:
            if with_comments:
                path_object = Path(full_file_path)
                # Add pydantic descriptions to the yaml file as comments:
                # round-trip through a temporary file so ruamel builds its
                # comment-capable object tree from the dumped model.
                dummy_yaml_file_to_create_ruamel_object = (
                    path_object.resolve().parent.joinpath("dummy.yaml")
                )
                with open(dummy_yaml_file_to_create_ruamel_object, "w") as stream:
                    yamlInstance.dump(data_model.dict(by_alias=by_alias), stream)

                # Read the file:
                with open(dummy_yaml_file_to_create_ruamel_object, "r") as stream:
                    # Read the yaml file and store the date inside ruamel_yaml_object:
                    # ruamel_yaml_object is a special object that stores both the data and
                    # comments. Even though the data might be changed or added, the same
                    # object will be used to create the new YAML file to store the comments.
                    # NOTE(review): load() is given the path object, not the opened
                    # stream — the stream appears unused; confirm intended.
                    ruamel_yaml_object = yamlInstance.load(
                        dummy_yaml_file_to_create_ruamel_object
                    )

                # remove the temporary round-trip file
                os.remove(dummy_yaml_file_to_create_ruamel_object)

                def iterate_fields(model, ruamel_yaml_object):
                    # Recursively walk the pydantic model and attach each field's
                    # description to the matching ruamel node as an EOL comment.
                    for currentPydanticKey, value in model.__fields__.items():
                        if value.alias and by_alias:
                            currentDictionaryKey = value.alias
                        else:
                            currentDictionaryKey = currentPydanticKey

                        if value.description:
                            ruamel_yaml_object.yaml_add_eol_comment(
                                value.description,
                                currentDictionaryKey,
                            )

                        # Nested model: recurse into the corresponding sub-node
                        if hasattr(getattr(model, currentPydanticKey), "__fields__"):
                            new_ruamel_yaml_object = iterate_fields(
                                getattr(model, currentPydanticKey),
                                ruamel_yaml_object[currentDictionaryKey],
                            )

                            ruamel_yaml_object[currentDictionaryKey] = new_ruamel_yaml_object

                        # List of nested models: recurse element by element
                        elif isinstance(getattr(model, currentPydanticKey), list):
                            for i, item in enumerate(getattr(model, currentPydanticKey)):
                                if hasattr(item, "__fields__"):
                                    new_ruamel_yaml_object = iterate_fields(
                                        item,
                                        ruamel_yaml_object[currentDictionaryKey][i],
                                    )

                                    ruamel_yaml_object[currentDictionaryKey][i] = new_ruamel_yaml_object

                    return ruamel_yaml_object

                iterate_fields(data_model, ruamel_yaml_object)
                # NOTE(review): this second pass re-runs iterate_fields on the
                # top-level nested models only — presumably to make sure their
                # sub-trees are committed back; confirm whether it is redundant.
                for currentPydanticKey, value in data_model.__fields__.items():
                    if value.alias and by_alias:
                        currentDictionaryKey = value.alias
                    else:
                        currentDictionaryKey = currentPydanticKey

                    if hasattr(getattr(data_model, currentPydanticKey), "__fields__"):
                        ruamel_yaml_object[currentDictionaryKey] = iterate_fields(
                            getattr(data_model, currentPydanticKey),
                            ruamel_yaml_object[currentDictionaryKey],
                        )

                data_dict = ruamel_yaml_object

            else:
                # No comments requested: a plain dict is enough
                data_dict = data_model.dict(by_alias=by_alias)

            yamlInstance.indent(sequence=4, offset=2)
            with open(full_file_path, 'w') as yaml_file:
                yamlInstance.dump(data_dict, yaml_file)
187
+
66
188
  @staticmethod
67
189
  def prep_folder(folder_full_path, clear: bool = False):
68
190
  if clear:
@@ -71,25 +193,23 @@ class FilesAndFolders:
71
193
  if not os.path.exists(folder_full_path):
72
194
  os.makedirs(folder_full_path) # make new directory
73
195
 
74
- @staticmethod
75
- def get_folder_path(folder_type, folder, ref_nr, overwrite, required_folder):
76
- if required_folder and not (ref_nr and overwrite):
77
- last_nr = 0
78
- for study in [x.parts[-1] for x in Path(folder).iterdir() if x.is_dir()]:
79
- last_nr = max(int(study[study.find('_') + 1:]), last_nr)
80
- if overwrite and required_folder and last_nr > 0:
81
- run_nr = str(last_nr)
82
- else:
83
- run_nr = str(last_nr + 1)
84
- else:
85
- run_nr = str(ref_nr)
86
-
87
- folder_path = os.path.join(folder, folder_type + '_' + run_nr)
88
- FilesAndFolders.prep_folder(folder_path, overwrite and required_folder)
89
- return folder_path
90
-
91
196
  @staticmethod
92
197
  def get_folder_path(folder_type, folder, folder_key, overwrite, required_folder):
198
+ """
199
+ Method for ...
200
+ :param folder_type:
201
+ :type folder_type:
202
+ :param folder:
203
+ :type folder:
204
+ :param folder_key:
205
+ :type folder_key:
206
+ :param overwrite:
207
+ :type overwrite:
208
+ :param required_folder:
209
+ :type required_folder:
210
+ :return:
211
+ :rtype:
212
+ """
93
213
  if required_folder and not (folder_key and overwrite):
94
214
  all_dirs = [x.parts[-1] for x in Path(folder).iterdir() if x.is_dir()]
95
215
  all_relevant_dirs = [x for x in all_dirs if x.startswith(f"{folder_type}_{folder_key}")]
@@ -98,7 +218,7 @@ class FilesAndFolders:
98
218
  folder_key = new_folder_key
99
219
 
100
220
  folder_path = os.path.join(folder, folder_type + '_' + str(folder_key))
101
- # Disable the line below to avoid deleating volder
221
+ # Disable the line below to avoid deleting the folder # TODO: add logic to control this at a higher level
102
222
  FilesAndFolders.prep_folder(folder_path, overwrite and required_folder)
103
223
  return folder_path
104
224
 
@@ -141,56 +261,68 @@ class FilesAndFolders:
141
261
  if folder_key < i:
142
262
  break
143
263
  folder_key += 1
144
-
264
+
145
265
  return folder_key
146
266
 
147
267
  @staticmethod
148
268
  def print_welcome_graphics():
149
- print(r" _____ _ ___ ____ ")
150
- print(r"| ___(_)/ _ \ _ _/ ___| ")
151
- print(r"| |_ | | | | | | | \___ \ ")
152
- print(r"| _| | | |_| | |_| |___) |")
153
- print(r"|_| |_|\__\_\\__,_|____/ ")
154
- print("")
269
+ logger.info(r" _____ _ ___ ____ ")
270
+ logger.info(r"| ___(_)/ _ \ _ _/ ___| ")
271
+ logger.info(r"| |_ | | | | | | | \___ \ ")
272
+ logger.info(r"| _| | | |_| | |_| |___) |")
273
+ logger.info(r"|_| |_|\__\_\\__,_|____/ ")
274
+ logger.info("")
155
275
 
156
276
 
157
277
  class CheckForExceptions:
278
+
158
279
  @staticmethod
159
280
  def check_inputs(run): # RunFiQuS()
281
+ # """
282
+ # This method raises errors when geometry, mesh or solution folder inputs are incorrect. Warnings are disabled as a trial.
283
+ # :param run: FDM.run object
284
+ # :type run: FDM.run
285
+ # """
160
286
  if run.type == 'start_from_yaml':
161
- if run.geometry and not run.overwrite:
162
- warnings.warn("Warning: Geometry folder is needed only if it has to be overwritten. Ignoring it...")
163
- if run.solution or run.mesh:
164
- warnings.warn("Warning: Mesh and Solution folders are not needed. Ignoring them...")
165
- elif run.type == 'geometry_only':
166
- if run.solution or run.mesh:
167
- warnings.warn("Warning: Mesh and Solution folders are not needed. Ignoring them...")
287
+ pass
288
+ # if run.geometry and not run.overwrite:
289
+ # warnings.warn("Warning: Geometry folder is needed only if it has to be overwritten. Ignoring it...")
290
+ # if run.solution or run.mesh:
291
+ # warnings.warn("Warning: Mesh and Solution folders are not needed. Ignoring them...")
292
+ # elif run.type == 'geometry_only':
293
+ # if run.solution or run.mesh:
294
+ # warnings.warn("Warning: Mesh and Solution folders are not needed. Ignoring them...")
295
+ # elif run.type == 'geometry_and_mesh':
296
+ # if run.geometry and not run.overwrite:
297
+ # warnings.warn("Warning: Geometry folder is needed only if it has to be overwritten. Ignoring it...")
298
+ # if run.mesh:
299
+ # warnings.warn("Warning: Mesh folder is not needed. Ignoring it...")
168
300
  elif run.type == 'mesh_and_solve_with_post_process':
169
301
  if not run.geometry:
170
302
  raise Exception('Full path to Geometry not provided. '
171
303
  'Insert options -> reference_files -> geometry.')
172
- if run.mesh and not run.overwrite:
173
- warnings.warn("Warning: Mesh folder is needed only if it has to be overwritten. Ignoring it...")
174
- if run.solution:
175
- warnings.warn("Warning: Solution folder is not needed. Ignoring it...")
304
+ # if run.mesh and not run.overwrite:
305
+ # warnings.warn("Warning: Mesh folder is needed only if it has to be overwritten. Ignoring it...")
306
+ # if run.solution:
307
+ # warnings.warn("Warning: Solution folder is not needed. Ignoring it...")
176
308
  elif run.type == 'mesh_only':
177
309
  if not run.geometry:
178
310
  raise Exception('Full path to Mesh not provided. '
179
311
  'Insert options -> reference_files -> geometry.')
180
- if run.solution:
181
- warnings.warn("Warning: Solution folder is not needed. Ignoring it...")
312
+ # if run.solution:
313
+ # warnings.warn("Warning: Solution folder is not needed. Ignoring it...")
182
314
  elif run.type == 'solve_with_post_process':
183
315
  if not run.mesh or not run.geometry:
184
316
  raise Exception('Full path to Mesh not provided. '
185
317
  'Insert options -> reference_files -> geometry and mesh.')
186
- if run.solution and not run.overwrite:
187
- warnings.warn("Warning: Solution folder is needed only if it has to be overwritten. Ignoring it...")
318
+ # if run.solution and not run.overwrite:
319
+ # warnings.warn("Warning: Solution folder is needed only if it has to be overwritten. Ignoring it...")
188
320
  elif run.type == 'solve_only':
189
321
  if not run.mesh or not run.geometry:
190
322
  raise Exception('Full path to Mesh not provided. '
191
323
  'Insert options -> reference_files -> geometry and mesh.')
192
- if run.solution and not run.overwrite:
193
- warnings.warn("Warning: Solution folder is needed only if it has to be overwritten. Ignoring it...")
324
+ # if run.solution and not run.overwrite:
325
+ # warnings.warn("Warning: Solution folder is needed only if it has to be overwritten. Ignoring it...")
194
326
  elif run.type == 'post_process_only':
195
327
  if not run.mesh or not run.geometry or not run.solution:
196
328
  raise Exception('Full path to Solution not provided. '
@@ -198,23 +330,72 @@ class CheckForExceptions:
198
330
 
199
331
  @staticmethod
200
332
    def check_overwrite_conditions(folder_type, folder, folder_key):
        """
        This method used to print warnings about the overwrite-condition settings;
        the warnings are currently disabled as a trial, so the method is a no-op.
        :param folder_type: kind of folder the warning would refer to (e.g. Geometry, Mesh, Solution)
        :type folder_type: str
        :param folder: base folder the run folders live in
        :type folder: str
        :param folder_key: reference key/number of the run folder
        :type folder_key: str or int
        """
        pass
        # if folder_key:
        #     if not os.path.exists(os.path.join(folder, folder_type + '_' + str(folder_key))):
        #         warnings.warn(
        #             f'The folder {folder_type}_{folder_key} does not exist. Creating it...')
        # else:
        #     warnings.warn(
        #         f'Reference number of the folder {folder_type} not provided. '
        #         f'Overwriting the latest {folder_type} folder...')
209
351
 
210
352
 
211
353
  class GeometricFunctions:
212
354
 
355
+ @staticmethod
356
+ def sig_dig(n, precision=8):
357
+ return float(np.format_float_positional(n, precision=precision))
358
+
359
+ @staticmethod
360
+ def points_distance(a, b):
361
+ """
362
+ Computes the distance between two points a and b
363
+ :param a: list of x and y coordinates
364
+ :param b: list of x and y coordinates
365
+ """
366
+ a = np.array(a)
367
+ b = np.array(b)
368
+ return np.linalg.norm(a - b)
369
+
370
+ @staticmethod
371
+ def line_through_two_points(point1, point2):
372
+ """
373
+ Finds coefficients of the line through two points [x1,y1] and [x2,y2]
374
+ :param point1: 2-element list defining x/y positions of the 1st point
375
+ :param point2: 2-element list defining x/y positions of the 2nd point
376
+ :return: 3-element list defining the A, B, and C coefficients of the line, as in: A*x + B*y + C = 0
377
+ """
378
+ x1, y1 = point1[0], point1[1]
379
+ x2, y2 = point2[0], point2[1]
380
+ if x2 == x1:
381
+ A = 1
382
+ B = 0
383
+ C = - x1
384
+ elif y2 == y1:
385
+ A = 0
386
+ B = 1
387
+ C = - y1
388
+ else:
389
+ A = - (y2 - y1) / (x2 - x1)
390
+ B = + 1
391
+ C = - (x2 * y1 - x1 * y2) / (x2 - x1)
392
+ return [float(A), float(B), float(C)]
393
+
213
394
  @staticmethod
214
395
  def centroid(X, Y):
215
396
  """
216
397
  Computes the centroid coordinates of a non-self-intersecting closed polygon
217
- :param X: list of X coordinate of the vertices
398
+ :param X: list of x coordinate of the vertices
218
399
  :param Y: list of y coordinate of the vertices
219
400
  """
220
401
  sum_A, sum_Cx, sum_Cy = 0, 0, 0
@@ -231,40 +412,203 @@ class GeometricFunctions:
231
412
  def arc_center_from_3_points(a, b, c):
232
413
  """
233
414
  Computes the center coordinates of an arc passing through three points
234
- :param a: DataRoxieParser.Coord class object of one arc point
235
- :param b: DataRoxieParser.Coord class object of one arc point
236
- :param c: DataRoxieParser.Coord class object of one arc point
415
+ :param a: list of x and y coordinates of one arc point
416
+ :param b: list of x and y coordinates of one arc point
417
+ :param c: list of x and y coordinates of one arc point
237
418
  """
238
- ab = [a.x - b.x, a.y - b.y]
239
- ac = [a.x - c.x, a.y - c.y]
240
- sac = [a.x * a.x - c.x * c.x, a.y * a.y - c.y * c.y]
241
- sba = [b.x * b.x - a.x * a.x, b.y * b.y - a.y * a.y]
419
+ ab = [a[0] - b[0], a[1] - b[1]]
420
+ ac = [a[0] - c[0], a[1] - c[1]]
421
+ sac = [a[0] * a[0] - c[0] * c[0], a[1] * a[1] - c[1] * c[1]]
422
+ sba = [b[0] * b[0] - a[0] * a[0], b[1] * b[1] - a[1] * a[1]]
242
423
  yy = (sac[0] * ab[0] + sac[1] * ab[0] + sba[0] * ac[0] + sba[1] * ac[0]) / \
243
- (2 * ((c.y - a.y) * ab[0] - (b.y - a.y) * ac[0]))
424
+ (2 * ((c[1] - a[1]) * ab[0] - (b[1] - a[1]) * ac[0]))
244
425
  xx = (sac[0] * ab[1] + sac[1] * ab[1] + sba[0] * ac[1] + sba[1] * ac[1]) / \
245
- (2 * ((c.x - a.x) * ab[1] - (b.x - a.x) * ac[1]))
426
+ (2 * ((c[0] - a[0]) * ab[1] - (b[0] - a[0]) * ac[1]))
246
427
  return [-xx, -yy]
247
428
 
429
    @staticmethod
    def corrected_arc_center(C, pnt1, pnt2):
        """
        Computes the center coordinates of an arc from two points and a guessed center
        :param C: list of x and y coordinates of guessed center
        :param pnt1: list of x and y coordinates of first arc point
        :param pnt2: list of x and y coordinates of second arc point
        """
        # Swap the endpoints so that pnt1 is the one with non-negative y
        if pnt1[1] < 0:
            pnt_tmp = pnt1.copy()
            pnt1 = pnt2.copy()
            pnt2 = pnt_tmp
        # Radius estimated as the average distance of the two endpoints from the guess
        radius = (np.sqrt(np.square(pnt1[0] - C[0]) + np.square(pnt1[1] - C[1])) +
                  np.sqrt(np.square(pnt2[0] - C[0]) + np.square(pnt2[1] - C[1]))) / 2
        # d: half chord extents along x and y; aa: half chord length
        d = [0.5 * abs((pnt2[0] - pnt1[0])), 0.5 * abs((pnt1[1] - pnt2[1]))]
        aa = np.sqrt(np.square(d[0]) + np.square(d[1]))
        # bb: distance from the chord midpoint to the center (Pythagoras)
        bb = np.sqrt(np.square(radius) - np.square(aa))
        # M: chord midpoint; the sign pair selects on which side of the chord
        # the center lies, depending on the endpoints' vertical ordering and sign.
        # NOTE(review): appears to place the center on the perpendicular bisector
        # of the chord at distance bb from its midpoint — confirm for all quadrants.
        M = [pnt1[0] + d[0]]
        if pnt2[1] < pnt1[1]:
            M.append(pnt2[1] + d[1])
            sign = [-1, -1] if pnt2[1] >= 0. else [1, 1]
        else:
            M.append(pnt1[1] + d[1])
            sign = [1, -1] if pnt2[1] >= 0. else [-1, 1]
        return [M[0] + sign[0] * bb * d[1] / aa, M[1] + sign[1] * bb * d[0] / aa]
454
+
455
+ @staticmethod
456
+ def arc_angle_between_point_and_abscissa(p, c):
457
+ """
458
+ Returns the angle of an arc with center c and endpoints at (cx + radius, cy) and (px, py)
459
+ :param p: list of x and y coordinates of a point
460
+ :param c: list of x and y coordinates of the arc center
461
+ """
462
+ theta = np.arctan2(p[1] - c[1], p[0] - c[0])
463
+ return theta + (2 * np.pi if theta < 0 else 0)
464
+
465
+ @staticmethod
466
+ def intersection_between_two_lines(line1, line2):
467
+ """
468
+ Finds the intersection point between two lines
469
+ :param line1: list of A, B, C (A*x + B*y + C = 0)
470
+ :param line2: list of A, B, C (A*x + B*y + C = 0)
471
+ """
472
+ if line1[1] == 0.0:
473
+ x = - line1[2] / line1[0]
474
+ y = - (line2[0] * x + line2[2]) / line2[1]
475
+ elif line2[1] == 0.0:
476
+ x = - line2[2] / line2[0]
477
+ y = - (line1[0] * x + line1[2]) / line1[1]
478
+ else:
479
+ a = - line1[0] / line1[1]
480
+ c = - line1[2] / line1[1]
481
+ b = - line2[0] / line2[1]
482
+ d = - line2[2] / line2[1]
483
+ x = (d - c) / (a - b)
484
+ y = a * x + c
485
+ return [x, y]
486
+
487
+ @staticmethod
488
+ def intersection_between_circle_and_line(line, circle, get_only_closest: bool = False):
489
+ """
490
+ Finds the intersection point/s between a circle and a line
491
+ :param line: list of A, B, C (A*x + B*y + C = 0)
492
+ :param circle: list of lists (x and y coordinates of the center, and point)
493
+ :param get_only_closest: boolean to return only closest intersection point to the circle point
494
+ """
495
+ vertical = line[1] == 0
496
+ c, d = circle
497
+ r = GeometricFunctions.points_distance(c, d)
498
+ intersect = []
499
+ if vertical:
500
+ m = - line[2] / line[0]
501
+ delta = r ** 2 + 2 * m * c[0] - m ** 2 - c[0] ** 2
502
+ else:
503
+ m, b = - line[0] / line[1], - line[2] / line[1]
504
+ A = m ** 2 + 1
505
+ B = 2 * (m * b - c[0] - m * c[1])
506
+ C = b ** 2 - r ** 2 + c[0] ** 2 + c[1] ** 2 - 2 * c[1] * b
507
+ delta = B ** 2 - 4 * A * C
508
+
509
+ if delta < 0: # no intersection with the circle
510
+ return None
511
+ elif delta == 0: # tangent to the circle
512
+ x0 = m if vertical else - B / 2 / A
513
+ y0 = c[1] if vertical else m * x0 + b
514
+ intersect.append([x0, y0])
515
+ else: # two intersections with the circle
516
+ x1 = m if vertical else (- B + np.sqrt(delta)) / 2 / A
517
+ y1 = np.sqrt(delta) + c[1] if vertical else m * x1 + b
518
+ x2 = m if vertical else (- B - np.sqrt(delta)) / 2 / A
519
+ y2 = - np.sqrt(delta) + c[1] if vertical else m * x2 + b
520
+ intersect.append([x1, y1])
521
+ intersect.append([x2, y2])
522
+ if get_only_closest:
523
+ distance1 = GeometricFunctions.points_distance(d, intersect[0])
524
+ distance2 = GeometricFunctions.points_distance(d, intersect[1])
525
+ if distance1 > distance2:
526
+ intersect.pop(0)
527
+ else:
528
+ intersect.pop(1)
529
+ return intersect
530
+
531
    @staticmethod
    def intersection_between_arc_and_line(line, arc):
        """
        Finds the intersection point/s between an arc and a line
        :param line: list of A, B, C (A*x + B*y + C = 0)
        :param arc: list of lists (x and y coordinates of the center, high-angle endpoint, and low-angle endpoint)
        """
        vertical = line[1] == 0
        c, d, e = arc
        # radius and angular span of the arc (angles measured from the abscissa)
        r = GeometricFunctions.points_distance(c, d)
        angle_d = GeometricFunctions.arc_angle_between_point_and_abscissa(d, c)
        angle_e = GeometricFunctions.arc_angle_between_point_and_abscissa(e, c)
        intersect = []
        if vertical:
            # x = m is fixed: discriminant of the quadratic in y
            m = - line[2] / line[0]
            delta = r ** 2 + 2 * m * c[0] - m ** 2 - c[0] ** 2
        else:
            # slope/intercept form: substitute y = m*x + b into the circle equation
            m, b = - line[0] / line[1], - line[2] / line[1]
            A = m ** 2 + 1
            B = 2 * (m * b - c[0] - m * c[1])
            C = b ** 2 - r ** 2 + c[0] ** 2 + c[1] ** 2 - 2 * c[1] * b
            delta = B ** 2 - 4 * A * C

        if delta < 0:  # no intersection with the circle
            return None
        elif delta == 0:  # tangent to the circle
            x0 = m if vertical else - B / 2 / A
            y0 = c[1] if vertical else m * x0 + b
            # the tangency point counts only if it lies within the arc's angular span
            angle0 = GeometricFunctions.arc_angle_between_point_and_abscissa([x0, y0], c)
            intersect0 = True if angle_e < angle0 < angle_d else False
            if intersect0:
                intersect.append([x0, y0])
            else:  # no intersection with the arc
                return None
        else:  # two intersections with the circle
            x1 = m if vertical else (- B + np.sqrt(delta)) / 2 / A
            y1 = np.sqrt(delta) + c[1] if vertical else m * x1 + b
            # accept a circle intersection if it is inside the span, with a 1e-6
            # angular tolerance for points that coincide with the arc endpoints
            angle1 = GeometricFunctions.arc_angle_between_point_and_abscissa([x1, y1], c)
            intersect1 = True if (angle_e < angle1 < angle_d) or abs(angle1 - angle_e) < 1e-6 or abs(angle1 - angle_d) < 1e-6 else False
            x2 = m if vertical else (- B - np.sqrt(delta)) / 2 / A
            y2 = - np.sqrt(delta) + c[1] if vertical else m * x2 + b
            angle2 = GeometricFunctions.arc_angle_between_point_and_abscissa([x2, y2], c)
            intersect2 = True if (angle_e < angle2 < angle_d) or abs(angle2 - angle_e) < 1e-6 or abs(angle2 - angle_d) < 1e-6 else False
            if not intersect1 and not intersect2:  # no intersection with the arc
                return None
            if intersect1:  # first point intersecting the arc
                intersect.append([x1, y1])
            if intersect2:  # second point intersecting the arc
                intersect.append([x2, y2])

        return intersect
582
+
248
583
 
249
584
  class GmshUtils:
250
585
 
251
- def __init__(self, model_name=None, verbose=True):
586
+ def __init__(self, model_name='dummy_name', verbose=True):
252
587
  self.model_name = model_name
253
588
  self.verbose = verbose
254
589
 
255
    def initialize(self, verbosity_Gmsh: int = 5):
        """
        Initialize Gmsh with options for FiQuS
        :param verbosity_Gmsh: Input file run.verbosity_Gmsh
        :type verbosity_Gmsh: int
        """
        # Only initialize once per process; re-initializing would reset options.
        if not gmsh.is_initialized():
            gmsh.initialize(sys.argv)
            gmsh.model.add(str(self.model_name))
            num_threads = multiprocessing.cpu_count()
            gmsh.option.setNumber('General.NumThreads', num_threads)  # enable multithreading (this seems to be only for meshing)
            gmsh.option.setNumber('Mesh.MaxNumThreads1D', num_threads)
            gmsh.option.setNumber('Mesh.MaxNumThreads2D', num_threads)
            gmsh.option.setNumber('Mesh.MaxNumThreads3D', num_threads)
            gmsh.option.setNumber('Geometry.OCCParallel', 1)  # run OpenCASCADE boolean operations in parallel
            gmsh.option.setNumber('Geometry.ToleranceBoolean', 0.000001)
            gmsh.option.setString('Geometry.OCCTargetUnit', 'M')  # interpret imported OCC geometry in meters
            gmsh.option.setNumber("General.Verbosity", verbosity_Gmsh)
            # route Gmsh messages to the terminal only when verbose was requested
            if self.verbose:
                gmsh.option.setNumber('General.Terminal', 1)
            else:
                gmsh.option.setNumber('General.Terminal', 0)
268
612
 
269
613
  def check_for_event(self): # pragma: no cover
270
614
  action = gmsh.onelab.getString("ONELAB/Action")
@@ -301,37 +645,17 @@ class RoxieParsers:
301
645
  self.xyCorner = xyCorner
302
646
 
303
647
    @staticmethod
    def parseMap2d(map2dFile: Path, physical_quantity: str = 'magnetic_flux_density'):
        """
        Generates pandas data frame with map2d content
        :param map2dFile: path of map2dFile containing the content to parse
        :param physical_quantity: magnetic_flux_density or temperature
        """
        # Column headers for the two physical-quantity columns of the map2d file;
        # '-' marks an unused second column for the temperature case.
        physical_quantities_abbreviations = {'magnetic_flux_density': ('BX/T', 'BY/T'), 'temperature': ('T/K', '-')}
        columns = ['BL.', 'COND.', 'NO.', 'X-POS/MM', 'Y-POS/MM'] + \
                  [abbr for abbr in physical_quantities_abbreviations[physical_quantity]] + \
                  ['AREA/MM**2', 'CURRENT', 'FILL FAC.']
        # NOTE(review): the separator regex splits on runs of 2+ spaces, or on a
        # single space preceded by '2' or 'T' — presumably the tails of headers
        # like 'AREA/MM**2' and 'BX/T'; confirm against a real map2d header line.
        return pd.read_csv(map2dFile, sep=r"\s{2,}|(?<=2) |(?<=T) ", engine='python', usecols=columns)
335
659
 
336
660
  @staticmethod
337
661
  def parseCond2d(cond2dFile: Path):
@@ -384,18 +708,31 @@ class RoxieParsers:
384
708
 
385
709
  return conductorPositionsList
386
710
 
387
- def initialize_logger(verbose: bool = True, work_folder: str = None, time_stamp: str = None):
711
+
712
+ def initialize_logger(work_folder: str = None, time_stamp: str = None, verbose: bool = True, ):
713
+ """
714
+ This is a logger function to write FiQuS log files.
715
+
716
+ :param work_folder: Folder where the log file is written to
717
+ :type work_folder: str
718
+ :param time_stamp: time stamp put in the log file name
719
+ :type time_stamp: str
720
+ :param verbose: if true INFO level logs are printed, if false only WARNING level logs are printed to the console
721
+ :type verbose: bool
722
+ :return: logger object
723
+ :rtype: object
724
+ """
725
+
388
726
  logger = logging.getLogger()
389
727
 
728
+ while logger.hasHandlers():
729
+ logger.removeHandler(logger.handlers[0])
730
+
390
731
  if verbose:
391
732
  logger.setLevel(logging.INFO)
392
733
  else:
393
734
  logger.setLevel(logging.WARNING)
394
735
 
395
- for handler in logger.handlers:
396
- logger.handlers.remove(handler)
397
- handler.close()
398
-
399
736
  stdout_handler = logging.StreamHandler(sys.stdout)
400
737
  stdout_handler.setLevel(logging.INFO)
401
738
  stdout_handler.setFormatter(LoggingFormatter())
@@ -415,3 +752,64 @@ def initialize_logger(verbose: bool = True, work_folder: str = None, time_stamp:
415
752
  logger.addHandler(errorsAndWarnings_file_handler)
416
753
 
417
754
  return logger
755
+
756
+
757
def create_json_schema(data_model: FDM):
    """
    Create the JSON Schema from a Pydantic data model
    :param data_model: FDM
    :type data_model: FDM
    """
    # Generate the raw JSON schema from the Pydantic model
    raw_schema = data_model.model_json_schema()

    # Replace anyOf with oneOf for better compatibility
    # NOTE(review): this is a plain text substitution on the serialized schema,
    # so it would also rewrite a literal "anyOf" appearing inside descriptions.
    schema_text = json.dumps(raw_schema).replace("anyOf", "oneOf")

    # Pretty-print the schema with proper indentation
    pretty_json_schema = json.dumps(json.loads(schema_text), indent=4, ensure_ascii=False)

    # Define the output folder for the schema (repo_root/docs)
    docs_folder = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "docs"
    )

    # Make sure the folder for the JSON schema exists
    json_schema_file_path = os.path.join(docs_folder, "schema.json")
    os.makedirs(os.path.dirname(json_schema_file_path), exist_ok=True)

    # Write the prettified JSON schema to a file
    with open(json_schema_file_path, "w", encoding="utf-8") as schema_file:
        schema_file.write(pretty_json_schema)
786
+
787
+ def get_data_settings(GetDP_path=None, settings=None):
788
+ user_name = getpass.getuser()
789
+
790
+ if user_name == 'root':
791
+ user_name = 'SYSTEM'
792
+ elif user_name == 'MP-WIN-02$':
793
+ user_name = 'MP_WIN_02'
794
+ if not settings:
795
+ path_to_settings_file = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "tests", f"settings.{user_name}.yaml")
796
+ try:
797
+ settings = FilesAndFolders.read_data_from_yaml(path_to_settings_file, DataSettings)
798
+ except:
799
+ with open(settings.error.log, 'a') as file:
800
+ # Append the string to the file
801
+ file.write(f'Could not find: {path_to_settings_file}' + '\n')
802
+ raise ValueError(f'File: {path_to_settings_file} does not exist.')
803
+
804
+ if platform.system() == 'Windows':
805
+ if GetDP_path:
806
+ settings.GetDP_path = GetDP_path
807
+ elif platform.system() == 'Linux':
808
+ settings.GetDP_path = 'getdp'
809
+ elif platform.system() == 'Darwin':
810
+ if GetDP_path:
811
+ settings.GetDP_path = GetDP_path
812
+ else:
813
+ raise ValueError(f'Python claims that you are running on operating system: {platform.system()} and it is not supported by FiQuS!')
814
+
815
+ return settings