fiqus 2024.7.0__py3-none-any.whl → 2024.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. fiqus/MainFiQuS.py +290 -134
  2. fiqus/data/DataConductor.py +301 -301
  3. fiqus/data/DataFiQuS.py +128 -84
  4. fiqus/data/DataFiQuSCCT.py +150 -150
  5. fiqus/data/DataFiQuSConductor.py +84 -84
  6. fiqus/data/DataFiQuSConductorAC_Strand.py +565 -565
  7. fiqus/data/DataFiQuSMultipole.py +716 -42
  8. fiqus/data/DataFiQuSPancake3D.py +737 -278
  9. fiqus/data/DataMultipole.py +180 -15
  10. fiqus/data/DataRoxieParser.py +90 -51
  11. fiqus/data/DataSettings.py +121 -0
  12. fiqus/data/DataWindingsCCT.py +37 -37
  13. fiqus/data/RegionsModelFiQuS.py +18 -6
  14. fiqus/geom_generators/GeometryCCT.py +905 -905
  15. fiqus/geom_generators/GeometryConductorAC_Strand.py +1391 -1391
  16. fiqus/geom_generators/GeometryMultipole.py +1827 -227
  17. fiqus/geom_generators/GeometryPancake3D.py +316 -117
  18. fiqus/geom_generators/GeometryPancake3DUtils.py +549 -0
  19. fiqus/getdp_runners/RunGetdpCCT.py +4 -4
  20. fiqus/getdp_runners/RunGetdpConductorAC_Strand.py +201 -201
  21. fiqus/getdp_runners/RunGetdpMultipole.py +115 -42
  22. fiqus/getdp_runners/RunGetdpPancake3D.py +28 -6
  23. fiqus/mains/MainCCT.py +2 -2
  24. fiqus/mains/MainConductorAC_Strand.py +132 -132
  25. fiqus/mains/MainMultipole.py +113 -62
  26. fiqus/mains/MainPancake3D.py +63 -23
  27. fiqus/mesh_generators/MeshCCT.py +209 -209
  28. fiqus/mesh_generators/MeshConductorAC_Strand.py +656 -656
  29. fiqus/mesh_generators/MeshMultipole.py +1243 -181
  30. fiqus/mesh_generators/MeshPancake3D.py +275 -192
  31. fiqus/parsers/ParserCOND.py +825 -0
  32. fiqus/parsers/ParserDAT.py +16 -16
  33. fiqus/parsers/ParserGetDPOnSection.py +212 -212
  34. fiqus/parsers/ParserGetDPTimeTable.py +134 -134
  35. fiqus/parsers/ParserMSH.py +53 -53
  36. fiqus/parsers/ParserPOS.py +214 -214
  37. fiqus/parsers/ParserRES.py +142 -142
  38. fiqus/plotters/PlotPythonCCT.py +133 -133
  39. fiqus/plotters/PlotPythonConductorAC.py +855 -855
  40. fiqus/plotters/PlotPythonMultipole.py +18 -18
  41. fiqus/post_processors/PostProcessCCT.py +440 -440
  42. fiqus/post_processors/PostProcessConductorAC.py +49 -49
  43. fiqus/post_processors/PostProcessMultipole.py +353 -229
  44. fiqus/post_processors/PostProcessPancake3D.py +8 -13
  45. fiqus/pre_processors/PreProcessCCT.py +175 -175
  46. fiqus/pro_assemblers/ProAssembler.py +14 -6
  47. fiqus/pro_material_functions/ironBHcurves.pro +246 -246
  48. fiqus/pro_templates/combined/CCT_template.pro +274 -274
  49. fiqus/pro_templates/combined/ConductorAC_template.pro +1025 -1025
  50. fiqus/pro_templates/combined/Multipole_template.pro +1694 -126
  51. fiqus/pro_templates/combined/Pancake3D_template.pro +2294 -1103
  52. fiqus/pro_templates/combined/TSA_materials.pro +162 -0
  53. fiqus/pro_templates/combined/materials.pro +36 -18
  54. fiqus/utils/Utils.py +508 -110
  55. fiqus/utils/update_data_settings.py +33 -0
  56. fiqus-2024.12.1.dist-info/METADATA +132 -0
  57. fiqus-2024.12.1.dist-info/RECORD +84 -0
  58. {fiqus-2024.7.0.dist-info → fiqus-2024.12.1.dist-info}/WHEEL +1 -1
  59. tests/test_FiQuS.py +1 -1
  60. tests/test_geometry_generators.py +101 -2
  61. tests/test_mesh_generators.py +154 -1
  62. tests/test_solvers.py +115 -21
  63. tests/utils/fiqus_test_classes.py +85 -21
  64. tests/utils/generate_reference_files_ConductorAC.py +57 -57
  65. tests/utils/generate_reference_files_Pancake3D.py +4 -5
  66. tests/utils/helpers.py +97 -97
  67. fiqus-2024.7.0.dist-info/METADATA +0 -103
  68. fiqus-2024.7.0.dist-info/RECORD +0 -79
  69. {fiqus-2024.7.0.dist-info → fiqus-2024.12.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,825 @@
1
+ import re
2
+ import copy
3
+
4
+ import numpy as np
5
+ import json
6
+ from operator import itemgetter
7
+
8
+ class ParserCOND:
9
+ """
10
+ Class for operations on Opera compatible conductor files
11
+ """
12
def __init__(self, verbose=True):
    """
    Class for operations on Opera compatible conductor files.
    :param verbose: if True, progress messages are printed to stdout
    :type verbose: bool
    """
    self.verbose = verbose
    # Parameter names for each line of a BR8 (8-node brick) block.
    # The 'DEFINE' keyword is ignored, only the shape token is propagated.
    # 'DRIVELABEL' is handled separately since the label string may contain a space.
    self.br8 = [
        ['SHAPE'],
        ['XCENTRE', 'YCENTRE', 'ZCENTRE', 'PHI1', 'THETA1', 'PSI1'],
        ['XCEN2', 'YCEN2', 'ZCEN2'],
        ['THETA2', 'PHI2', 'PSI2'],
        ['XP1', 'YP1', 'ZP1'],
        ['XP2', 'YP2', 'ZP2'],
        ['XP3', 'YP3', 'ZP3'],
        ['XP4', 'YP4', 'ZP4'],
        ['XP5', 'YP5', 'ZP5'],
        ['XP6', 'YP6', 'ZP6'],
        ['XP7', 'YP7', 'ZP7'],
        ['XP8', 'YP8', 'ZP8'],
        ['CURD', 'SYMMETRY'],
        ['IRXY', 'IRYZ', 'IRZX'],
        ['TOLERANCE'],
    ]
    self.br8_def_txt = 'DEFINE BR8'
    self.drive_count = 0
    # Brick vertex numbers belonging to each face, in the order of _sur_names.
    self.vertices_to_surf = [[1, 5, 8, 4], [2, 6, 7, 3], [2, 6, 5, 1], [3, 7, 8, 4], [1, 2, 3, 4], [5, 6, 7, 8]]
    # Vertex pairs forming the edges crossing each face; only z direction supported for now.
    self.vertices_to_lines = [[[1, 2], [5, 6], [8, 7], [4, 3]], [[1, 2], [5, 6], [8, 7], [4, 3]], [[1, 4], [2, 3], [6, 7], [5, 8]], [[1, 4], [2, 3], [6, 7], [5, 8]], [[1, 5], [2, 6], [3, 7], [4, 8]], [[1, 5], [2, 6], [3, 7], [4, 8]]]
    self._sur_names = ['x-', 'x+', 'y-', 'y+', 'z-', 'z+']
    self._op_sign = {'+': '-', '-': '+'}   # maps a face sign to the opposite face sign
    self._sign = {'+': 1, '-': -1}         # numeric sign of a face direction
36
+
37
@staticmethod
def scale_bricks(cond_dict, factor):
    """
    Scales every brick of the conductor model by a common factor.
    :param cond_dict: conductor dictionary
    :param factor: scale factor, e.g. 0.001 to change conductor file from mm to m
    :return: conductor dictionary with scaled coordinates
    """
    append = False  # coordinates are overwritten in place, no new bricks are created
    for idx in range(len(cond_dict)):
        scaled = [p * factor for p in ParserCOND.get_points_cond_dict(cond_dict, idx)]
        cond_dict = ParserCOND.set_points_cond_dict(cond_dict, idx, append, *scaled)
    return cond_dict
54
+
55
@staticmethod
def merge_if_straight(bricks_dict, direction='z'):
    """
    Merges consecutive bricks lying on a straight line, i.e. bricks whose merge does not change the shape.
    :param bricks_dict: dictionary of bricks
    :type bricks_dict: dict
    :param direction: direction to look for, e.g. 'z'
    :type direction: str
    :return: dictionary of bricks, keys renumbered from 0
    :rtype: dict
    """
    coord_pos = {'x': 0, 'y': 1, 'z': 2}

    def _merge_and_delete(bricks_dict, brick_i_from, brick_i_to):
        # Copy the far face (points 5-8) of the last brick of the run onto the first brick, then drop the rest.
        first = bricks_dict[brick_i_from]
        last = bricks_dict[brick_i_to]
        for p_num in range(5, 9):
            for cord in ('XP', 'YP', 'ZP'):
                first[f'{cord}{p_num}'] = last[f'{cord}{p_num}']
        for brick_i in range(brick_i_from + 1, brick_i_to + 1):
            del bricks_dict[brick_i]
        return bricks_dict

    brick_i_list = copy.deepcopy(list(bricks_dict.keys()))
    print(f'Started with {len(brick_i_list)}')
    brick_i_from = -1
    brick_i_to = -1
    z = coord_pos['z']
    for brick_i in brick_i_list:
        P1, P2, P3, P4, P5, P6, P7, P8 = ParserCOND.get_points_cond_dict(bricks_dict, brick_i, bynumber=True)
        if direction != 'z':
            raise ValueError(f"Direction {direction} is not yet implemented!")
        # A brick is 'straight' when both its z- and z+ faces are planes of constant z.
        straight = (P1[z] == P2[z] == P3[z] == P4[z]) and (P5[z] == P6[z] == P7[z] == P8[z])
        if straight and brick_i_from <= 0:
            brick_i_from = brick_i
        if (not straight and brick_i_from > 0) or (straight and brick_i_from > 0 and brick_i == max(brick_i_list)):
            brick_i_to = brick_i - 1
        if brick_i_from > 0 and brick_i_to > 0:
            bricks_dict = _merge_and_delete(bricks_dict, brick_i_from, brick_i_to)
            brick_i_from = -1
            brick_i_to = -1
    combined_bricks_dict = dict(enumerate(bricks_dict.values()))
    print(f'Ended with {len(list(combined_bricks_dict.keys()))}')
    return combined_bricks_dict
109
+
110
def extend_terminals(self, cond_dict, extend_list):
    """
    Extends terminals by creating additional bricks starting at an index brick towards a direction,
    until a position in m, using a number of bricks to get there.
    :param cond_dict: conductor dictionary
    :type cond_dict: dict
    :param extend_list: [index, direction, until position, number bricks], e.g. [0, 'z-', -1.25, 8] or [-1, 'z+', 0.25, 8]
    :type extend_list: list
    :return: conductor dictionary
    :rtype: dict
    """
    additional_bricks = []
    for extend in extend_list:
        index, plane, coord_ext_to, n_bricks = extend[0], extend[1], extend[2], extend[3]
        index_brick = list(cond_dict.keys())[index]
        brick = cond_dict[index_brick]
        points = self.vertices_to_surf[self._sur_names.index(plane)]
        op_points = self.vertices_to_surf[self._sur_names.index(plane[0] + self._op_sign[plane[1]])]
        coord = 'Z'  # only extensions along z are supported
        coord_0 = float(brick[f'{coord}P{points[0]}'])
        # The extension face must be flat (constant z), otherwise the extruded bricks would change shape.
        for point in points[1:]:
            if float(brick[f'{coord}P{point}']) - coord_0 > 1e-6:
                raise ValueError(f"This method only works on planes parallel to extension direction. Use straighten bricks method of this class first")
        sign = -1 if coord_ext_to - coord_0 > 0 else 1
        coord_dist = abs(coord_ext_to - coord_0)
        bricks_new = {}
        for n_b in range(n_bricks):
            new_brick = copy.deepcopy(brick)
            for p, op in zip(points, op_points):
                # the previous face becomes the opposite face of the new brick
                for coord in ('X', 'Y', 'Z'):
                    new_brick[f'{coord}P{op}'] = brick[f'{coord}P{p}']
                    new_brick[f'{coord}P{p}'] = brick[f'{coord}P{p}']
                # only the z coordinate of the leading face is shifted by one step
                new_brick[f'ZP{p}'] = str(float(brick[f'ZP{p}']) - sign * coord_dist / n_bricks)
            brick = copy.deepcopy(new_brick)
            bricks_new[n_b] = new_brick
        additional_bricks.append(bricks_new)
    # Reassemble: reversed start extension, then the original bricks, then the end extension.
    out_cond_dict = {}
    idx = 0
    for key in reversed(list(additional_bricks[0].keys())):
        out_cond_dict[idx] = additional_bricks[0][key]
        idx += 1
    for brick in cond_dict.values():
        out_cond_dict[idx] = brick
        idx += 1
    for brick in additional_bricks[1].values():
        out_cond_dict[idx] = brick
        idx += 1
    return out_cond_dict
169
+
170
def add_short_bricks_for_connections(self, cond_dict, connect_list):
    """
    Adds one short brick at each specified end of the conductor; the thickness of the added brick
    equals the brick cross-section dimension along the given coordinate.
    :param cond_dict: conductor dictionary
    :type cond_dict: dict
    :param connect_list: [index, direction, by brick dim along], e.g. [0, 'z-', 'y'] or [-1, 'z+', 'x']
    :type connect_list: list
    :return: conductor dictionary
    :rtype: dict
    """
    additional_bricks = []
    for connect in connect_list:
        index, plane = connect[0], connect[1]
        index_brick = list(cond_dict.keys())[index]
        brick = cond_dict[index_brick]
        points = self.vertices_to_surf[self._sur_names.index(plane)]
        op_points = self.vertices_to_surf[self._sur_names.index(plane[0] + self._op_sign[plane[1]])]
        coord = str.upper(plane[0])
        coord_0 = float(brick[f'{coord}P{points[0]}'])
        # The end face must be flat, otherwise the extruded brick would change shape.
        for point in points[1:]:
            if float(brick[f'{coord}P{point}']) - coord_0 > 1e-6:
                raise ValueError(f"This method only works on planes parallel to extension direction. Use straighten bricks method of this class first")
        along_coord = connect[2]
        # Face centres of the two faces perpendicular to 'along_coord'; their distance sets the new brick thickness.
        points_along = self.vertices_to_surf[self._sur_names.index(f'{along_coord}+')]
        op_points_along = self.vertices_to_surf[self._sur_names.index(f'{along_coord}-')]
        point_a = []
        point_op = []
        for coord in ('X', 'Y', 'Z'):
            point_a.append(np.mean([float(brick[f'{coord}P{p}']) for p in points_along]))
            point_op.append(np.mean([float(brick[f'{coord}P{p}']) for p in op_points_along]))
        coord_dist = np.sqrt(sum((o - a) ** 2 for o, a in zip(point_op, point_a)))
        # NOTE(review): 'coord' is 'Z' here (left over from the loop above), so only z coordinates are shifted;
        # this matches the z-only support of the rest of the class.
        bricks_new = {}
        n_bricks = 1
        for n_b in range(n_bricks):
            new_brick = copy.deepcopy(brick)
            for p, op_p in zip(points, op_points):
                new_brick[f'{coord}P{op_p}'] = str(new_brick[f'{coord}P{p}'])
            for p in points:
                new_brick[f'{coord}P{p}'] = str(float(new_brick[f'{coord}P{p}']) + self._sign[plane[1]] * coord_dist / n_bricks)
            brick = copy.deepcopy(new_brick)
            bricks_new[n_b] = new_brick
        additional_bricks.append(bricks_new)
    # Reassemble: reversed start extension, then the original bricks, then the end extension.
    out_cond_dict = {}
    idx = 0
    for key in reversed(list(additional_bricks[0].keys())):
        out_cond_dict[idx] = additional_bricks[0][key]
        idx += 1
    for brick in cond_dict.values():
        out_cond_dict[idx] = brick
        idx += 1
    for brick in additional_bricks[1].values():
        out_cond_dict[idx] = brick
        idx += 1
    return out_cond_dict
238
+
239
def add_short_bricks_by_distance(self, cond_dict, short_brick_list):
    """
    Appends a short brick of a given thickness at the specified ends of the conductor.
    :param cond_dict: conductor dictionary
    :type cond_dict: dict
    :param short_brick_list: list of [index, direction, distance] entries, e.g. [0, 'top', 0.001]
    :type short_brick_list: list
    :return: conductor dictionary with keys renumbered from 0
    :rtype: dict
    """
    append = True  # new bricks are inserted, the existing ones stay unchanged
    for end in short_brick_list:
        idx, direction, distance = end
        new_points = ParserCOND()._extend_brick_size(cond_dict, append, hexa_idx=idx, extension_distance=distance, extension_direction=direction)
        cond_dict = ParserCOND.set_points_cond_dict(cond_dict, idx, append, *new_points)
    # renumber the keys consecutively from 0
    return {new_idx: cond_dict[key] for new_idx, key in enumerate(cond_dict.keys())}
258
+
259
+ @staticmethod
260
+ def resample_bricks(bricks_dict, f=1):
261
+ """
262
+ Combined number of bricks f into single brick
263
+ :param bricks_dict: dictionary of bricks
264
+ :type bricks_dict: dict
265
+ :param f: how many bricks are combined
266
+ :type f: int
267
+ :return: dictionary of bricks
268
+ :rtype: dict
269
+ """
270
+ num_bricks = len(bricks_dict.keys())
271
+ for brick_ii in range(num_bricks):
272
+ if brick_ii % f == 0:
273
+ if brick_ii + f < num_bricks:
274
+ brick_i_from = brick_ii
275
+ brick_i_to = brick_ii + f - 1
276
+ else:
277
+ brick_i_from = brick_ii
278
+ brick_i_to = num_bricks - 1
279
+ brick_from = bricks_dict[brick_i_from]
280
+ brick_to = bricks_dict[brick_i_to]
281
+ for p_num in range(4, 8):
282
+ for cord in ['XP', 'YP', 'ZP']:
283
+ brick_from[f'{cord}{p_num + 1}'] = brick_to[f'{cord}{p_num + 1}']
284
+ for brick_i in range(brick_i_from + 1, brick_i_to + 1):
285
+ del bricks_dict[brick_i]
286
+ elif brick_ii > brick_i:
287
+ if brick_ii < num_bricks - f:
288
+ del bricks_dict[brick_ii]
289
+ combined_bricks_dict = {}
290
+ for brick_new_i, brick in enumerate(bricks_dict.values()):
291
+ combined_bricks_dict[brick_new_i] = brick
292
+ #del combined_bricks_dict[brick_new_i]
293
+ return combined_bricks_dict
294
+
295
def get_br8_dict(self):
    """
    Creates a BR8 conductor dict with all parameters zeroed plus a few default values
    that are not yet used in FiQuS.
    :return: conductor dictionary
    :rtype: dict
    """
    dict_out = {key: str(0.0) for keys in self.br8 for key in keys}
    defaults = {'SHAPE': 'BR8', 'SYMMETRY': 1, 'IRXY': 0, 'IRYZ': 0, 'IRZX': 0, 'TOLERANCE': 1e-6}
    for key, value in defaults.items():
        dict_out[key] = str(value)
    return dict_out
308
+
309
def write_cond(self, input_dict, cond_file_path):
    """
    Writes a conductor dictionary to an Opera conductor file.
    :param input_dict: conductor dictionary
    :type input_dict: dict
    :param cond_file_path: full path to the output conductor file
    :type cond_file_path: str
    :return: None, only writes file on disk
    :rtype: None
    """
    if self.verbose:
        print(f'Writing: {cond_file_path}')
    with open(cond_file_path, mode='w') as f:
        f.write('CONDUCTOR' + '\n')
        for _, value in input_dict.items():
            if value['SHAPE'] in ('BR8', self.br8_def_txt):
                params_list = self.br8
                value['SHAPE'] = self.br8_def_txt
            else:
                raise ValueError(f"FiQuS ParserCOND can not parse parse {value['SHAPE']} shape, yet!")
            lines = []
            for params in params_list:
                line = ''
                for param in params:
                    line += value[param] + ' '
                    if param == 'SYMMETRY':
                        # the drive label follows SYMMETRY and is quoted since it may contain a space
                        line += f"'drive {str(self.drive_count)}'"
                # strip the trailing separator space and terminate the line
                lines.append(line.strip() + '\n')
            f.writelines(lines)
        f.write('QUIT')
    self.drive_count += 1
340
+
341
def read_cond(self, cond_file_path):
    """
    Reads an Opera conductor file and returns its content as a conductor dict.
    :param cond_file_path: full path to the input conductor file
    :type cond_file_path: str
    :return: conductor dictionary
    :rtype: dict
    """
    with open(cond_file_path, mode='r') as f:
        file_contents = f.read()
    file_contents = re.sub("'", '"', file_contents)  # normalise quotes (expected around the DRIVELABEL string)
    lines = re.split('\n', file_contents)

    if lines.pop(0) != 'CONDUCTOR':
        raise ValueError(f'The file {cond_file_path} is not a valid Opera conductor file!')
    if lines.pop(-1) != 'QUIT':
        raise ValueError(f'The file {cond_file_path} is not a valid Opera conductor file!')

    if lines[0] == self.br8_def_txt:
        parameters_lists = self.br8
    else:
        raise ValueError(f'FiQuS ParserCOND can not parse parse {lines[0]} shape, yet!')

    num_lines = len(parameters_lists)
    num_of_shapes, rest = divmod(len(lines), num_lines)
    if rest != 0:
        raise ValueError(f'FiQuS ParserCOND can not parse parse conductor file with mixed shape types, yet!')

    output_dict = {}
    for block_i in range(num_of_shapes):
        output_dict[block_i] = {}
        block_lines = lines[block_i * num_lines:(block_i + 1) * num_lines]
        for par_i, params_list_line in enumerate(parameters_lists):
            entry_list = re.split(' ', block_lines[par_i])
            if par_i == 0:
                # the first line is e.g. 'DEFINE BR8': keep only the shape token
                output_dict[block_i][params_list_line[0]] = entry_list[1]
            else:
                for par, entry in zip(params_list_line, entry_list):
                    output_dict[block_i][par] = entry
            if par_i == 12:
                # DRIVELABEL may contain a space, so take everything between the first quote
                # and the closing quote at the end of the line
                output_dict[block_i]['DRIVELABEL'] = block_lines[par_i][block_lines[par_i].find('"') + 1:-1]
    return output_dict
384
+
385
+ @staticmethod
386
+ def get_points_cond_dict(cond_dict, hexa=None, bynumber=False):
387
+ """
388
+ Gets point, defined as numpy array with three coordinates for the 8-noded brick
389
+ :param cond_dict: conductor dictionary
390
+ :type cond_dict: dict
391
+ :param hexa_idx: brick index
392
+ :type hexa_idx: int
393
+ :return: tuple with numpy arrays, each with tree coordinates of points in cartesian
394
+ :rtype: tuple with arrays
395
+ """
396
+ if bynumber:
397
+ hexa_number = hexa
398
+ else: # i.e. by index
399
+ hexa_number = list(cond_dict.keys())[hexa]
400
+ P1 = np.array([float(cond_dict[hexa_number]['XP1']), float(cond_dict[hexa_number]['YP1']), float(cond_dict[hexa_number]['ZP1'])])
401
+ P2 = np.array([float(cond_dict[hexa_number]['XP2']), float(cond_dict[hexa_number]['YP2']), float(cond_dict[hexa_number]['ZP2'])])
402
+ P3 = np.array([float(cond_dict[hexa_number]['XP3']), float(cond_dict[hexa_number]['YP3']), float(cond_dict[hexa_number]['ZP3'])])
403
+ P4 = np.array([float(cond_dict[hexa_number]['XP4']), float(cond_dict[hexa_number]['YP4']), float(cond_dict[hexa_number]['ZP4'])])
404
+ P5 = np.array([float(cond_dict[hexa_number]['XP5']), float(cond_dict[hexa_number]['YP5']), float(cond_dict[hexa_number]['ZP5'])])
405
+ P6 = np.array([float(cond_dict[hexa_number]['XP6']), float(cond_dict[hexa_number]['YP6']), float(cond_dict[hexa_number]['ZP6'])])
406
+ P7 = np.array([float(cond_dict[hexa_number]['XP7']), float(cond_dict[hexa_number]['YP7']), float(cond_dict[hexa_number]['ZP7'])])
407
+ P8 = np.array([float(cond_dict[hexa_number]['XP8']), float(cond_dict[hexa_number]['YP8']), float(cond_dict[hexa_number]['ZP8'])])
408
+ return P1, P2, P3, P4, P5, P6, P7, P8
409
+
410
+ @staticmethod
411
+ def set_points_cond_dict(cond_dict, hexa_idx, append, P1, P2, P3, P4, P5, P6, P7, P8):
412
+ """
413
+ Sets point, defined as numpy array with three coordinates for the 8-noded brick
414
+ :param cond_dict: conductor dictionary
415
+ :type cond_dict: dict
416
+ :param hexa_idx: brick index
417
+ :type hexa_idx: int
418
+ :return: tuple with numpy arrays, each with tree coordinates of points in cartesian
419
+ :rtype: tuple with arrays
420
+ """
421
+ points = [P1, P2, P3, P4, P5, P6, P7, P8]
422
+ point_idx = [1, 2, 3, 4, 5, 6, 7, 8]
423
+ coords = ['XP', 'YP', 'ZP']
424
+ coord_idx = [0, 1, 2]
425
+ hexa_number = list(cond_dict.keys())[hexa_idx]
426
+ if append:
427
+ hexa = copy.deepcopy(cond_dict[hexa_number])
428
+ else:
429
+ hexa = cond_dict[hexa_number]
430
+ for point, point_i in zip(points, point_idx):
431
+ for corr, corr_i in zip(coords, coord_idx):
432
+ hexa[f'{corr}{point_i}'] = str(point[corr_i])
433
+
434
+ if append:
435
+ if hexa_idx == 0:
436
+ new_hexa_idx = hexa_number-1
437
+ elif hexa_idx == -1:
438
+ new_hexa_idx = hexa_number+1
439
+ cond_dict[new_hexa_idx] = hexa
440
+ cond_dict = dict(sorted(cond_dict.items(), key=lambda x: int(x[0])))
441
+ return cond_dict
442
+
443
@staticmethod
def _extend_brick_size(cond_dict, append, hexa_idx, extension_distance=0, extension_direction='top'):
    """
    Computes the 8 corner points of a brick extended by a distance in a given direction.
    :param cond_dict: conductor dictionary
    :type cond_dict: dict
    :param append: if True the moved face starts from the opposite face (producing the points of a new,
                   thin brick), otherwise the existing corner points are shifted
    :type append: bool
    :param hexa_idx: brick index
    :type hexa_idx: int
    :param extension_distance: distance to extend the brick
    :type extension_distance: float
    :param extension_direction: one of 'top', 'bottom', 'close', 'far', 'west', 'east' or 'none'
    :type extension_direction: str
    :return: tuple with numpy arrays P1..P8, each with three cartesian coordinates
    :rtype: tuple
    """
    P = dict(zip(range(1, 9), ParserCOND.get_points_cond_dict(cond_dict, hexa_idx)))
    # For each direction: (a, b) pairs where the edge direction is P[a] - P[b];
    # in append mode P[a] first collapses onto P[b], then P[b] is pushed out along that edge direction.
    pairs_by_direction = {
        'top': [(4, 1), (3, 2), (8, 5), (7, 6)],     # north
        'bottom': [(1, 4), (2, 3), (5, 8), (6, 7)],  # south
        'close': [(1, 5), (2, 6), (3, 7), (4, 8)],
        'far': [(5, 1), (6, 2), (7, 3), (8, 4)],
        'west': [(4, 3), (8, 7), (5, 6), (1, 2)],
        'east': [(3, 4), (7, 8), (6, 5), (2, 1)],
    }
    if extension_direction != 'none':
        if extension_direction not in pairs_by_direction:
            raise Exception(f"Only extension_direction='top', 'bottom', 'close', 'far' or 'none are supported, but the {extension_direction} was requested!")
        pairs = pairs_by_direction[extension_direction]
        # all edge directions are evaluated before any point is moved
        directions = [P[a] - P[b] for a, b in pairs]
        if append:
            for a, b in pairs:
                P[a] = P[b].copy()
        for (a, b), line_direction in zip(pairs, directions):
            P[b] = P[b] + line_direction / np.linalg.norm(line_direction) * extension_distance
    return tuple(P[i] for i in range(1, 9))
549
+
550
@staticmethod
def extend_brick_idx(cond_dict, list_for_extension):
    """
    Extends (or, with a negative distance, shortens) selected bricks in place.
    Each entry of list_for_extension is [idx, 'direction', distance], for example [0, 'far', 0.0015].
    :return: conductor dictionary
    """
    append = False  # existing bricks are modified, nothing is inserted
    for end in list_for_extension:
        idx, direction, distance = end
        new_points = ParserCOND()._extend_brick_size(cond_dict, append, hexa_idx=idx, extension_distance=distance, extension_direction=direction)
        cond_dict = ParserCOND.set_points_cond_dict(cond_dict, idx, append, *new_points)
    return cond_dict
562
+
563
@staticmethod
def extend_all_bricks(cond_dict, extension_distance=0.0, extension_direction='top', trim_list=None):
    """
    Extends every brick (optionally excluding some at either end via trim_list) by extension_distance
    in the out-pointing normal of the extension_direction surface.
    :param cond_dict: conductor dictionary with bricks to extend
    :type cond_dict: dict
    :param extension_distance: distance to extend in m
    :type extension_distance: float
    :param extension_direction: extension direction as a keyword, see _extend_brick_size
    :type extension_direction: str
    :param trim_list: slice [start, stop] selecting which bricks get extended; None or [None, None]
                      extends all, [None, -1] skips the last one
    :type trim_list: list
    :return: conductor dictionary with extended bricks
    :rtype: dict
    """
    if trim_list is None:  # avoid a mutable default argument
        trim_list = [None, None]
    append = False
    selected_keys = list(cond_dict.keys())[trim_list[0]:trim_list[1]]
    cond_dict_to_ext = {key: cond_dict[key] for key in selected_keys}
    for idx, _ in enumerate(list(cond_dict_to_ext.keys())):
        new_points = ParserCOND()._extend_brick_size(cond_dict_to_ext, append, hexa_idx=idx, extension_distance=extension_distance, extension_direction=extension_direction)
        cond_dict_to_ext = ParserCOND.set_points_cond_dict(cond_dict_to_ext, idx, append, *new_points)
    # BUGFIX: write each extended brick back under its own key; previously the stale loop
    # variable 'key' was used, so only a single entry was (re)assigned.
    for key, brick in cond_dict_to_ext.items():
        cond_dict[key] = brick
    return cond_dict
589
+
590
+ @staticmethod
591
+ def trim_cond_dict(cond_dict, t_from, t_to):
592
+ """
593
+ Function to split conductor dictionary using t_from and t_to integers
594
+ :param cond_dict: conductor dictionary
595
+ :type cond_dict: dict
596
+ :param t_from: output bricks starting from this index
597
+ :type t_from: int
598
+ :param t_to: output bricks up to this index
599
+ :type t_to: int
600
+ :return: trimmed conductor dictionary
601
+ :rtype: dict
602
+ """
603
+ hex_list = list(cond_dict.keys())
604
+ if t_to == 0:
605
+ t_to = None # this is to give all the last elements, i.e. no trimming from the end.
606
+ elif t_to == -1:
607
+ t_to = None
608
+ trimmed_hex_list = hex_list[t_from:t_to]
609
+ trimmed_cond_dict = {}
610
+ for key in trimmed_hex_list:
611
+ trimmed_cond_dict[key] = cond_dict[key]
612
+ return trimmed_cond_dict
613
+
614
def write_json(self, json_file_path):
    """
    Writes the conductor bricks to a json file. Only used for testing the parser conductor functionality.
    :param json_file_path: path to the json output file
    :type json_file_path: str
    :return: none, only writes file to disk
    :rtype: none
    """
    # NOTE(review): relies on self.bricks being assigned elsewhere before this call — confirm with callers.
    # BUGFIX: the file handle was previously opened inline and never closed; use a context manager.
    with open(json_file_path, 'w') as f:
        json.dump(self.bricks, f, sort_keys=False)
623
+
624
+ @staticmethod
625
+ def read_json(json_file_path):
626
+ """
627
+ Method for reading the json file. The string values for the key names in json are converted to integers
628
+ :param json_file_path: full path to json file
629
+ :type json_file_path: str
630
+ :return: dictionary with values read from json, with keys as integers
631
+ :rtype: dict
632
+ """
633
+ def jsonKeys2int(x):
634
+ """
635
+ Helper function for converting keys from strings to integers
636
+ :param x: input dictionary
637
+ :type x: dict
638
+ :return: dictionary with key changed from str to int
639
+ :rtype: dict
640
+ """
641
+ return {int(k): v for k, v in x.items()} # change dict keys from strings to integers
642
+ return jsonKeys2int(json.load(open(json_file_path)))
643
+
644
@staticmethod
def merge_conductor_dicts(cond_dict_list):
    """
    Merges several conductor dictionaries into one, renumbering the bricks
    consecutively starting from 1.
    :param cond_dict_list: list of conductor dictionaries to merge
    :type cond_dict_list: list
    :return: merged conductor dictionary with integer keys starting at 1
    :rtype: dict
    """
    all_bricks = (brick for cond_dict in cond_dict_list for brick in cond_dict.values())
    return {new_index: brick for new_index, brick in enumerate(all_bricks, start=1)}
653
+
654
@staticmethod
def reverse_bricks(cond_dict):
    """
    Reverses the order of brick values while keeping the original key sequence.
    :param cond_dict: conductor dictionary (modified in place)
    :type cond_dict: dict
    :return: the same conductor dictionary with values in reversed order
    :rtype: dict
    """
    ordered_keys = list(cond_dict)
    flipped_values = [cond_dict[k] for k in ordered_keys][::-1]
    for k, v in zip(ordered_keys, flipped_values):
        cond_dict[k] = v
    return cond_dict
667
+
668
@staticmethod
def make_layer_jump_between(cond_dict_1, cond_dict_2, idx_from_to):
    """
    Creates a layer jump by averaging the first four corner points (P1..P4) of one
    brick in each conductor dictionary, so the two bricks meet at a shared face.
    :param cond_dict_1: conductor dictionary of the first layer (modified in place)
    :type cond_dict_1: dict
    :param cond_dict_2: conductor dictionary of the second layer (modified in place)
    :type cond_dict_2: dict
    :param idx_from_to: pair [idx_from, idx_to] selecting the brick in each dictionary
    :type idx_from_to: list
    :return: the two modified conductor dictionaries
    :rtype: tuple
    """
    idx_from, idx_to = idx_from_to
    points_from = ParserCOND().get_points_cond_dict(cond_dict_1, hexa=idx_from, bynumber=False)
    points_to = ParserCOND().get_points_cond_dict(cond_dict_2, hexa=idx_to, bynumber=False)
    # Component-wise average of the first four corners of both bricks.
    averaged = [[(c_from + c_to) / 2 for c_from, c_to in zip(p_from, p_to)]
                for p_from, p_to in zip(points_from[:4], points_to[:4])]
    # Both bricks keep their own P5..P8 and share the averaged P1..P4 face.
    ParserCOND().set_points_cond_dict(cond_dict_1, idx_from, False, *averaged, *points_from[4:])
    ParserCOND().set_points_cond_dict(cond_dict_2, idx_to, False, *averaged, *points_to[4:])
    return cond_dict_1, cond_dict_2
687
+
688
@staticmethod
def combine_bricks(cond_dict, from_to_list):
    """
    Combines bricks into single brick approximating its size by taking the corners of the first and last surface
    :param cond_dict: conductor dictionary input
    :type cond_dict: dict
    :param from_to_list: list of lists specifying indexes at the start and end of the dictionary, e.g. [[0, 2], [-3, -1]] means combined brick from 0th to 2nd at the start and from -3rd to -1st at the end.
    :type from_to_list: list
    :return: conductor dictionary output
    :rtype: dict
    """
    # NOTE(review): the range-based deletion below assumes the dictionary keys are
    # consecutive integers between brick_i_from and brick_i_to -- TODO confirm with callers.
    for soe_i, p_nums in zip([0, 1], [range(0, 4), range(4, 8)]):  # soe = start or end (of the winding)
        # Resolve the (possibly negative) positional indexes from from_to_list to actual dict keys.
        brick_i_from = list(cond_dict.keys())[from_to_list[soe_i][0]]
        brick_i_to = list(cond_dict.keys())[from_to_list[soe_i][1]]
        brick_from = cond_dict[brick_i_from]
        brick_to = cond_dict[brick_i_to]
        # Copy the face corners (points 1-4 at the start, points 5-8 at the end) so the
        # surviving brick spans the full extent of the combined range.
        for p_num in p_nums:
            for cord in ['XP', 'YP', 'ZP']:
                if soe_i == 0:  # start
                    brick_to[f'{cord}{p_num + 1}'] = brick_from[f'{cord}{p_num + 1}']
                elif soe_i == 1:  # end
                    brick_from[f'{cord}{p_num + 1}'] = brick_to[f'{cord}{p_num + 1}']
        # Delete the bricks absorbed into the combined one; the soe_i offset keeps the
        # surviving brick (brick_to at the start, brick_from at the end) out of the range.
        for brick_i in range(brick_i_from + soe_i, brick_i_to + soe_i):
            del cond_dict[brick_i]
    # Renumber the remaining bricks consecutively from 0.
    combined_bricks_dict = {}
    for brick_new_i, brick in enumerate(cond_dict.values()):
        combined_bricks_dict[brick_new_i] = brick
    return combined_bricks_dict
716
+
717
def straighten_brick(self, cond_dict, index_and_plane_list):
    """
    Projects the corner points of an end brick onto a plane of constant Z (the mean Z
    of the selected face), so that the face of the brick becomes flat.
    :param cond_dict: conductor dictionary
    :type cond_dict: dict
    :param index_and_plane_list: this is list, typically [0, 'z-'] or [-1, 'z+']. Index can be either 0 or -1 and plane can be either 'z-' or 'z+'
    :type index_and_plane_list: list
    :return: conductor dictionary
    :rtype: dict
    """
    for index_and_plane in index_and_plane_list:
        index = index_and_plane[0]
        if index not in [0, -1]:
            raise ValueError(f'Index can be either 0 or -1, but {index} was given!')
        plane = index_and_plane[1]
        # if plane not in ['z-', 'z+']:
        #     raise ValueError(f"Plane can be either 'z-' or 'z+', but {plane} was given!")

        # index 0 selects the first brick, -1 the last brick of the dictionary.
        brick_index = list(cond_dict.keys())[index_and_plane[0]]
        brick = cond_dict[brick_index]
        # Corner point numbers and the edge lines that belong to the requested face.
        points = self.vertices_to_surf[self._sur_names.index(plane)]
        lines = self.vertices_to_lines[self._sur_names.index(plane)]
        coord = 'Z' #str.upper(plane[0])  # straightening is only performed along Z
        values = []
        def find_intersection_point(line, coord, z):
            # Intersects the brick edge 'line' (pair of corner point numbers) with the
            # plane coord == z. Coordinates are stored as strings in the brick dict,
            # hence the float()/str() round-trips.
            v = {'X': float(brick[f'XP{line[1]}']) - float(brick[f'XP{line[0]}']),
                 'Y': float(brick[f'YP{line[1]}']) - float(brick[f'YP{line[0]}']),
                 'Z': float(brick[f'ZP{line[1]}']) - float(brick[f'ZP{line[0]}'])}
            # Parametric position of the intersection along the edge.
            t = (z - float(brick[f'{coord}P{line[0]}'])) / v[coord]
            x = float(brick[f'XP{line[0]}']) + t * v['X']
            y = float(brick[f'YP{line[0]}']) + t * v['Y']
            return (str(x), str(y), str(z))
        # Target plane: mean Z over the corners of the selected face.
        for point in points:
            value = float(brick[f'{coord}P{point}'])
            values.append(value)
        z = np.mean(values)
        # Slide every face corner along its edge until it lies in the target plane.
        for point, line in zip(points, lines):
            brick[f'XP{point}'], brick[f'YP{point}'], brick[f'ZP{point}'] = find_intersection_point(line, coord, z)
        cond_dict[brick_index] = brick
    return cond_dict
756
+
757
+ @staticmethod
758
+ def _make_combined_dict(from_cond_dict, to_cond_dict):
759
+ new_key = 0
760
+ from_cond_dict_out = {}
761
+ for brick in from_cond_dict.values():
762
+ from_cond_dict_out[new_key]=brick
763
+ new_key+=1
764
+ combined_dict = {}
765
+ combined_dict.update({f'{key}': value for key, value in from_cond_dict_out.items()})
766
+ last_key = list(from_cond_dict_out.keys())[-1]
767
+ combined_dict.update({f'{key+last_key+1}': value for key, value in to_cond_dict.items()})
768
+ return combined_dict
769
+
770
def add_link_brick(self, twin_cond_dict, lists_for_connections, skip=False):
    """
    Adds link bricks
    :param twin_cond_dict: twin dictionary of type {first_winding_name: first_winding_bricks_dict, second_winding_name: second_winding_bricks_dict}
    :type twin_cond_dict: dict
    :param lists_for_connections: list of lists of lists of type [[[-1, 'y-', False], [-1, 'y-', True]], [[0, 'y-', False], [0, 'y-', True]]], where:
    [[start terminals],[end terminals]], for terminal end: [[brick id from, surface direction from, swap points to opposite side flag from], [brick id to, surface direction to, swap points to opposite side flag to]]
    :type lists_for_connections: list
    :param skip: when True the link bricks are built but not inserted into the first conductor dictionary
    :type skip: bool
    :return: the first conductor dictionary (with link bricks inserted unless skip), the second conductor dictionary, and the combined dictionary of both
    :rtype: tuple
    """
    if len(twin_cond_dict) != 2:
        raise ValueError(f'The twin_cond_dict can only contain two conductor sets, but it contains: {len(twin_cond_dict)} conductor sets')

    new_bricks_dict = {}
    for idx, end in enumerate(lists_for_connections):
        # 'from' side: brick and face in the first conductor set.
        from_cond_dict = list(twin_cond_dict.values())[0]
        from_def = end[0]
        from_brick_index = list(from_cond_dict.keys())[from_def[0]]
        from_brick = from_cond_dict[from_brick_index]
        # Corner point numbers of the selected face and of the opposite face
        # (e.g. 'y-' -> 'y+' via the _op_sign lookup).
        from_points = self.vertices_to_surf[self._sur_names.index(from_def[1])]
        from_points_op = self.vertices_to_surf[self._sur_names.index(from_def[1][0] + self._op_sign[from_def[1][1]])]
        # 'to' side: brick and face in the second conductor set.
        to_cond_dict = list(twin_cond_dict.values())[1]
        to_def = end[1]
        to_brick_index = list(to_cond_dict.keys())[to_def[0]]
        to_brick = to_cond_dict[to_brick_index]
        to_points = self.vertices_to_surf[self._sur_names.index(to_def[1])]
        to_points_op = self.vertices_to_surf[self._sur_names.index(to_def[1][0] + self._op_sign[to_def[1][1]])]
        # The link brick starts as a copy of the 'from' brick; its faces are then
        # overwritten to span between the two windings.
        new_brick = copy.deepcopy(from_brick)

        for coord in ['X', 'Y', 'Z']:
            if from_def[2]:  # swap points to opposite side flag from
                # Copy the 'from' face onto the opposite face, swapping point pairs
                # (1<->2, 3<->4 ordering) to keep the brick orientation consistent.
                for d, s in zip(from_points_op, [from_points[1], from_points[0], from_points[3], from_points[2]]):
                    new_brick[f'{coord}P{d}'] = from_brick[f'{coord}P{s}']
            else:
                # NOTE(review): zip(from_points, from_points) copies each face point
                # onto itself -- effectively a no-op kept for symmetry with the swap branch.
                for d, s in zip(from_points, from_points):
                    new_brick[f'{coord}P{d}'] = from_brick[f'{coord}P{s}']
            if to_def[2]:  # swap points to opposite side flag to
                for d, s in zip(to_points, list(reversed(to_points_op))):
                    new_brick[f'{coord}P{d}'] = to_brick[f'{coord}P{s}']
            else:
                for d, s in zip(to_points, to_points):
                    new_brick[f'{coord}P{d}'] = to_brick[f'{coord}P{s}']
        new_bricks_dict[idx] = new_brick

    if not skip:
        # Insert the link bricks into the first conductor dictionary: the first one
        # just before its first key, the second one just after its second-to-last key.
        for key, new_brick in new_bricks_dict.items():
            if key == 0:
                first_key = list(list(twin_cond_dict.values())[0].keys())[0]
                from_cond_dict[first_key - 1] = new_brick
            else:
                last_key = list(list(twin_cond_dict.values())[0].keys())[-2]
                from_cond_dict[last_key + 1] = new_brick
        # Restore ascending key order after the out-of-order insertions.
        from_cond_dict = {key: from_cond_dict[key] for key in sorted(from_cond_dict)}
    combined_dict = ParserCOND._make_combined_dict(from_cond_dict, to_cond_dict)
    return from_cond_dict, to_cond_dict, combined_dict