pyedb 0.28.0__py3-none-any.whl → 0.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyedb might be problematic; see the registry's advisory page for more details.

Files changed (33):
  1. pyedb/__init__.py +29 -3
  2. pyedb/configuration/cfg_boundaries.py +44 -74
  3. pyedb/configuration/cfg_common.py +1 -1
  4. pyedb/configuration/cfg_components.py +31 -105
  5. pyedb/configuration/cfg_data.py +4 -9
  6. pyedb/configuration/cfg_general.py +19 -8
  7. pyedb/configuration/cfg_operations.py +14 -10
  8. pyedb/configuration/cfg_padstacks.py +41 -61
  9. pyedb/configuration/cfg_ports_sources.py +4 -2
  10. pyedb/configuration/cfg_s_parameter_models.py +85 -29
  11. pyedb/configuration/cfg_setup.py +5 -0
  12. pyedb/configuration/cfg_stackup.py +2 -6
  13. pyedb/configuration/configuration.py +42 -9
  14. pyedb/dotnet/edb.py +116 -57
  15. pyedb/dotnet/edb_core/cell/hierarchy/component.py +202 -0
  16. pyedb/dotnet/edb_core/cell/layout.py +2 -13
  17. pyedb/dotnet/edb_core/cell/primitive/primitive.py +10 -2
  18. pyedb/dotnet/edb_core/cell/terminal/terminal.py +4 -3
  19. pyedb/dotnet/edb_core/components.py +1 -2
  20. pyedb/dotnet/edb_core/definition/component_def.py +17 -1
  21. pyedb/dotnet/edb_core/definition/component_model.py +0 -4
  22. pyedb/dotnet/edb_core/edb_data/hfss_extent_info.py +3 -3
  23. pyedb/dotnet/edb_core/edb_data/layer_data.py +95 -1
  24. pyedb/dotnet/edb_core/edb_data/nets_data.py +10 -7
  25. pyedb/dotnet/edb_core/edb_data/padstacks_data.py +67 -5
  26. pyedb/dotnet/edb_core/layout_validation.py +27 -4
  27. pyedb/dotnet/edb_core/nets.py +162 -181
  28. pyedb/dotnet/edb_core/padstack.py +0 -1
  29. pyedb/siwave.py +5 -1
  30. {pyedb-0.28.0.dist-info → pyedb-0.30.0.dist-info}/METADATA +3 -3
  31. {pyedb-0.28.0.dist-info → pyedb-0.30.0.dist-info}/RECORD +33 -33
  32. {pyedb-0.28.0.dist-info → pyedb-0.30.0.dist-info}/LICENSE +0 -0
  33. {pyedb-0.28.0.dist-info → pyedb-0.30.0.dist-info}/WHEEL +0 -0
@@ -24,37 +24,93 @@ from pathlib import Path
24
24
 
25
25
 
26
26
  class CfgSParameterModel:
27
- def __init__(self, pdata, path_lib, sparam_dict):
28
- self._pedb = pdata._pedb
27
+ def __init__(self, **kwargs):
28
+ self.name = kwargs.get("name", "")
29
+ self.component_definition = kwargs.get("component_definition", "")
30
+ self.file_path = kwargs.get("file_path", "")
31
+ self.apply_to_all = kwargs.get("apply_to_all", False)
32
+ self.components = kwargs.get("components", [])
33
+ self.reference_net = kwargs.get("reference_net", "")
34
+ self.reference_net_per_component = kwargs.get("reference_net_per_component", {})
35
+ self.pin_order = kwargs.get("pin_order", None)
36
+
37
+
38
+ class CfgSParameters:
39
+ def __init__(self, pedb, data, path_lib=None):
40
+ self._pedb = pedb
29
41
  self.path_libraries = path_lib
30
- self._sparam_dict = sparam_dict
31
- self.name = self._sparam_dict.get("name", "")
32
- self.component_definition = self._sparam_dict.get("component_definition", "")
33
- self.file_path = self._sparam_dict.get("file_path", "")
34
- self.apply_to_all = self._sparam_dict.get("apply_to_all", False)
35
- self.components = self._sparam_dict.get("components", [])
36
- self.reference_net = self._sparam_dict.get("reference_net", "")
37
- self.reference_net_per_component = self._sparam_dict.get("reference_net_per_component", {})
42
+ self.s_parameters_models = [CfgSParameterModel(**i) for i in data]
38
43
 
39
44
  def apply(self):
40
- fpath = self.file_path
41
- if not Path(fpath).anchor:
42
- fpath = str(Path(self.path_libraries) / fpath)
43
- comp_def = self._pedb.definitions.component[self.component_definition]
44
- comp_def.add_n_port_model(fpath, self.name)
45
- comp_list = dict()
46
- if self.apply_to_all:
47
- comp_list.update(
48
- {refdes: comp for refdes, comp in comp_def.components.items() if refdes not in self.components}
49
- )
50
- else:
51
- comp_list.update(
52
- {refdes: comp for refdes, comp in comp_def.components.items() if refdes in self.components}
53
- )
45
+ for s_param in self.s_parameters_models:
46
+ fpath = s_param.file_path
47
+ if not Path(fpath).anchor:
48
+ fpath = str(Path(self.path_libraries) / fpath)
49
+ comp_def = self._pedb.definitions.component[s_param.component_definition]
50
+ if s_param.pin_order:
51
+ comp_def.set_properties(pin_order=s_param.pin_order)
52
+ comp_def.add_n_port_model(fpath, s_param.name)
53
+ comp_list = dict()
54
+ if s_param.apply_to_all:
55
+ comp_list.update(
56
+ {refdes: comp for refdes, comp in comp_def.components.items() if refdes not in s_param.components}
57
+ )
58
+ else:
59
+ comp_list.update(
60
+ {refdes: comp for refdes, comp in comp_def.components.items() if refdes in s_param.components}
61
+ )
54
62
 
55
- for refdes, comp in comp_list.items():
56
- if refdes in self.reference_net_per_component:
57
- ref_net = self.reference_net_per_component[refdes]
63
+ for refdes, comp in comp_list.items():
64
+ if refdes in s_param.reference_net_per_component:
65
+ ref_net = s_param.reference_net_per_component[refdes]
66
+ else:
67
+ ref_net = s_param.reference_net
68
+ comp.use_s_parameter_model(s_param.name, reference_net=ref_net)
69
+
70
+ def get_data_from_db(self):
71
+ db_comp_def = self._pedb.definitions.component
72
+ for name, compdef_obj in db_comp_def.items():
73
+ nport_models = compdef_obj.component_models
74
+ if not nport_models:
75
+ continue
58
76
  else:
59
- ref_net = self.reference_net
60
- comp.use_s_parameter_model(self.name, reference_net=ref_net)
77
+ pin_order = compdef_obj.get_properties()["pin_order"]
78
+ temp_comps = compdef_obj.components
79
+ for model_name, model_obj in nport_models.items():
80
+ temp_comp_list = []
81
+ reference_net_per_component = {}
82
+ for i in temp_comps.values():
83
+ s_param_model = i.model_properties.get("s_parameter_model")
84
+ if s_param_model:
85
+ if s_param_model["model_name"] == model_name:
86
+ temp_comp_list.append(i.refdes)
87
+ reference_net_per_component[i.refdes] = s_param_model["reference_net"]
88
+ else:
89
+ continue
90
+
91
+ self.s_parameters_models.append(
92
+ CfgSParameterModel(
93
+ name=model_name,
94
+ component_definition=name,
95
+ file_path=model_obj.reference_file,
96
+ apply_to_all=False,
97
+ components=temp_comp_list,
98
+ reference_net_per_component=reference_net_per_component,
99
+ pin_order=pin_order,
100
+ )
101
+ )
102
+
103
+ data = []
104
+ for i in self.s_parameters_models:
105
+ data.append(
106
+ {
107
+ "name": i.name,
108
+ "component_definition": i.component_definition,
109
+ "file_path": i.file_path,
110
+ "apply_to_all": i.apply_to_all,
111
+ "components": i.components,
112
+ "reference_net_per_component": i.reference_net_per_component,
113
+ "pin_order": i.pin_order,
114
+ }
115
+ )
116
+ return data
@@ -177,6 +177,11 @@ class CfgSetups:
177
177
  def get_data_from_db(self):
178
178
  setups = []
179
179
  for _, s in self._pedb.setups.items():
180
+ if float(self._pedb.edbversion) < 2025.1:
181
+ if not s.type == "hfss":
182
+ self._pedb.logger.warning("Only HFSS setups are exported in 2024 R2 and earlier version.")
183
+ continue
184
+
180
185
  stp = {}
181
186
  if s.type == "hfss":
182
187
  for p_name in CfgHFSSSetup(self._pedb).__dict__:
@@ -44,6 +44,7 @@ class CfgLayer(CfgBase):
44
44
  self.material = kwargs.get("material", None)
45
45
  self.fill_material = kwargs.get("fill_material", None)
46
46
  self.thickness = kwargs.get("thickness", None)
47
+ self.roughness = kwargs.get("roughness", None)
47
48
 
48
49
 
49
50
  class CfgStackup:
@@ -139,12 +140,7 @@ class CfgStackup:
139
140
  def get_layers_from_db(self):
140
141
  layers = []
141
142
  for name, obj in self._pedb.stackup.all_layers.items():
142
- layer = {}
143
- for p_name in CfgLayer().__dict__:
144
- p_value = getattr(obj, p_name, None)
145
- if p_value is not None:
146
- layer[p_name] = getattr(obj, p_name)
147
- layers.append(layer)
143
+ layers.append(obj.properties)
148
144
  return layers
149
145
 
150
146
  def get_data_from_db(self):
@@ -106,6 +106,9 @@ class Configuration:
106
106
  def run(self, **kwargs):
107
107
  """Apply configuration settings to the current design"""
108
108
 
109
+ if self.cfg_data.general:
110
+ self.cfg_data.general.apply()
111
+
109
112
  # Configure boundary settings
110
113
  if self.cfg_data.boundaries:
111
114
  self.cfg_data.boundaries.apply()
@@ -117,10 +120,6 @@ class Configuration:
117
120
  # Configure components
118
121
  self.cfg_data.components.apply()
119
122
 
120
- # Configure padstacks
121
- if self.cfg_data.padstacks:
122
- self.cfg_data.padstacks.apply()
123
-
124
123
  # Configure pin groups
125
124
  self.cfg_data.pin_groups.apply()
126
125
 
@@ -145,9 +144,12 @@ class Configuration:
145
144
  else:
146
145
  self.cfg_data.stackup.apply()
147
146
 
147
+ # Configure padstacks
148
+ if self.cfg_data.padstacks:
149
+ self.cfg_data.padstacks.apply()
150
+
148
151
  # Configure S-parameter
149
- for s_parameter_model in self.cfg_data.s_parameters:
150
- s_parameter_model.apply()
152
+ self.cfg_data.s_parameters.apply()
151
153
 
152
154
  # Configure SPICE models
153
155
  for spice_model in self.cfg_data.spice_models:
@@ -273,6 +275,8 @@ class Configuration:
273
275
  """
274
276
  self._pedb.logger.info("Getting data from layout database.")
275
277
  data = {}
278
+ if kwargs.get("general", False):
279
+ data["general"] = self.cfg_data.general.get_data_from_db()
276
280
  if kwargs.get("stackup", False):
277
281
  data["stackup"] = self.cfg_data.stackup.get_data_from_db()
278
282
  if kwargs.get("package_definitions", False):
@@ -293,6 +297,10 @@ class Configuration:
293
297
  data["operations"] = self.cfg_data.operations.get_data_from_db()
294
298
  if kwargs.get("padstacks", False):
295
299
  data["padstacks"] = self.cfg_data.padstacks.get_data_from_db()
300
+ if kwargs.get("s_parameters", False):
301
+ data["s_parameters"] = self.cfg_data.s_parameters.get_data_from_db()
302
+ if kwargs.get("boundaries", False):
303
+ data["boundaries"] = self.cfg_data.boundaries.get_data_from_db()
296
304
 
297
305
  return data
298
306
 
@@ -307,6 +315,11 @@ class Configuration:
307
315
  nets=True,
308
316
  pin_groups=True,
309
317
  operations=True,
318
+ components=True,
319
+ boundaries=True,
320
+ s_parameters=True,
321
+ padstacks=True,
322
+ general=True,
310
323
  ):
311
324
  """Export the configuration data from layout to a file.
312
325
 
@@ -330,12 +343,20 @@ class Configuration:
330
343
  Whether to export pin groups.
331
344
  operations : bool
332
345
  Whether to export operations.
346
+ components : bool
347
+ Whether to export component.
348
+ boundaries : bool
349
+ Whether to export boundaries.
350
+ s_parameters : bool
351
+ Whether to export s_parameters.
352
+ padstacks : bool
353
+ Whether to export padstacks.
354
+ general : bool
355
+ Whether to export general information.
333
356
  Returns
334
357
  -------
335
358
  bool
336
359
  """
337
- file_path = file_path if isinstance(file_path, Path) else Path(file_path)
338
- file_path = file_path if file_path.suffix == ".json" else file_path.with_suffix(".json")
339
360
  data = self.get_data_from_db(
340
361
  stackup=stackup,
341
362
  package_definitions=package_definitions,
@@ -345,7 +366,19 @@ class Configuration:
345
366
  nets=nets,
346
367
  pin_groups=pin_groups,
347
368
  operations=operations,
369
+ components=components,
370
+ boundaries=boundaries,
371
+ s_parameters=s_parameters,
372
+ padstacks=padstacks,
373
+ general=general,
348
374
  )
375
+
376
+ file_path = file_path if isinstance(file_path, Path) else Path(file_path)
377
+ file_path = file_path.with_suffix(".json") if file_path.suffix == "" else file_path
378
+
349
379
  with open(file_path, "w") as f:
350
- json.dump(data, f, ensure_ascii=False, indent=4)
380
+ if file_path.suffix == ".json":
381
+ json.dump(data, f, ensure_ascii=False, indent=4)
382
+ else:
383
+ toml.dump(data, f)
351
384
  return True if os.path.isfile(file_path) else False
pyedb/dotnet/edb.py CHANGED
@@ -558,6 +558,10 @@ class Edb(Database):
558
558
  for cell in list(self.top_circuit_cells):
559
559
  if cell.GetName() == self.cellname:
560
560
  self._active_cell = cell
561
+ if self._active_cell is None:
562
+ for cell in list(self.circuit_cells):
563
+ if cell.GetName() == self.cellname:
564
+ self._active_cell = cell
561
565
  # if self._active_cell is still None, set it to default cell
562
566
  if self._active_cell is None:
563
567
  self._active_cell = list(self.top_circuit_cells)[0]
@@ -1455,12 +1459,12 @@ class Edb(Database):
1455
1459
  def import_gds_file(
1456
1460
  self,
1457
1461
  inputGDS,
1458
- WorkDir=None,
1459
1462
  anstranslator_full_path="",
1460
1463
  use_ppe=False,
1461
1464
  control_file=None,
1462
1465
  tech_file=None,
1463
1466
  map_file=None,
1467
+ layer_filter=None,
1464
1468
  ):
1465
1469
  """Import a GDS file and generate an ``edb.def`` file in the working directory.
1466
1470
 
@@ -1471,10 +1475,6 @@ class Edb(Database):
1471
1475
  ----------
1472
1476
  inputGDS : str
1473
1477
  Full path to the GDS file.
1474
- WorkDir : str, optional
1475
- Directory in which to create the ``aedb`` folder. The default value is ``None``,
1476
- in which case the AEDB file is given the same name as the GDS file. Only the extension
1477
- differs.
1478
1478
  anstranslator_full_path : str, optional
1479
1479
  Full path to the Ansys translator.
1480
1480
  use_ppe : bool, optional
@@ -1484,31 +1484,67 @@ class Edb(Database):
1484
1484
  the XML file in the same directory as the GDS file. To succeed, the XML file and GDS file must
1485
1485
  have the same name. Only the extension differs.
1486
1486
  tech_file : str, optional
1487
- Technology file. It uses Helic to convert tech file to xml and then imports the gds. Works on Linux only.
1487
+ Technology file. For versions<2024.1 it uses Helic to convert tech file to xml and then imports
1488
+ the gds. Works on Linux only.
1489
+ For versions>=2024.1 it can directly parse through supported foundry tech files.
1488
1490
  map_file : str, optional
1489
1491
  Layer map file.
1490
-
1491
- Returns
1492
- -------
1493
- bool
1494
- ``True`` when successful, ``False`` when failed.
1492
+ layer_filter:str,optional
1493
+ Layer filter file.
1495
1494
 
1496
1495
  """
1497
- if not is_linux and tech_file:
1498
- self.logger.error("Technology files are supported only in Linux. Use control file instead.")
1499
- return False
1500
1496
  control_file_temp = os.path.join(tempfile.gettempdir(), os.path.split(inputGDS)[-1][:-3] + "xml")
1501
- ControlFile(xml_input=control_file, tecnhology=tech_file, layer_map=map_file).write_xml(control_file_temp)
1502
- if self.import_layout_pcb(
1503
- inputGDS,
1504
- working_dir=WorkDir,
1505
- anstranslator_full_path=anstranslator_full_path,
1506
- use_ppe=use_ppe,
1507
- control_file=control_file_temp,
1508
- ):
1509
- return True
1497
+ if float(self.edbversion) < 2024.1:
1498
+ if not is_linux and tech_file:
1499
+ self.logger.error("Technology files are supported only in Linux. Use control file instead.")
1500
+ return False
1501
+
1502
+ ControlFile(xml_input=control_file, tecnhology=tech_file, layer_map=map_file).write_xml(control_file_temp)
1503
+ if self.import_layout_pcb(
1504
+ inputGDS,
1505
+ anstranslator_full_path=anstranslator_full_path,
1506
+ use_ppe=use_ppe,
1507
+ control_file=control_file_temp,
1508
+ ):
1509
+ return True
1510
+ else:
1511
+ return False
1510
1512
  else:
1511
- return False
1513
+ temp_map_file = os.path.splitext(inputGDS)[0] + ".map"
1514
+ temp_layermap_file = os.path.splitext(inputGDS)[0] + ".layermap"
1515
+
1516
+ if map_file is None:
1517
+ if os.path.isfile(temp_map_file):
1518
+ map_file = temp_map_file
1519
+ elif os.path.isfile(temp_layermap_file):
1520
+ map_file = temp_layermap_file
1521
+ else:
1522
+ self.logger.error("Unable to define map file.")
1523
+
1524
+ if tech_file is None:
1525
+ if control_file is None:
1526
+ temp_control_file = os.path.splitext(inputGDS)[0] + ".xml"
1527
+ if os.path.isfile(temp_control_file):
1528
+ control_file = temp_control_file
1529
+ else:
1530
+ self.logger.error("Unable to define control file.")
1531
+
1532
+ command = [anstranslator_full_path, inputGDS, f'-g="{map_file}"', f'-c="{control_file}"']
1533
+ else:
1534
+ command = [
1535
+ anstranslator_full_path,
1536
+ inputGDS,
1537
+ f'-o="{control_file_temp}"' f'-t="{tech_file}"',
1538
+ f'-g="{map_file}"',
1539
+ f'-f="{layer_filter}"',
1540
+ ]
1541
+
1542
+ result = subprocess.run(command, capture_output=True, text=True, shell=True)
1543
+ print(result.stdout)
1544
+ print(command)
1545
+ temp_inputGDS = inputGDS.split(".gds")[0]
1546
+ self.edbpath = temp_inputGDS + ".aedb"
1547
+ return self.open_edb()
1512
1548
 
1513
1549
  def _create_extent(
1514
1550
  self,
@@ -1630,6 +1666,14 @@ class Edb(Database):
1630
1666
  )
1631
1667
  else:
1632
1668
  obj_data = i.Expand(expansion_size, tolerance, round_corner, round_extension)
1669
+ if inlcude_voids_in_extents and "PolygonData" not in str(i) and i.has_voids and obj_data:
1670
+ for void in i.voids:
1671
+ void_data = void.primitive_object.GetPolygonData().Expand(
1672
+ -1 * expansion_size, tolerance, round_corner, round_extension
1673
+ )
1674
+ if void_data:
1675
+ for v in list(void_data):
1676
+ obj_data[0].AddHole(v)
1633
1677
  if obj_data:
1634
1678
  if not inlcude_voids_in_extents:
1635
1679
  unite_polys.extend(list(obj_data))
@@ -2200,7 +2244,7 @@ class Edb(Database):
2200
2244
  pins_to_preserve = []
2201
2245
  nets_to_preserve = []
2202
2246
  if preserve_components_with_model:
2203
- for el in self.components.instances.values():
2247
+ for el in self.layout.groups:
2204
2248
  if el.model_type in [
2205
2249
  "SPICEModel",
2206
2250
  "SParameterModel",
@@ -2209,9 +2253,9 @@ class Edb(Database):
2209
2253
  pins_to_preserve.extend([i.id for i in el.pins.values()])
2210
2254
  nets_to_preserve.extend(el.nets)
2211
2255
  if include_pingroups:
2212
- for reference in reference_list:
2213
- for pin in self.nets.nets[reference].padstack_instances:
2214
- if pin.pingroups:
2256
+ for pingroup in self.padstacks.pingroups:
2257
+ for pin in pingroup.pins.values():
2258
+ if pin.net_name in reference_list:
2215
2259
  pins_to_preserve.append(pin.id)
2216
2260
  if check_terminals:
2217
2261
  terms = [
@@ -2229,23 +2273,41 @@ class Edb(Database):
2229
2273
  reference_pinsts = []
2230
2274
  reference_prims = []
2231
2275
  reference_paths = []
2232
- for i in self.padstacks.instances.values():
2233
- net_name = i.net_name
2234
- id = i.id
2276
+ pins_to_delete = []
2277
+
2278
+ def check_instances(item):
2279
+ net_name = item.net_name
2280
+ id = item.id
2235
2281
  if net_name not in all_list and id not in pins_to_preserve:
2236
- i.delete()
2282
+ pins_to_delete.append(item)
2237
2283
  elif net_name in reference_list and id not in pins_to_preserve:
2238
- reference_pinsts.append(i)
2239
- for i in self.modeler.primitives:
2240
- if i:
2241
- net_name = i.net_name
2284
+ reference_pinsts.append(item)
2285
+
2286
+ with ThreadPoolExecutor(number_of_threads) as pool:
2287
+ pool.map(lambda item: check_instances(item), self.layout.padstack_instances)
2288
+
2289
+ for i in pins_to_delete:
2290
+ i.delete()
2291
+
2292
+ prim_to_delete = []
2293
+
2294
+ def check_prims(item):
2295
+ if item:
2296
+ net_name = item.net_name
2242
2297
  if net_name not in all_list:
2243
- i.delete()
2244
- elif net_name in reference_list and not i.is_void:
2245
- if keep_lines_as_path and i.type == "Path":
2246
- reference_paths.append(i)
2298
+ prim_to_delete.append(item)
2299
+ elif net_name in reference_list and not item.is_void:
2300
+ if keep_lines_as_path and item.type == "Path":
2301
+ reference_paths.append(item)
2247
2302
  else:
2248
- reference_prims.append(i)
2303
+ reference_prims.append(item)
2304
+
2305
+ with ThreadPoolExecutor(number_of_threads) as pool:
2306
+ pool.map(lambda item: check_prims(item), self.modeler.primitives)
2307
+
2308
+ for i in prim_to_delete:
2309
+ i.delete()
2310
+
2249
2311
  self.logger.info_timer("Net clean up")
2250
2312
  self.logger.reset_timer()
2251
2313
 
@@ -2280,17 +2342,17 @@ class Edb(Database):
2280
2342
  if extent_type in ["Conforming", self.edb_api.geometry.extent_type.Conforming, 1]:
2281
2343
  if extent_defeature > 0:
2282
2344
  _poly = _poly.Defeature(extent_defeature)
2283
-
2284
2345
  _poly1 = _poly.CreateFromArcs(_poly.GetArcData(), True)
2285
2346
  if inlcude_voids_in_extents:
2286
2347
  for hole in list(_poly.Holes):
2287
2348
  if hole.Area() >= 0.05 * _poly1.Area():
2288
2349
  _poly1.AddHole(hole)
2350
+ self.logger.info(f"Number of voids included:{len(list(_poly1.Holes))}")
2289
2351
  _poly = _poly1
2290
2352
  if not _poly or _poly.IsNull():
2291
2353
  self._logger.error("Failed to create Extent.")
2292
2354
  return []
2293
- self.logger.info_timer("Expanded Net Polygon Creation")
2355
+ self.logger.info_timer("Extent Creation")
2294
2356
  self.logger.reset_timer()
2295
2357
  _poly_list = convert_py_list_to_net_list([_poly])
2296
2358
  prims_to_delete = []
@@ -2373,20 +2435,17 @@ class Edb(Database):
2373
2435
  for pin in pins_to_delete:
2374
2436
  pin.delete()
2375
2437
 
2376
- self.logger.info_timer(
2377
- "Padstack Instances removal completed. {} instances removed.".format(len(pins_to_delete))
2378
- )
2438
+ self.logger.info_timer("{} Padstack Instances deleted.".format(len(pins_to_delete)))
2379
2439
  self.logger.reset_timer()
2380
2440
 
2381
- # with ThreadPoolExecutor(number_of_threads) as pool:
2382
- # pool.map(lambda item: clip_path(item), reference_paths)
2383
-
2384
- for item in reference_paths:
2385
- clip_path(item)
2386
- for prim in reference_prims: # removing multithreading as failing with new layer from primitive
2387
- clean_prim(prim)
2388
- # with ThreadPoolExecutor(number_of_threads) as pool:
2389
- # pool.map(lambda item: clean_prim(item), reference_prims)
2441
+ with ThreadPoolExecutor(number_of_threads) as pool:
2442
+ pool.map(lambda item: clip_path(item), reference_paths)
2443
+ with ThreadPoolExecutor(number_of_threads) as pool:
2444
+ pool.map(lambda item: clean_prim(item), reference_prims)
2445
+ # for item in reference_paths:
2446
+ # clip_path(item)
2447
+ # for prim in reference_prims: # removing multithreading as failing with new layer from primitive
2448
+ # clean_prim(prim)
2390
2449
 
2391
2450
  for el in poly_to_create:
2392
2451
  self.modeler.create_polygon(el[0], el[1], net_name=el[2], voids=el[3])
@@ -2394,7 +2453,7 @@ class Edb(Database):
2394
2453
  for prim in prims_to_delete:
2395
2454
  prim.delete()
2396
2455
 
2397
- self.logger.info_timer("Primitives cleanup completed. {} primitives deleted.".format(len(prims_to_delete)))
2456
+ self.logger.info_timer("{} Primitives deleted.".format(len(prims_to_delete)))
2398
2457
  self.logger.reset_timer()
2399
2458
 
2400
2459
  i = 0
@@ -2403,7 +2462,7 @@ class Edb(Database):
2403
2462
  val.edbcomponent.Delete()
2404
2463
  i += 1
2405
2464
  i += 1
2406
- self.logger.info("Deleted {} additional components".format(i))
2465
+ self.logger.info("{} components deleted".format(i))
2407
2466
  if remove_single_pin_components:
2408
2467
  self.components.delete_single_pin_rlc()
2409
2468
  self.logger.info_timer("Single Pins components deleted")