pyedb 0.44.0__py3-none-any.whl → 0.46.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyedb might be problematic; see the registry's advisory for more details.

Files changed (39)
  1. pyedb/__init__.py +1 -1
  2. pyedb/configuration/cfg_boundaries.py +1 -1
  3. pyedb/configuration/cfg_components.py +7 -7
  4. pyedb/configuration/cfg_data.py +1 -1
  5. pyedb/configuration/cfg_general.py +8 -2
  6. pyedb/configuration/cfg_modeler.py +7 -0
  7. pyedb/configuration/cfg_operations.py +48 -2
  8. pyedb/configuration/cfg_pin_groups.py +1 -1
  9. pyedb/configuration/cfg_ports_sources.py +159 -51
  10. pyedb/configuration/cfg_s_parameter_models.py +51 -1
  11. pyedb/configuration/cfg_setup.py +77 -16
  12. pyedb/configuration/configuration.py +13 -3
  13. pyedb/dotnet/database/cell/primitive/path.py +12 -0
  14. pyedb/dotnet/database/geometry/point_data.py +26 -0
  15. pyedb/dotnet/database/geometry/polygon_data.py +9 -0
  16. pyedb/dotnet/database/nets.py +13 -3
  17. pyedb/dotnet/edb.py +41 -18
  18. pyedb/generic/design_types.py +2 -0
  19. pyedb/grpc/database/components.py +1 -2
  20. pyedb/grpc/database/definition/component_def.py +1 -1
  21. pyedb/grpc/database/hfss.py +10 -1
  22. pyedb/grpc/database/layout_validation.py +2 -2
  23. pyedb/grpc/database/padstacks.py +15 -9
  24. pyedb/grpc/database/ports/ports.py +3 -3
  25. pyedb/grpc/database/simulation_setup/hfss_simulation_setup.py +18 -13
  26. pyedb/grpc/database/simulation_setup/siwave_simulation_setup.py +73 -30
  27. pyedb/grpc/database/simulation_setup/sweep_data.py +12 -1
  28. pyedb/grpc/database/siwave.py +10 -1
  29. pyedb/grpc/database/source_excitations.py +12 -2
  30. pyedb/grpc/database/stackup.py +12 -4
  31. pyedb/grpc/database/terminal/edge_terminal.py +93 -0
  32. pyedb/grpc/database/terminal/pingroup_terminal.py +14 -1
  33. pyedb/grpc/edb.py +13 -9
  34. pyedb/grpc/edb_init.py +19 -15
  35. pyedb/grpc/rpc_session.py +11 -8
  36. {pyedb-0.44.0.dist-info → pyedb-0.46.0.dist-info}/METADATA +6 -6
  37. {pyedb-0.44.0.dist-info → pyedb-0.46.0.dist-info}/RECORD +39 -39
  38. {pyedb-0.44.0.dist-info → pyedb-0.46.0.dist-info}/LICENSE +0 -0
  39. {pyedb-0.44.0.dist-info → pyedb-0.46.0.dist-info}/WHEEL +0 -0
@@ -32,6 +32,59 @@ class CfgSetup:
32
32
  """
33
33
 
34
34
  class Common:
35
+ class Grpc:
36
+ def __init__(self, parent):
37
+ self.parent = parent
38
+
39
+ def apply_freq_sweep(self, edb_setup):
40
+ for i in self.parent.parent.freq_sweep:
41
+ f_set = []
42
+ freq_string = []
43
+ for f in i.get("frequencies", []):
44
+ if isinstance(f, dict):
45
+ increment = f.get("increment", f.get("points", f.get("samples", f.get("step"))))
46
+ f_set.append([f["distribution"], f["start"], f["stop"], increment])
47
+ else:
48
+ freq_string.append(f)
49
+ discrete_sweep = True
50
+ if i["type"] == "interpolation":
51
+ discrete_sweep = False
52
+ if freq_string:
53
+ for _sweep in freq_string:
54
+ _sw = _sweep.split(" ")
55
+ edb_setup.add_sweep(
56
+ name=i["name"],
57
+ distribution=_sw[0],
58
+ start_freq=_sw[1],
59
+ stop_freq=_sw[2],
60
+ step=_sw[3],
61
+ discrete=discrete_sweep,
62
+ )
63
+ else:
64
+ edb_setup.add_sweep(i["name"], frequency_set=f_set, discrete=discrete_sweep)
65
+
66
+ class DotNet(Grpc):
67
+ def __init__(self, parent):
68
+ super().__init__(parent)
69
+
70
+ @staticmethod
71
+ def set_frequency_string(sweep, freq_string):
72
+ sweep.frequency_string = freq_string
73
+
74
+ def apply_freq_sweep(self, edb_setup):
75
+ for i in self.parent.parent.freq_sweep:
76
+ f_set = []
77
+ freq_string = []
78
+ for f in i.get("frequencies", []):
79
+ if isinstance(f, dict):
80
+ increment = f.get("increment", f.get("points", f.get("samples", f.get("step"))))
81
+ f_set.append([f["distribution"], f["start"], f["stop"], increment])
82
+ else:
83
+ freq_string.append(f)
84
+ sweep = edb_setup.add_sweep(i["name"], frequency_set=f_set, sweep_type=i["type"])
85
+ if len(freq_string) > 0:
86
+ self.parent.api.set_frequency_string(sweep, freq_string)
87
+
35
88
  @property
36
89
  def pyedb_obj(self):
37
90
  return self.parent.pyedb_obj
@@ -39,24 +92,17 @@ class CfgSetup:
39
92
  def __init__(self, parent):
40
93
  self.parent = parent
41
94
  self.pedb = parent.pedb
95
+ if self.pedb.grpc:
96
+ self.api = self.Grpc(self)
97
+ else:
98
+ self.api = self.DotNet(self)
42
99
 
43
100
  def _retrieve_parameters_from_edb_common(self):
44
101
  self.parent.name = self.pyedb_obj.name
45
102
  self.parent.type = self.pyedb_obj.type
46
103
 
47
104
  def _apply_freq_sweep(self, edb_setup):
48
- for i in self.parent.freq_sweep:
49
- f_set = []
50
- freq_string = []
51
- for f in i.get("frequencies", []):
52
- if isinstance(f, dict):
53
- increment = f.get("increment", f.get("points", f.get("samples", f.get("step"))))
54
- f_set.append([f["distribution"], f["start"], f["stop"], increment])
55
- else:
56
- freq_string.append(f)
57
- sweep = edb_setup.add_sweep(i["name"], frequency_set=f_set, sweep_type=i["type"])
58
- if len(freq_string) > 0:
59
- sweep.frequency_string = freq_string
105
+ self.api.apply_freq_sweep(edb_setup)
60
106
 
61
107
  class Grpc(Common):
62
108
  def __init__(self, parent):
@@ -149,7 +195,7 @@ class CfgSIwaveDCSetup(CfgSetup):
149
195
  name=self.parent.name, dc_slider_position=self.parent.dc_slider_position
150
196
  )
151
197
  edb_setup.settings.dc.dc_slider_pos = self.parent.dc_slider_position
152
- edb_setup.settings.export_dc_thermal_data = self.parent.dc_ir_settings["export_dc_thermal_data"]
198
+ edb_setup.settings.export_dc_thermal_data = self.parent.dc_ir_settings.get("export_dc_thermal_data", False)
153
199
 
154
200
  def retrieve_parameters_from_edb(self):
155
201
  self._retrieve_parameters_from_edb_common()
@@ -168,7 +214,7 @@ class CfgSIwaveDCSetup(CfgSetup):
168
214
  )
169
215
  edb_setup.dc_settings.dc_slider_position = self.parent.dc_slider_position
170
216
  dc_ir_settings = self.parent.dc_ir_settings
171
- edb_setup.dc_ir_settings.export_dc_thermal_data = dc_ir_settings["export_dc_thermal_data"]
217
+ edb_setup.dc_ir_settings.export_dc_thermal_data = dc_ir_settings.get("export_dc_thermal_data", False)
172
218
 
173
219
  def __init__(self, pedb, pyedb_obj, **kwargs):
174
220
  super().__init__(pedb, pyedb_obj, **kwargs)
@@ -216,8 +262,12 @@ class CfgHFSSSetup(CfgSetup):
216
262
  self.parent.max_num_passes = single_frequency_adaptive_solution.max_passes
217
263
  self.parent.max_mag_delta_s = float(single_frequency_adaptive_solution.max_delta)
218
264
  self.parent.freq_sweep = []
219
- for sw in self.pyedb_obj.sweep_data:
220
- self.parent.freq_sweep.append({"name": sw.name, "type": sw.type, "frequencies": sw.frequency_string})
265
+ setup_sweeps = self.sort_sweep_data(self.pyedb_obj.sweep_data)
266
+ for setup_name, sweeps in setup_sweeps.items():
267
+ sw_name = sweeps[0].name
268
+ sw_type = sweeps[0].type.name.lower().split("_")[0]
269
+ freq_strings = [f.frequency_string for f in sweeps]
270
+ self.parent.freq_sweep.append({"name": sw_name, "type": sw_type, "frequencies": freq_strings})
221
271
 
222
272
  self.parent.mesh_operations = []
223
273
  from ansys.edb.core.simulation_setup.mesh_operation import (
@@ -241,6 +291,17 @@ class CfgHFSSSetup(CfgSetup):
241
291
  }
242
292
  )
243
293
 
294
+ @staticmethod
295
+ def sort_sweep_data(sweep_data):
296
+ """grpc sweep data contains all sweeps for each setup, we need to sort thwm by setup"""
297
+ setups = {}
298
+ for sweep in sweep_data:
299
+ if sweep.name not in setups:
300
+ setups[sweep.name] = [sweep]
301
+ else:
302
+ setups[sweep.name].append(sweep)
303
+ return setups
304
+
244
305
  class DotNet(Grpc):
245
306
  def __init__(self, parent):
246
307
  super().__init__(parent)
@@ -205,6 +205,8 @@ class Configuration:
205
205
  self.parent.cfg_data.components.retrieve_parameters_from_edb()
206
206
  components = []
207
207
  for i in self.parent.cfg_data.components.components:
208
+ if i.type == "io":
209
+ components.append(i.get_attributes())
208
210
  components.append(i.get_attributes())
209
211
 
210
212
  if kwargs.get("components", False):
@@ -306,6 +308,14 @@ class Configuration:
306
308
  file_path = file_path if isinstance(file_path, Path) else Path(file_path)
307
309
  file_path = file_path.with_suffix(".json") if file_path.suffix == "" else file_path
308
310
 
311
+ for comp in data["components"]:
312
+ for key, value in comp.items():
313
+ try:
314
+ json.dumps(value)
315
+ print(f"Key '{key}' is serializable.")
316
+ except TypeError as e:
317
+ print(f"Key '{key}' failed: {e}")
318
+
309
319
  with open(file_path, "w") as f:
310
320
  if file_path.suffix == ".json":
311
321
  json.dump(data, f, ensure_ascii=False, indent=4)
@@ -472,9 +482,6 @@ class Configuration:
472
482
  # Configure package definitions
473
483
  self.cfg_data.package_definitions.apply()
474
484
 
475
- # Configure operations
476
- self.cfg_data.operations.apply()
477
-
478
485
  # Modeler
479
486
  self.cfg_data.modeler.apply()
480
487
 
@@ -484,6 +491,9 @@ class Configuration:
484
491
  # Configure probes
485
492
  self.cfg_data.probes.apply()
486
493
 
494
+ # Configure operations
495
+ self.cfg_data.operations.apply()
496
+
487
497
  return True
488
498
 
489
499
  def _load_stackup(self):
@@ -341,6 +341,18 @@ class Path(Primitive):
341
341
  polygon_data = self._edb.geometry.polygon_data.dotnetobj(convert_py_list_to_net_list(points), False)
342
342
  self._edb_object.SetCenterLine(polygon_data)
343
343
 
344
+ def get_center_line_polygon_data(self):
345
+ """Gets center lines of the path as a PolygonData object."""
346
+ edb_object = self._edb_object.GetCenterLine()
347
+ return self._pedb.pedb_class.database.geometry.polygon_data.PolygonData(self._pedb, edb_object=edb_object)
348
+
349
+ def set_center_line_polygon_data(self, polygon_data):
350
+ """Sets center lines of the path from a PolygonData object."""
351
+ if not self._edb_object.SetCenterLine(polygon_data._edb_object):
352
+ raise ValueError
353
+ else:
354
+ return True
355
+
344
356
  @property
345
357
  def corner_style(self):
346
358
  """:class:`PathCornerType`: Path's corner style."""
@@ -35,3 +35,29 @@ class PointData:
35
35
  self._pedb.edb_value(x),
36
36
  self._pedb.edb_value(y),
37
37
  )
38
+
39
+ @property
40
+ def x(self):
41
+ """X value of point."""
42
+ return self._edb_object.X.ToString()
43
+
44
+ @x.setter
45
+ def x(self, value):
46
+ self._edb_object.X = self._pedb.edb_value(value)
47
+
48
+ @property
49
+ def x_evaluated(self):
50
+ return self._edb_object.X.ToDouble()
51
+
52
+ @property
53
+ def y(self):
54
+ """Y value of point."""
55
+ return self._edb_object.Y.ToString()
56
+
57
+ @y.setter
58
+ def y(self, value):
59
+ self._edb_object.Y = self._pedb.edb_value(value)
60
+
61
+ @property
62
+ def y_evaluated(self):
63
+ return self._edb_object.Y.ToDouble()
@@ -135,3 +135,12 @@ class PolygonData:
135
135
  def point_in_polygon(self, x: Union[str, float], y: Union[str, float]) -> bool:
136
136
  """Determines whether a point is inside the polygon."""
137
137
  return self._edb_object.PointInPolygon(self._pedb.point_data(x, y))
138
+
139
+ def get_point(self, index):
140
+ """Gets the point at the index as a PointData object."""
141
+ edb_object = self._edb_object.GetPoint(index)
142
+ return self._pedb.pedb_class.database.geometry.point_data.PointData(self._pedb, edb_object)
143
+
144
+ def set_point(self, index, point_data):
145
+ """Sets the point at the index from a PointData object."""
146
+ self._edb_object.SetPoint(index, point_data)
@@ -308,11 +308,21 @@ class EdbNets(CommonNets):
308
308
  val_value = cmp.rlc_values
309
309
  if refdes in exception_list:
310
310
  pass
311
- elif val_type == "Inductor" and val_value[1] < inductor_below:
311
+ elif (
312
+ val_type == "Inductor"
313
+ and self._pedb.edb_value(val_value[1]).ToDouble() <= self._pedb.edb_value(inductor_below).ToDouble()
314
+ ):
312
315
  pass
313
- elif val_type == "Resistor" and val_value[0] < resistor_below:
316
+ elif (
317
+ val_type == "Resistor"
318
+ and self._pedb.edb_value(val_value[0]).ToDouble() <= self._pedb.edb_value(resistor_below).ToDouble()
319
+ ):
314
320
  pass
315
- elif val_type == "Capacitor" and val_value[2] > capacitor_above:
321
+ elif (
322
+ val_type == "Capacitor"
323
+ and self._pedb.edb_value(val_value[2]).ToDouble()
324
+ >= self._pedb.edb_value(capacitor_above).ToDouble()
325
+ ):
316
326
  pass
317
327
  else:
318
328
  continue
pyedb/dotnet/edb.py CHANGED
@@ -41,13 +41,13 @@ from zipfile import ZipFile as zpf
41
41
  import rtree
42
42
 
43
43
  from pyedb.configuration.configuration import Configuration
44
+ import pyedb.dotnet
44
45
  from pyedb.dotnet.database.Variables import decompose_variable_value
45
46
  from pyedb.dotnet.database.cell.layout import Layout
46
47
  from pyedb.dotnet.database.cell.terminal.terminal import Terminal
47
48
  from pyedb.dotnet.database.components import Components
48
49
  import pyedb.dotnet.database.dotnet.database
49
50
  from pyedb.dotnet.database.dotnet.database import Database
50
- from pyedb.dotnet.database.edb_data.control_file import convert_technology_file
51
51
  from pyedb.dotnet.database.edb_data.design_options import EdbDesignOptions
52
52
  from pyedb.dotnet.database.edb_data.edbvalue import EdbValue
53
53
  from pyedb.dotnet.database.edb_data.ports import (
@@ -134,9 +134,17 @@ class Edb(Database):
134
134
  Reference to the AEDT project object.
135
135
  student_version : bool, optional
136
136
  Whether to open the AEDT student version. The default is ``False.``
137
+ control_file : str, optional
138
+ Path to the XML file. The default is ``None``, in which case an attempt is made to find
139
+ the XML file in the same directory as the board file. To succeed, the XML file and board file
140
+ must have the same name. Only the extension differs.
141
+ map_file : str, optional
142
+ Layer map .map file.
137
143
  technology_file : str, optional
138
144
  Full path to technology file to be converted to xml before importing or xml.
139
145
  Supported by GDS format only.
146
+ layer_filter:str,optional
147
+ Layer filter .txt file.
140
148
 
141
149
  Examples
142
150
  --------
@@ -182,7 +190,10 @@ class Edb(Database):
182
190
  oproject=None,
183
191
  student_version: bool = False,
184
192
  use_ppe: bool = False,
193
+ control_file: str = None,
194
+ map_file: str = None,
185
195
  technology_file: str = None,
196
+ layer_filter: str = None,
186
197
  remove_existing_aedt: bool = False,
187
198
  ):
188
199
  if isinstance(edbpath, Path):
@@ -232,27 +243,35 @@ class Edb(Database):
232
243
  zipped_file.extractall(edbpath[:-4])
233
244
  self.logger.info("ODB++ unzipped successfully.")
234
245
  zipped_file.close()
235
- control_file = None
236
- if technology_file:
237
- if os.path.splitext(technology_file)[1] == ".xml":
238
- control_file = technology_file
239
- else:
240
- control_file = convert_technology_file(technology_file, edbversion=edbversion)
241
246
  self.logger.info("Translating ODB++ to EDB...")
242
- self.import_layout_file(edbpath[:-4], working_dir, use_ppe=use_ppe, control_file=control_file)
247
+ if not self.import_layout_file(
248
+ edbpath[:-4],
249
+ working_dir,
250
+ use_ppe=use_ppe,
251
+ control_file=control_file,
252
+ tech_file=technology_file,
253
+ layer_filter=layer_filter,
254
+ map_file=map_file,
255
+ ):
256
+ raise AttributeError("Translation was unsuccessful")
257
+ return False
243
258
  if settings.enable_local_log_file and self.log_name:
244
259
  self._logger.add_file_logger(self.log_name, "Edb")
245
260
  self.logger.info("EDB %s was created correctly from %s file.", self.edbpath, edbpath)
246
261
  elif edbpath[-3:] in ["brd", "mcm", "sip", "gds", "xml", "dxf", "tgz", "anf"]:
247
262
  self.edbpath = edbpath[:-4] + ".aedb"
248
263
  working_dir = os.path.dirname(edbpath)
249
- control_file = None
250
- if technology_file:
251
- if os.path.splitext(technology_file)[1] == ".xml":
252
- control_file = technology_file
253
- else:
254
- control_file = convert_technology_file(technology_file, edbversion=edbversion)
255
- self.import_layout_file(edbpath, working_dir, use_ppe=use_ppe, control_file=control_file)
264
+ if not self.import_layout_file(
265
+ edbpath,
266
+ working_dir,
267
+ use_ppe=use_ppe,
268
+ control_file=control_file,
269
+ tech_file=technology_file,
270
+ layer_filter=layer_filter,
271
+ map_file=map_file,
272
+ ):
273
+ raise AttributeError("Translation was unsuccessful")
274
+ return False
256
275
  if settings.enable_local_log_file and self.log_name:
257
276
  self._logger.add_file_logger(self.log_name, "Edb")
258
277
  self.logger.info("EDB %s was created correctly from %s file.", self.edbpath, edbpath[-2:])
@@ -371,6 +390,11 @@ class Edb(Database):
371
390
  self._stackup2 = self._stackup
372
391
  self._materials = Materials(self)
373
392
 
393
+ @property
394
+ def pedb_class(self):
395
+ if not self.grpc:
396
+ return pyedb.dotnet
397
+
374
398
  @property
375
399
  def grpc(self):
376
400
  """grpc flag."""
@@ -584,7 +608,7 @@ class Edb(Database):
584
608
  # self.standalone = False
585
609
 
586
610
  self.run_as_standalone(self.standalone)
587
- self.create(self.edbpath)
611
+ self._db = self.create(self.edbpath)
588
612
  if not self.active_db:
589
613
  self.logger.warning("Error creating the database.")
590
614
  self._active_cell = None
@@ -733,8 +757,7 @@ class Edb(Database):
733
757
  cmd_translator.append('-t="{}"'.format(tech_file))
734
758
  if layer_filter:
735
759
  cmd_translator.append('-f="{}"'.format(layer_filter))
736
- p = subprocess.Popen(cmd_translator)
737
- p.wait()
760
+ subprocess.run(cmd_translator)
738
761
  if not os.path.exists(os.path.join(working_dir, aedb_name)):
739
762
  self.logger.error("Translator failed to translate.")
740
763
  return False
@@ -33,6 +33,7 @@ def Edb(
33
33
  use_ppe=False,
34
34
  technology_file=None,
35
35
  grpc=False,
36
+ control_file=None,
36
37
  ):
37
38
  """Provides the EDB application interface.
38
39
 
@@ -127,6 +128,7 @@ def Edb(
127
128
  student_version=student_version,
128
129
  use_ppe=use_ppe,
129
130
  technology_file=technology_file,
131
+ control_file=control_file,
130
132
  )
131
133
 
132
134
 
@@ -178,8 +178,7 @@ class Components(object):
178
178
  @property
179
179
  def nport_comp_definition(self):
180
180
  """Retrieve Nport component definition list."""
181
- m = "Ansys.Ansoft.Edb.Definition.NPortComponentModel"
182
- return {name: l for name, l in self.definitions.items() if m in [i for i in l.model]}
181
+ return {name: l for name, l in self.definitions.items() if l.reference_file}
183
182
 
184
183
  def import_definition(self, file_path):
185
184
  """Import component definition from json file.
@@ -230,4 +230,4 @@ class ComponentDef(GrpcComponentDef):
230
230
  if pin_order:
231
231
  old = {i.name: i for i in self.component_pins}
232
232
  temp = [old[str(i)] for i in pin_order]
233
- self.component_pins = temp
233
+ self.reorder_pins(temp)
@@ -1240,6 +1240,12 @@ class Hfss(object):
1240
1240
  from ansys.edb.core.simulation_setup.hfss_simulation_setup import (
1241
1241
  HfssSimulationSetup as GrpcHfssSimulationSetup,
1242
1242
  )
1243
+ from ansys.edb.core.simulation_setup.simulation_setup import (
1244
+ Distribution as GrpcDistribution,
1245
+ )
1246
+ from ansys.edb.core.simulation_setup.simulation_setup import (
1247
+ FrequencyData as GrpcFrequencyData,
1248
+ )
1243
1249
  from ansys.edb.core.simulation_setup.simulation_setup import (
1244
1250
  SweepData as GrpcSweepData,
1245
1251
  )
@@ -1267,7 +1273,10 @@ class Hfss(object):
1267
1273
  sweep_name = f"sweep_{len(setup.sweep_data) + 1}"
1268
1274
  sweep_data = [
1269
1275
  GrpcSweepData(
1270
- name=sweep_name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step_freq
1276
+ name=sweep_name,
1277
+ frequency_data=GrpcFrequencyData(
1278
+ distribution=GrpcDistribution[distribution], start_f=start_freq, end_f=stop_freq, step=step_freq
1279
+ ),
1271
1280
  )
1272
1281
  ]
1273
1282
  if discrete_sweep:
@@ -284,9 +284,9 @@ class LayoutValidation:
284
284
  if prim.net_name in net_list:
285
285
  new_prims.extend(prim.fix_self_intersections())
286
286
  if new_prims:
287
- self._pedb._logger.info("Self-intersections detected and removed.")
287
+ self._pedb.logger.info("Self-intersections detected and removed.")
288
288
  else:
289
- self._pedb._logger.info("Self-intersection not found.")
289
+ self._pedb.logger.info("Self-intersection not found.")
290
290
  return True
291
291
 
292
292
  def illegal_net_names(self, fix=False):
@@ -1569,14 +1569,13 @@ class Padstacks(object):
1569
1569
  for id, inst in self.instances.items():
1570
1570
  instances_index[id] = inst.position
1571
1571
  for contour_box in contour_boxes:
1572
- all_instances = self.instances
1573
1572
  instances = self.get_padstack_instances_id_intersecting_polygon(
1574
1573
  points=contour_box, padstack_instances_index=instances_index
1575
1574
  )
1576
1575
  if net_filter:
1577
- instances = [self.instances[id] for id in instances if not self.instances[id].net.name in net_filter]
1576
+ instances = [id for id in instances if not self.instances[id].net.name in net_filter]
1578
1577
  net = self.instances[instances[0]].net.name
1579
- instances_pts = np.array([self.instances[id].position for id in instances])
1578
+ instances_pts = np.array([self.instances[inst].position for inst in instances])
1580
1579
  convex_hull_contour = ConvexHull(instances_pts)
1581
1580
  contour_points = list(instances_pts[convex_hull_contour.vertices])
1582
1581
  layer = list(self._pedb.stackup.layers.values())[0].name
@@ -1595,9 +1594,15 @@ class Padstacks(object):
1595
1594
  stop_layer=stop_layer,
1596
1595
  ):
1597
1596
  self._logger.error(f"Failed to create padstack definition {new_padstack_def}")
1598
- merged_instance = self.place(position=[0, 0], definition_name=new_padstack_def, net_name=net)
1599
- merged_via_ids.append(merged_instance.id)
1600
- [self.instances[id].delete() for id in instances]
1597
+ merged_instance = self.place(
1598
+ position=[0, 0],
1599
+ definition_name=new_padstack_def,
1600
+ net_name=net,
1601
+ fromlayer=start_layer,
1602
+ tolayer=stop_layer,
1603
+ )
1604
+ merged_via_ids.append(merged_instance.edb_uid)
1605
+ [self.instances[inst].delete() for inst in instances]
1601
1606
  return merged_via_ids
1602
1607
 
1603
1608
  def reduce_via_in_bounding_box(self, bounding_box, x_samples, y_samples, nets=None):
@@ -1611,7 +1616,7 @@ class Padstacks(object):
1611
1616
  x_samples : int
1612
1617
  y_samples : int
1613
1618
  nets : str or list, optional
1614
- net name of list of nets name applying filtering on padstack instances selection. If ``None`` is provided
1619
+ net name of list of nets name applying filtering on pad-stack instances selection. If ``None`` is provided
1615
1620
  all instances are included in the index. Default value is ``None``.
1616
1621
 
1617
1622
  Returns
@@ -1622,10 +1627,11 @@ class Padstacks(object):
1622
1627
 
1623
1628
  padstacks_inbox = self.get_padstack_instances_intersecting_bounding_box(bounding_box, nets)
1624
1629
  if not padstacks_inbox:
1625
- raise "No pad-stack in bounding box."
1630
+ return False
1626
1631
  else:
1627
1632
  if len(padstacks_inbox) <= (x_samples * y_samples):
1628
- raise f"more samples {x_samples * y_samples} than existing {len(padstacks_inbox)}"
1633
+ self._pedb.logger.error(f"more samples {x_samples * y_samples} than existing {len(padstacks_inbox)}")
1634
+ return False
1629
1635
  else:
1630
1636
  # extract ids and positions
1631
1637
  vias = {item: self.instances[item].position for item in padstacks_inbox}
@@ -146,7 +146,7 @@ class WavePort(EdgeTerminal):
146
146
  """
147
147
 
148
148
  def __init__(self, pedb, edb_terminal):
149
- super().__init__(pedb, edb_terminal.msg)
149
+ super().__init__(pedb, edb_terminal)
150
150
 
151
151
  @property
152
152
  def horizontal_extent_factor(self):
@@ -161,9 +161,9 @@ class WavePort(EdgeTerminal):
161
161
 
162
162
  @horizontal_extent_factor.setter
163
163
  def horizontal_extent_factor(self, value):
164
- self.p = p
165
- p = self.p
164
+ p = self._hfss_port_property
166
165
  p["Horizontal Extent Factor"] = value
166
+ self._hfss_port_property = p
167
167
 
168
168
  @property
169
169
  def vertical_extent_factor(self):
@@ -367,18 +367,23 @@ class HfssSimulationSetup(GrpcHfssSimulationSetup):
367
367
  start_freq = self._pedb.number_with_units(start_freq, "Hz")
368
368
  stop_freq = self._pedb.number_with_units(stop_freq, "Hz")
369
369
  step = str(step)
370
- if distribution.lower() == "linear":
371
- distribution = "LIN"
372
- elif distribution.lower() == "linear_count":
373
- distribution = "LINC"
374
- elif distribution.lower() == "exponential":
375
- distribution = "ESTP"
376
- elif distribution.lower() == "decade_count":
377
- distribution = "DEC"
378
- elif distribution.lower() == "octave_count":
379
- distribution = "OCT"
380
- else:
381
- distribution = "LIN"
370
+ if not distribution in ["LIN", "LINC", "ESTP", "DEC", "OCT"]:
371
+ if distribution.lower() == "linear" or distribution.lower() == "linear scale":
372
+ distribution = "LIN"
373
+ elif distribution.lower() == "linear_count" or distribution.lower() == "linear count":
374
+ distribution = "LINC"
375
+ elif distribution.lower() == "exponential":
376
+ distribution = "ESTP"
377
+ elif (
378
+ distribution.lower() == "decade_count"
379
+ or distribution.lower() == "decade count"
380
+ or distribution.lower()
381
+ ) == "log scale":
382
+ distribution = "DEC"
383
+ elif distribution.lower() == "octave_count" or distribution.lower() == "octave count":
384
+ distribution = "OCT"
385
+ else:
386
+ distribution = "LIN"
382
387
  if not name:
383
388
  name = f"sweep_{init_sweep_count + 1}"
384
389
  sweep_data = [
@@ -392,7 +397,7 @@ class HfssSimulationSetup(GrpcHfssSimulationSetup):
392
397
  sweep_data.append(sweep)
393
398
  self.sweep_data = sweep_data
394
399
  if len(self.sweep_data) == init_sweep_count + 1:
395
- return True
400
+ return self.sweep_data[-1]
396
401
  else:
397
402
  self._pedb.logger.error("Failed to add frequency sweep data")
398
403
  return False