pyedb 0.44.0__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyedb might be problematic. Click here for more details.

Files changed (37)
  1. pyedb/__init__.py +1 -1
  2. pyedb/configuration/cfg_boundaries.py +1 -1
  3. pyedb/configuration/cfg_components.py +7 -7
  4. pyedb/configuration/cfg_general.py +8 -2
  5. pyedb/configuration/cfg_modeler.py +7 -0
  6. pyedb/configuration/cfg_operations.py +40 -1
  7. pyedb/configuration/cfg_pin_groups.py +1 -1
  8. pyedb/configuration/cfg_ports_sources.py +159 -51
  9. pyedb/configuration/cfg_s_parameter_models.py +51 -1
  10. pyedb/configuration/cfg_setup.py +77 -16
  11. pyedb/configuration/configuration.py +13 -3
  12. pyedb/dotnet/database/cell/primitive/path.py +12 -0
  13. pyedb/dotnet/database/geometry/point_data.py +26 -0
  14. pyedb/dotnet/database/geometry/polygon_data.py +9 -0
  15. pyedb/dotnet/database/nets.py +13 -3
  16. pyedb/dotnet/edb.py +41 -18
  17. pyedb/grpc/database/components.py +1 -2
  18. pyedb/grpc/database/definition/component_def.py +1 -1
  19. pyedb/grpc/database/hfss.py +10 -1
  20. pyedb/grpc/database/layout_validation.py +2 -2
  21. pyedb/grpc/database/padstacks.py +15 -9
  22. pyedb/grpc/database/ports/ports.py +3 -3
  23. pyedb/grpc/database/simulation_setup/hfss_simulation_setup.py +18 -13
  24. pyedb/grpc/database/simulation_setup/siwave_simulation_setup.py +73 -30
  25. pyedb/grpc/database/simulation_setup/sweep_data.py +12 -1
  26. pyedb/grpc/database/siwave.py +10 -1
  27. pyedb/grpc/database/source_excitations.py +12 -2
  28. pyedb/grpc/database/stackup.py +12 -4
  29. pyedb/grpc/database/terminal/edge_terminal.py +93 -0
  30. pyedb/grpc/database/terminal/pingroup_terminal.py +14 -1
  31. pyedb/grpc/edb.py +13 -9
  32. pyedb/grpc/edb_init.py +19 -15
  33. pyedb/grpc/rpc_session.py +11 -8
  34. {pyedb-0.44.0.dist-info → pyedb-0.45.0.dist-info}/METADATA +6 -6
  35. {pyedb-0.44.0.dist-info → pyedb-0.45.0.dist-info}/RECORD +37 -37
  36. {pyedb-0.44.0.dist-info → pyedb-0.45.0.dist-info}/LICENSE +0 -0
  37. {pyedb-0.44.0.dist-info → pyedb-0.45.0.dist-info}/WHEEL +0 -0
@@ -205,6 +205,8 @@ class Configuration:
205
205
  self.parent.cfg_data.components.retrieve_parameters_from_edb()
206
206
  components = []
207
207
  for i in self.parent.cfg_data.components.components:
208
+ if i.type == "io":
209
+ components.append(i.get_attributes())
208
210
  components.append(i.get_attributes())
209
211
 
210
212
  if kwargs.get("components", False):
@@ -306,6 +308,14 @@ class Configuration:
306
308
  file_path = file_path if isinstance(file_path, Path) else Path(file_path)
307
309
  file_path = file_path.with_suffix(".json") if file_path.suffix == "" else file_path
308
310
 
311
+ for comp in data["components"]:
312
+ for key, value in comp.items():
313
+ try:
314
+ json.dumps(value)
315
+ print(f"Key '{key}' is serializable.")
316
+ except TypeError as e:
317
+ print(f"Key '{key}' failed: {e}")
318
+
309
319
  with open(file_path, "w") as f:
310
320
  if file_path.suffix == ".json":
311
321
  json.dump(data, f, ensure_ascii=False, indent=4)
@@ -472,9 +482,6 @@ class Configuration:
472
482
  # Configure package definitions
473
483
  self.cfg_data.package_definitions.apply()
474
484
 
475
- # Configure operations
476
- self.cfg_data.operations.apply()
477
-
478
485
  # Modeler
479
486
  self.cfg_data.modeler.apply()
480
487
 
@@ -484,6 +491,9 @@ class Configuration:
484
491
  # Configure probes
485
492
  self.cfg_data.probes.apply()
486
493
 
494
+ # Configure operations
495
+ self.cfg_data.operations.apply()
496
+
487
497
  return True
488
498
 
489
499
  def _load_stackup(self):
@@ -341,6 +341,18 @@ class Path(Primitive):
341
341
  polygon_data = self._edb.geometry.polygon_data.dotnetobj(convert_py_list_to_net_list(points), False)
342
342
  self._edb_object.SetCenterLine(polygon_data)
343
343
 
344
+ def get_center_line_polygon_data(self):
345
+ """Gets center lines of the path as a PolygonData object."""
346
+ edb_object = self._edb_object.GetCenterLine()
347
+ return self._pedb.pedb_class.database.geometry.polygon_data.PolygonData(self._pedb, edb_object=edb_object)
348
+
349
+ def set_center_line_polygon_data(self, polygon_data):
350
+ """Sets center lines of the path from a PolygonData object."""
351
+ if not self._edb_object.SetCenterLine(polygon_data._edb_object):
352
+ raise ValueError
353
+ else:
354
+ return True
355
+
344
356
  @property
345
357
  def corner_style(self):
346
358
  """:class:`PathCornerType`: Path's corner style."""
@@ -35,3 +35,29 @@ class PointData:
35
35
  self._pedb.edb_value(x),
36
36
  self._pedb.edb_value(y),
37
37
  )
38
+
39
+ @property
40
+ def x(self):
41
+ """X value of point."""
42
+ return self._edb_object.X.ToString()
43
+
44
+ @x.setter
45
+ def x(self, value):
46
+ self._edb_object.X = self._pedb.edb_value(value)
47
+
48
+ @property
49
+ def x_evaluated(self):
50
+ return self._edb_object.X.ToDouble()
51
+
52
+ @property
53
+ def y(self):
54
+ """Y value of point."""
55
+ return self._edb_object.Y.ToString()
56
+
57
+ @y.setter
58
+ def y(self, value):
59
+ self._edb_object.Y = self._pedb.edb_value(value)
60
+
61
+ @property
62
+ def y_evaluated(self):
63
+ return self._edb_object.Y.ToDouble()
@@ -135,3 +135,12 @@ class PolygonData:
135
135
  def point_in_polygon(self, x: Union[str, float], y: Union[str, float]) -> bool:
136
136
  """Determines whether a point is inside the polygon."""
137
137
  return self._edb_object.PointInPolygon(self._pedb.point_data(x, y))
138
+
139
+ def get_point(self, index):
140
+ """Gets the point at the index as a PointData object."""
141
+ edb_object = self._edb_object.GetPoint(index)
142
+ return self._pedb.pedb_class.database.geometry.point_data.PointData(self._pedb, edb_object)
143
+
144
+ def set_point(self, index, point_data):
145
+ """Sets the point at the index from a PointData object."""
146
+ self._edb_object.SetPoint(index, point_data)
@@ -308,11 +308,21 @@ class EdbNets(CommonNets):
308
308
  val_value = cmp.rlc_values
309
309
  if refdes in exception_list:
310
310
  pass
311
- elif val_type == "Inductor" and val_value[1] < inductor_below:
311
+ elif (
312
+ val_type == "Inductor"
313
+ and self._pedb.edb_value(val_value[1]).ToDouble() <= self._pedb.edb_value(inductor_below).ToDouble()
314
+ ):
312
315
  pass
313
- elif val_type == "Resistor" and val_value[0] < resistor_below:
316
+ elif (
317
+ val_type == "Resistor"
318
+ and self._pedb.edb_value(val_value[0]).ToDouble() <= self._pedb.edb_value(resistor_below).ToDouble()
319
+ ):
314
320
  pass
315
- elif val_type == "Capacitor" and val_value[2] > capacitor_above:
321
+ elif (
322
+ val_type == "Capacitor"
323
+ and self._pedb.edb_value(val_value[2]).ToDouble()
324
+ >= self._pedb.edb_value(capacitor_above).ToDouble()
325
+ ):
316
326
  pass
317
327
  else:
318
328
  continue
pyedb/dotnet/edb.py CHANGED
@@ -41,13 +41,13 @@ from zipfile import ZipFile as zpf
41
41
  import rtree
42
42
 
43
43
  from pyedb.configuration.configuration import Configuration
44
+ import pyedb.dotnet
44
45
  from pyedb.dotnet.database.Variables import decompose_variable_value
45
46
  from pyedb.dotnet.database.cell.layout import Layout
46
47
  from pyedb.dotnet.database.cell.terminal.terminal import Terminal
47
48
  from pyedb.dotnet.database.components import Components
48
49
  import pyedb.dotnet.database.dotnet.database
49
50
  from pyedb.dotnet.database.dotnet.database import Database
50
- from pyedb.dotnet.database.edb_data.control_file import convert_technology_file
51
51
  from pyedb.dotnet.database.edb_data.design_options import EdbDesignOptions
52
52
  from pyedb.dotnet.database.edb_data.edbvalue import EdbValue
53
53
  from pyedb.dotnet.database.edb_data.ports import (
@@ -134,9 +134,17 @@ class Edb(Database):
134
134
  Reference to the AEDT project object.
135
135
  student_version : bool, optional
136
136
  Whether to open the AEDT student version. The default is ``False.``
137
+ control_file : str, optional
138
+ Path to the XML file. The default is ``None``, in which case an attempt is made to find
139
+ the XML file in the same directory as the board file. To succeed, the XML file and board file
140
+ must have the same name. Only the extension differs.
141
+ map_file : str, optional
142
+ Layer map .map file.
137
143
  technology_file : str, optional
138
144
  Full path to technology file to be converted to xml before importing or xml.
139
145
  Supported by GDS format only.
146
+ layer_filter:str,optional
147
+ Layer filter .txt file.
140
148
 
141
149
  Examples
142
150
  --------
@@ -182,7 +190,10 @@ class Edb(Database):
182
190
  oproject=None,
183
191
  student_version: bool = False,
184
192
  use_ppe: bool = False,
193
+ control_file: str = None,
194
+ map_file: str = None,
185
195
  technology_file: str = None,
196
+ layer_filter: str = None,
186
197
  remove_existing_aedt: bool = False,
187
198
  ):
188
199
  if isinstance(edbpath, Path):
@@ -232,27 +243,35 @@ class Edb(Database):
232
243
  zipped_file.extractall(edbpath[:-4])
233
244
  self.logger.info("ODB++ unzipped successfully.")
234
245
  zipped_file.close()
235
- control_file = None
236
- if technology_file:
237
- if os.path.splitext(technology_file)[1] == ".xml":
238
- control_file = technology_file
239
- else:
240
- control_file = convert_technology_file(technology_file, edbversion=edbversion)
241
246
  self.logger.info("Translating ODB++ to EDB...")
242
- self.import_layout_file(edbpath[:-4], working_dir, use_ppe=use_ppe, control_file=control_file)
247
+ if not self.import_layout_file(
248
+ edbpath[:-4],
249
+ working_dir,
250
+ use_ppe=use_ppe,
251
+ control_file=control_file,
252
+ tech_file=technology_file,
253
+ layer_filter=layer_filter,
254
+ map_file=map_file,
255
+ ):
256
+ raise AttributeError("Translation was unsuccessful")
257
+ return False
243
258
  if settings.enable_local_log_file and self.log_name:
244
259
  self._logger.add_file_logger(self.log_name, "Edb")
245
260
  self.logger.info("EDB %s was created correctly from %s file.", self.edbpath, edbpath)
246
261
  elif edbpath[-3:] in ["brd", "mcm", "sip", "gds", "xml", "dxf", "tgz", "anf"]:
247
262
  self.edbpath = edbpath[:-4] + ".aedb"
248
263
  working_dir = os.path.dirname(edbpath)
249
- control_file = None
250
- if technology_file:
251
- if os.path.splitext(technology_file)[1] == ".xml":
252
- control_file = technology_file
253
- else:
254
- control_file = convert_technology_file(technology_file, edbversion=edbversion)
255
- self.import_layout_file(edbpath, working_dir, use_ppe=use_ppe, control_file=control_file)
264
+ if not self.import_layout_file(
265
+ edbpath,
266
+ working_dir,
267
+ use_ppe=use_ppe,
268
+ control_file=control_file,
269
+ tech_file=technology_file,
270
+ layer_filter=layer_filter,
271
+ map_file=map_file,
272
+ ):
273
+ raise AttributeError("Translation was unsuccessful")
274
+ return False
256
275
  if settings.enable_local_log_file and self.log_name:
257
276
  self._logger.add_file_logger(self.log_name, "Edb")
258
277
  self.logger.info("EDB %s was created correctly from %s file.", self.edbpath, edbpath[-2:])
@@ -371,6 +390,11 @@ class Edb(Database):
371
390
  self._stackup2 = self._stackup
372
391
  self._materials = Materials(self)
373
392
 
393
+ @property
394
+ def pedb_class(self):
395
+ if not self.grpc:
396
+ return pyedb.dotnet
397
+
374
398
  @property
375
399
  def grpc(self):
376
400
  """grpc flag."""
@@ -584,7 +608,7 @@ class Edb(Database):
584
608
  # self.standalone = False
585
609
 
586
610
  self.run_as_standalone(self.standalone)
587
- self.create(self.edbpath)
611
+ self._db = self.create(self.edbpath)
588
612
  if not self.active_db:
589
613
  self.logger.warning("Error creating the database.")
590
614
  self._active_cell = None
@@ -733,8 +757,7 @@ class Edb(Database):
733
757
  cmd_translator.append('-t="{}"'.format(tech_file))
734
758
  if layer_filter:
735
759
  cmd_translator.append('-f="{}"'.format(layer_filter))
736
- p = subprocess.Popen(cmd_translator)
737
- p.wait()
760
+ subprocess.run(cmd_translator)
738
761
  if not os.path.exists(os.path.join(working_dir, aedb_name)):
739
762
  self.logger.error("Translator failed to translate.")
740
763
  return False
@@ -178,8 +178,7 @@ class Components(object):
178
178
  @property
179
179
  def nport_comp_definition(self):
180
180
  """Retrieve Nport component definition list."""
181
- m = "Ansys.Ansoft.Edb.Definition.NPortComponentModel"
182
- return {name: l for name, l in self.definitions.items() if m in [i for i in l.model]}
181
+ return {name: l for name, l in self.definitions.items() if l.reference_file}
183
182
 
184
183
  def import_definition(self, file_path):
185
184
  """Import component definition from json file.
@@ -230,4 +230,4 @@ class ComponentDef(GrpcComponentDef):
230
230
  if pin_order:
231
231
  old = {i.name: i for i in self.component_pins}
232
232
  temp = [old[str(i)] for i in pin_order]
233
- self.component_pins = temp
233
+ self.reorder_pins(temp)
@@ -1240,6 +1240,12 @@ class Hfss(object):
1240
1240
  from ansys.edb.core.simulation_setup.hfss_simulation_setup import (
1241
1241
  HfssSimulationSetup as GrpcHfssSimulationSetup,
1242
1242
  )
1243
+ from ansys.edb.core.simulation_setup.simulation_setup import (
1244
+ Distribution as GrpcDistribution,
1245
+ )
1246
+ from ansys.edb.core.simulation_setup.simulation_setup import (
1247
+ FrequencyData as GrpcFrequencyData,
1248
+ )
1243
1249
  from ansys.edb.core.simulation_setup.simulation_setup import (
1244
1250
  SweepData as GrpcSweepData,
1245
1251
  )
@@ -1267,7 +1273,10 @@ class Hfss(object):
1267
1273
  sweep_name = f"sweep_{len(setup.sweep_data) + 1}"
1268
1274
  sweep_data = [
1269
1275
  GrpcSweepData(
1270
- name=sweep_name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step_freq
1276
+ name=sweep_name,
1277
+ frequency_data=GrpcFrequencyData(
1278
+ distribution=GrpcDistribution[distribution], start_f=start_freq, end_f=stop_freq, step=step_freq
1279
+ ),
1271
1280
  )
1272
1281
  ]
1273
1282
  if discrete_sweep:
@@ -284,9 +284,9 @@ class LayoutValidation:
284
284
  if prim.net_name in net_list:
285
285
  new_prims.extend(prim.fix_self_intersections())
286
286
  if new_prims:
287
- self._pedb._logger.info("Self-intersections detected and removed.")
287
+ self._pedb.logger.info("Self-intersections detected and removed.")
288
288
  else:
289
- self._pedb._logger.info("Self-intersection not found.")
289
+ self._pedb.logger.info("Self-intersection not found.")
290
290
  return True
291
291
 
292
292
  def illegal_net_names(self, fix=False):
@@ -1569,14 +1569,13 @@ class Padstacks(object):
1569
1569
  for id, inst in self.instances.items():
1570
1570
  instances_index[id] = inst.position
1571
1571
  for contour_box in contour_boxes:
1572
- all_instances = self.instances
1573
1572
  instances = self.get_padstack_instances_id_intersecting_polygon(
1574
1573
  points=contour_box, padstack_instances_index=instances_index
1575
1574
  )
1576
1575
  if net_filter:
1577
- instances = [self.instances[id] for id in instances if not self.instances[id].net.name in net_filter]
1576
+ instances = [id for id in instances if not self.instances[id].net.name in net_filter]
1578
1577
  net = self.instances[instances[0]].net.name
1579
- instances_pts = np.array([self.instances[id].position for id in instances])
1578
+ instances_pts = np.array([self.instances[inst].position for inst in instances])
1580
1579
  convex_hull_contour = ConvexHull(instances_pts)
1581
1580
  contour_points = list(instances_pts[convex_hull_contour.vertices])
1582
1581
  layer = list(self._pedb.stackup.layers.values())[0].name
@@ -1595,9 +1594,15 @@ class Padstacks(object):
1595
1594
  stop_layer=stop_layer,
1596
1595
  ):
1597
1596
  self._logger.error(f"Failed to create padstack definition {new_padstack_def}")
1598
- merged_instance = self.place(position=[0, 0], definition_name=new_padstack_def, net_name=net)
1599
- merged_via_ids.append(merged_instance.id)
1600
- [self.instances[id].delete() for id in instances]
1597
+ merged_instance = self.place(
1598
+ position=[0, 0],
1599
+ definition_name=new_padstack_def,
1600
+ net_name=net,
1601
+ fromlayer=start_layer,
1602
+ tolayer=stop_layer,
1603
+ )
1604
+ merged_via_ids.append(merged_instance.edb_uid)
1605
+ [self.instances[inst].delete() for inst in instances]
1601
1606
  return merged_via_ids
1602
1607
 
1603
1608
  def reduce_via_in_bounding_box(self, bounding_box, x_samples, y_samples, nets=None):
@@ -1611,7 +1616,7 @@ class Padstacks(object):
1611
1616
  x_samples : int
1612
1617
  y_samples : int
1613
1618
  nets : str or list, optional
1614
- net name of list of nets name applying filtering on padstack instances selection. If ``None`` is provided
1619
+ net name of list of nets name applying filtering on pad-stack instances selection. If ``None`` is provided
1615
1620
  all instances are included in the index. Default value is ``None``.
1616
1621
 
1617
1622
  Returns
@@ -1622,10 +1627,11 @@ class Padstacks(object):
1622
1627
 
1623
1628
  padstacks_inbox = self.get_padstack_instances_intersecting_bounding_box(bounding_box, nets)
1624
1629
  if not padstacks_inbox:
1625
- raise "No pad-stack in bounding box."
1630
+ return False
1626
1631
  else:
1627
1632
  if len(padstacks_inbox) <= (x_samples * y_samples):
1628
- raise f"more samples {x_samples * y_samples} than existing {len(padstacks_inbox)}"
1633
+ self._pedb.logger.error(f"more samples {x_samples * y_samples} than existing {len(padstacks_inbox)}")
1634
+ return False
1629
1635
  else:
1630
1636
  # extract ids and positions
1631
1637
  vias = {item: self.instances[item].position for item in padstacks_inbox}
@@ -146,7 +146,7 @@ class WavePort(EdgeTerminal):
146
146
  """
147
147
 
148
148
  def __init__(self, pedb, edb_terminal):
149
- super().__init__(pedb, edb_terminal.msg)
149
+ super().__init__(pedb, edb_terminal)
150
150
 
151
151
  @property
152
152
  def horizontal_extent_factor(self):
@@ -161,9 +161,9 @@ class WavePort(EdgeTerminal):
161
161
 
162
162
  @horizontal_extent_factor.setter
163
163
  def horizontal_extent_factor(self, value):
164
- self.p = p
165
- p = self.p
164
+ p = self._hfss_port_property
166
165
  p["Horizontal Extent Factor"] = value
166
+ self._hfss_port_property = p
167
167
 
168
168
  @property
169
169
  def vertical_extent_factor(self):
@@ -367,18 +367,23 @@ class HfssSimulationSetup(GrpcHfssSimulationSetup):
367
367
  start_freq = self._pedb.number_with_units(start_freq, "Hz")
368
368
  stop_freq = self._pedb.number_with_units(stop_freq, "Hz")
369
369
  step = str(step)
370
- if distribution.lower() == "linear":
371
- distribution = "LIN"
372
- elif distribution.lower() == "linear_count":
373
- distribution = "LINC"
374
- elif distribution.lower() == "exponential":
375
- distribution = "ESTP"
376
- elif distribution.lower() == "decade_count":
377
- distribution = "DEC"
378
- elif distribution.lower() == "octave_count":
379
- distribution = "OCT"
380
- else:
381
- distribution = "LIN"
370
+ if not distribution in ["LIN", "LINC", "ESTP", "DEC", "OCT"]:
371
+ if distribution.lower() == "linear" or distribution.lower() == "linear scale":
372
+ distribution = "LIN"
373
+ elif distribution.lower() == "linear_count" or distribution.lower() == "linear count":
374
+ distribution = "LINC"
375
+ elif distribution.lower() == "exponential":
376
+ distribution = "ESTP"
377
+ elif (
378
+ distribution.lower() == "decade_count"
379
+ or distribution.lower() == "decade count"
380
+ or distribution.lower()
381
+ ) == "log scale":
382
+ distribution = "DEC"
383
+ elif distribution.lower() == "octave_count" or distribution.lower() == "octave count":
384
+ distribution = "OCT"
385
+ else:
386
+ distribution = "LIN"
382
387
  if not name:
383
388
  name = f"sweep_{init_sweep_count + 1}"
384
389
  sweep_data = [
@@ -392,7 +397,7 @@ class HfssSimulationSetup(GrpcHfssSimulationSetup):
392
397
  sweep_data.append(sweep)
393
398
  self.sweep_data = sweep_data
394
399
  if len(self.sweep_data) == init_sweep_count + 1:
395
- return True
400
+ return self.sweep_data[-1]
396
401
  else:
397
402
  self._pedb.logger.error("Failed to add frequency sweep data")
398
403
  return False
@@ -57,7 +57,14 @@ class SiwaveSimulationSetup(GrpcSIWaveSimulationSetup):
57
57
  super(SiwaveSimulationSetup, self.__class__).type.__set__(self, GrpcSimulationSetupType.SI_WAVE_DCIR)
58
58
 
59
59
  def add_sweep(
60
- self, name=None, distribution="linear", start_freq="0GHz", stop_freq="20GHz", step="10MHz", discrete=False
60
+ self,
61
+ name=None,
62
+ distribution="linear",
63
+ start_freq="0GHz",
64
+ stop_freq="20GHz",
65
+ step="10MHz",
66
+ discrete=False,
67
+ frequency_set=None,
61
68
  ):
62
69
  """Add a HFSS frequency sweep.
63
70
 
@@ -81,39 +88,75 @@ class SiwaveSimulationSetup(GrpcSIWaveSimulationSetup):
81
88
  distribution. Must be integer in that case.
82
89
  discrete : bool, optional
83
90
  Whether the sweep is discrete. The default is ``False``.
91
+ frequency_set : List, optional
92
+ Frequency set is a list adding one or more frequency sweeps. If ``frequency_set`` is provided, the other
93
+ arguments are ignored except ``discrete``. Default value is ``None``.
94
+ example of frequency_set : [['linear_scale', '50MHz', '200MHz', '10MHz']].
84
95
 
85
96
  Returns
86
97
  -------
87
98
  bool
88
99
  """
89
100
  init_sweep_count = len(self.sweep_data)
90
- start_freq = self._pedb.number_with_units(start_freq, "Hz")
91
- stop_freq = self._pedb.number_with_units(stop_freq, "Hz")
92
- step = str(step)
93
- if distribution.lower() == "linear":
94
- distribution = "LIN"
95
- elif distribution.lower() == "linear_count":
96
- distribution = "LINC"
97
- elif distribution.lower() == "exponential":
98
- distribution = "ESTP"
99
- elif distribution.lower() == "decade_count":
100
- distribution = "DEC"
101
- elif distribution.lower() == "octave_count":
102
- distribution = "OCT"
101
+ if frequency_set:
102
+ for sweep in frequency_set:
103
+ if "linear_scale" in sweep:
104
+ distribution = "LIN"
105
+ elif "linear_count" in sweep:
106
+ distribution = "LINC"
107
+ elif "exponential" in sweep:
108
+ distribution = "ESTP"
109
+ elif "log_scale" in sweep:
110
+ distribution = "DEC"
111
+ elif "octave_count" in sweep:
112
+ distribution = "OCT"
113
+ else:
114
+ distribution = "LIN"
115
+ start_freq = self._pedb.number_with_units(sweep[1], "Hz")
116
+ stop_freq = self._pedb.number_with_units(sweep[2], "Hz")
117
+ step = str(sweep[3])
118
+ if not name:
119
+ name = f"sweep_{init_sweep_count + 1}"
120
+ sweep_data = [
121
+ SweepData(
122
+ self._pedb, name=name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step
123
+ )
124
+ ]
125
+ if discrete:
126
+ sweep_data[0].type = sweep_data[0].type.DISCRETE_SWEEP
127
+ for sweep in self.sweep_data:
128
+ sweep_data.append(sweep)
129
+ self.sweep_data = sweep_data
103
130
  else:
104
- distribution = "LIN"
105
- if not name:
106
- name = f"sweep_{init_sweep_count + 1}"
107
- sweep_data = [
108
- SweepData(self._pedb, name=name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step)
109
- ]
110
- if discrete:
111
- sweep_data[0].type = sweep_data[0].type.DISCRETE_SWEEP
112
- for sweep in self.sweep_data:
113
- sweep_data.append(sweep)
114
- self.sweep_data = sweep_data
115
- if len(self.sweep_data) == init_sweep_count + 1:
116
- return True
117
- else:
118
- self._pedb.logger.error("Failed to add frequency sweep data")
119
- return False
131
+ start_freq = self._pedb.number_with_units(start_freq, "Hz")
132
+ stop_freq = self._pedb.number_with_units(stop_freq, "Hz")
133
+ step = str(step)
134
+ if distribution.lower() == "linear":
135
+ distribution = "LIN"
136
+ elif distribution.lower() == "linear_count":
137
+ distribution = "LINC"
138
+ elif distribution.lower() == "exponential":
139
+ distribution = "ESTP"
140
+ elif distribution.lower() == "decade_count":
141
+ distribution = "DEC"
142
+ elif distribution.lower() == "octave_count":
143
+ distribution = "OCT"
144
+ else:
145
+ distribution = "LIN"
146
+ if not name:
147
+ name = f"sweep_{init_sweep_count + 1}"
148
+ sweep_data = [
149
+ SweepData(
150
+ self._pedb, name=name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step
151
+ )
152
+ ]
153
+ if discrete:
154
+ sweep_data[0].type = sweep_data[0].type.DISCRETE_SWEEP
155
+ for sweep in self.sweep_data:
156
+ sweep_data.append(sweep)
157
+ self.sweep_data = sweep_data
158
+ if len(self.sweep_data) == init_sweep_count + 1:
159
+ return True
160
+ else:
161
+ self._pedb.logger.error("Failed to add frequency sweep data")
162
+ return False
@@ -20,6 +20,12 @@
20
20
  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
21
  # SOFTWARE.
22
22
 
23
+ from ansys.edb.core.simulation_setup.simulation_setup import (
24
+ Distribution as GrpcDistribution,
25
+ )
26
+ from ansys.edb.core.simulation_setup.simulation_setup import (
27
+ FrequencyData as GrpcFrequencyData,
28
+ )
23
29
  from ansys.edb.core.simulation_setup.simulation_setup import SweepData as GrpcSweepData
24
30
 
25
31
 
@@ -27,6 +33,11 @@ class SweepData(GrpcSweepData):
27
33
  """Frequency sweep data class."""
28
34
 
29
35
  def __init__(self, pedb, name, distribution, start_f, end_f, step, edb_object=None):
30
- super().__init__(name=name, distribution=distribution, start_f=start_f, end_f=end_f, step=step)
36
+ super().__init__(
37
+ name=name,
38
+ frequency_data=GrpcFrequencyData(
39
+ distribution=GrpcDistribution[distribution], start_f=start_f, end_f=end_f, step=step
40
+ ),
41
+ )
31
42
  self._edb_object = edb_object
32
43
  self._pedb = pedb
@@ -28,6 +28,12 @@ import os
28
28
  import warnings
29
29
 
30
30
  from ansys.edb.core.database import ProductIdType as GrpcProductIdType
31
+ from ansys.edb.core.simulation_setup.simulation_setup import (
32
+ Distribution as GrpcDistribution,
33
+ )
34
+ from ansys.edb.core.simulation_setup.simulation_setup import (
35
+ FrequencyData as GrpcFrequencyData,
36
+ )
31
37
  from ansys.edb.core.simulation_setup.simulation_setup import SweepData as GrpcSweepData
32
38
 
33
39
  from pyedb.misc.siw_feature_config.xtalk_scan.scan_config import SiwaveScanConfig
@@ -592,7 +598,10 @@ class Siwave(object):
592
598
  sweep_name = f"sweep_{len(setup.sweep_data) + 1}"
593
599
  sweep_data = [
594
600
  GrpcSweepData(
595
- name=sweep_name, distribution=distribution, start_f=start_freq, end_f=stop_freq, step=step_freq
601
+ name=sweep_name,
602
+ frequency_data=GrpcFrequencyData(
603
+ distribution=GrpcDistribution[distribution], start_f=start_freq, end_f=stop_freq, step=step_freq
604
+ ),
596
605
  )
597
606
  ]
598
607
  if discrete_sweep:
@@ -225,7 +225,7 @@ class SourceExcitation:
225
225
  if refdes and any(refdes.rlc_values):
226
226
  return self._pedb.components.deactivate_rlc_component(component=refdes, create_circuit_port=True)
227
227
  if not port_name:
228
- port_name = f"Port_{pins[0].net_name}_{pins[0].name}"
228
+ port_name = f"Port_{pins[0].net_name}_{pins[0].component.name}_{pins[0].name}"
229
229
 
230
230
  if len(pins) > 1 or pingroup_on_single_pin:
231
231
  pec_boundary = False
@@ -713,9 +713,19 @@ class SourceExcitation:
713
713
  -------
714
714
  Edb pin group terminal.
715
715
  """
716
+ from ansys.edb.core.hierarchy.pin_group import PinGroup as GrpcPinGroup
717
+
718
+ from pyedb.grpc.database.hierarchy.pingroup import PinGroup
719
+
716
720
  if pingroup.is_null:
717
721
  self._logger.error(f"{pingroup} is null")
718
- pin = PadstackInstance(self._pedb, pingroup.pins[0])
722
+ if not pingroup.pins:
723
+ self._pedb.logger.error("No pins defined on pingroup.")
724
+ return False
725
+ if isinstance(pingroup, GrpcPinGroup):
726
+ pingroup = PinGroup(self._pedb, pingroup)
727
+ pin = list(pingroup.pins.values())[0]
728
+ pin = PadstackInstance(self._pedb, pin)
719
729
  if term_name is None:
720
730
  term_name = f"{pin.component.name}.{pin.name}.{pin.net_name}"
721
731
  for t in self._pedb.active_layout.terminals: