siliconcompiler 0.35.4__py3-none-any.whl → 0.36.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/constraints/__init__.py +4 -1
  3. siliconcompiler/constraints/asic_timing.py +230 -38
  4. siliconcompiler/constraints/fpga_timing.py +209 -14
  5. siliconcompiler/constraints/timing_mode.py +82 -0
  6. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +0 -6
  7. siliconcompiler/flowgraph.py +95 -42
  8. siliconcompiler/flows/generate_openroad_rcx.py +2 -2
  9. siliconcompiler/flows/highresscreenshotflow.py +37 -0
  10. siliconcompiler/library.py +2 -1
  11. siliconcompiler/package/__init__.py +39 -45
  12. siliconcompiler/project.py +4 -1
  13. siliconcompiler/scheduler/scheduler.py +64 -35
  14. siliconcompiler/scheduler/schedulernode.py +5 -2
  15. siliconcompiler/scheduler/slurm.py +7 -6
  16. siliconcompiler/scheduler/taskscheduler.py +19 -16
  17. siliconcompiler/schema/_metadata.py +1 -1
  18. siliconcompiler/schema/namedschema.py +2 -4
  19. siliconcompiler/schema_support/cmdlineschema.py +0 -3
  20. siliconcompiler/schema_support/dependencyschema.py +0 -6
  21. siliconcompiler/schema_support/record.py +4 -3
  22. siliconcompiler/tool.py +58 -27
  23. siliconcompiler/tools/_common/tcl/sc_schema_access.tcl +0 -6
  24. siliconcompiler/tools/chisel/convert.py +44 -0
  25. siliconcompiler/tools/ghdl/convert.py +37 -2
  26. siliconcompiler/tools/icarus/compile.py +14 -0
  27. siliconcompiler/tools/keplerformal/__init__.py +7 -0
  28. siliconcompiler/tools/keplerformal/lec.py +112 -0
  29. siliconcompiler/tools/klayout/drc.py +14 -0
  30. siliconcompiler/tools/klayout/export.py +40 -0
  31. siliconcompiler/tools/klayout/operations.py +40 -0
  32. siliconcompiler/tools/klayout/screenshot.py +66 -1
  33. siliconcompiler/tools/klayout/scripts/klayout_export.py +10 -40
  34. siliconcompiler/tools/klayout/scripts/klayout_show.py +4 -4
  35. siliconcompiler/tools/klayout/scripts/klayout_utils.py +13 -1
  36. siliconcompiler/tools/montage/tile.py +26 -12
  37. siliconcompiler/tools/openroad/__init__.py +11 -0
  38. siliconcompiler/tools/openroad/_apr.py +780 -11
  39. siliconcompiler/tools/openroad/antenna_repair.py +26 -0
  40. siliconcompiler/tools/openroad/fillmetal_insertion.py +14 -0
  41. siliconcompiler/tools/openroad/global_placement.py +67 -0
  42. siliconcompiler/tools/openroad/global_route.py +15 -0
  43. siliconcompiler/tools/openroad/init_floorplan.py +19 -2
  44. siliconcompiler/tools/openroad/macro_placement.py +252 -0
  45. siliconcompiler/tools/openroad/power_grid.py +43 -0
  46. siliconcompiler/tools/openroad/power_grid_analysis.py +1 -1
  47. siliconcompiler/tools/openroad/rcx_bench.py +28 -0
  48. siliconcompiler/tools/openroad/rcx_extract.py +14 -0
  49. siliconcompiler/tools/openroad/rdlroute.py +14 -0
  50. siliconcompiler/tools/openroad/repair_design.py +41 -0
  51. siliconcompiler/tools/openroad/repair_timing.py +54 -0
  52. siliconcompiler/tools/openroad/screenshot.py +31 -1
  53. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +8 -0
  54. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +54 -15
  55. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +6 -4
  56. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +4 -4
  57. siliconcompiler/tools/openroad/scripts/common/procs.tcl +14 -5
  58. siliconcompiler/tools/openroad/scripts/common/read_liberty.tcl +2 -2
  59. siliconcompiler/tools/openroad/scripts/common/reports.tcl +6 -3
  60. siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +1 -1
  61. siliconcompiler/tools/openroad/scripts/common/write_data_physical.tcl +8 -0
  62. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +16 -12
  63. siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -1
  64. siliconcompiler/tools/openroad/write_data.py +78 -2
  65. siliconcompiler/tools/opensta/scripts/sc_check_library.tcl +2 -2
  66. siliconcompiler/tools/opensta/scripts/sc_report_libraries.tcl +2 -2
  67. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +12 -14
  68. siliconcompiler/tools/opensta/timing.py +42 -3
  69. siliconcompiler/tools/slang/elaborate.py +16 -1
  70. siliconcompiler/tools/surelog/parse.py +54 -0
  71. siliconcompiler/tools/verilator/compile.py +120 -0
  72. siliconcompiler/tools/vivado/syn_fpga.py +27 -0
  73. siliconcompiler/tools/vpr/route.py +40 -0
  74. siliconcompiler/tools/xdm/convert.py +14 -0
  75. siliconcompiler/tools/xyce/simulate.py +26 -0
  76. siliconcompiler/tools/yosys/lec_asic.py +13 -0
  77. siliconcompiler/tools/yosys/syn_asic.py +332 -3
  78. siliconcompiler/tools/yosys/syn_fpga.py +32 -0
  79. siliconcompiler/toolscripts/_tools.json +9 -4
  80. siliconcompiler/toolscripts/ubuntu22/install-keplerformal.sh +72 -0
  81. siliconcompiler/toolscripts/ubuntu24/install-keplerformal.sh +72 -0
  82. siliconcompiler/utils/multiprocessing.py +11 -0
  83. siliconcompiler/utils/settings.py +70 -49
  84. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/METADATA +4 -4
  85. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/RECORD +89 -83
  86. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/WHEEL +0 -0
  87. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/entry_points.txt +0 -0
  88. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/licenses/LICENSE +0 -0
  89. {siliconcompiler-0.35.4.dist-info → siliconcompiler-0.36.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,82 @@
1
+ from typing import Union, List, Tuple, Optional
2
+
3
+ from siliconcompiler.schema import NamedSchema, EditableSchema, Parameter, \
4
+ PerNode, Scope
5
+ from siliconcompiler import Design
6
+
7
+
8
+ class TimingModeSchema(NamedSchema):
9
+ """
10
+ Represents a single timing mode for design constraints.
11
+
12
+ This class encapsulates the SDC filesets used for a specific timing mode.
13
+ """
14
+
15
+ def __init__(self, name: Optional[str] = None):
16
+ super().__init__()
17
+ self.set_name(name)
18
+
19
+ schema = EditableSchema(self)
20
+ schema.insert(
21
+ 'sdcfileset',
22
+ Parameter(
23
+ '[(str,str)]',
24
+ pernode=PerNode.OPTIONAL,
25
+ scope=Scope.GLOBAL,
26
+ shorthelp="Constraint: SDC files",
27
+ switch="-constraint_timing_file 'scenario <file>'",
28
+ example=["api: mode.set('constraint', 'timing', 'worst', 'file', 'hello.sdc')"],
29
+ help="""List of timing constraint sets files to use for the scenario. The
30
+ values are combined with any constraints specified by the design
31
+ 'constraint' parameter. If no constraints are found, a default
32
+ constraint file is used based on the clock definitions."""))
33
+
34
+ def add_sdcfileset(self,
35
+ design: Union[Design, str],
36
+ fileset: str,
37
+ clobber: bool = False,
38
+ step: Optional[str] = None, index: Optional[Union[str, int]] = None):
39
+ """
40
+ Adds an SDC fileset for a given design.
41
+
42
+ Args:
43
+ design (:class:`Design` or str): The design object or the name of the design to
44
+ associate the fileset with.
45
+ fileset (str): The name of the SDC fileset to add.
46
+ clobber (bool): If True, existing SDC filesets for the design at the specified
47
+ step/index will be overwritten.
48
+ If False (default), the SDC fileset will be added.
49
+ step (str, optional): step name.
50
+ index (str, optional): index name.
51
+
52
+ Raises:
53
+ TypeError: If `design` is not a Design object or a string, or if `fileset` is not
54
+ a string.
55
+ """
56
+ if isinstance(design, Design):
57
+ design = design.name
58
+
59
+ if not isinstance(design, str):
60
+ raise TypeError("design must be a design object or string")
61
+
62
+ if not isinstance(fileset, str):
63
+ raise TypeError("fileset must be a string")
64
+
65
+ if clobber:
66
+ return self.set("sdcfileset", (design, fileset), step=step, index=index)
67
+ else:
68
+ return self.add("sdcfileset", (design, fileset), step=step, index=index)
69
+
70
+ def get_sdcfileset(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
71
+ -> List[Tuple[str, str]]:
72
+ """
73
+ Gets the list of SDC filesets.
74
+
75
+ Args:
76
+ step (str, optional): step name.
77
+ index (str, optional): index name.
78
+
79
+ Returns:
80
+ A list of tuples, where each tuple contains the design name and the SDC fileset name.
81
+ """
82
+ return self.get("sdcfileset", step=step, index=index)
@@ -17,12 +17,6 @@ proc sc_root {} {
17
17
 
18
18
  {% include 'tools/_common/tcl/sc_schema_access.tcl' %}
19
19
 
20
- # Redefine
21
- proc _sc_cfg_get_debug { args } {
22
- {% if record_access %}puts "{{ record_access_id }} [join $args ,]"{% endif %}
23
- }
24
-
25
-
26
20
  #############################################
27
21
  # Tool variables
28
22
  #############################################
@@ -734,7 +734,7 @@ class Flowgraph(NamedSchema, DocsSchema):
734
734
  """
735
735
  return Flowgraph.__name__
736
736
 
737
- def __get_graph_information(self):
737
+ def __get_graph_information(self, landscape):
738
738
  '''
739
739
  Internal helper to gather all node and edge info for graphviz.
740
740
 
@@ -744,6 +744,14 @@ class Flowgraph(NamedSchema, DocsSchema):
744
744
  - dict: Information about each node.
745
745
  - list: Information about each edge.
746
746
  '''
747
+ from siliconcompiler import Project
748
+ if landscape:
749
+ out_label_suffix = ':e'
750
+ in_label_suffix = ':w'
751
+ else:
752
+ out_label_suffix = ':s'
753
+ in_label_suffix = ':n'
754
+
747
755
  # Setup nodes
748
756
  node_exec_order = self.get_execution_order()
749
757
 
@@ -752,13 +760,9 @@ class Flowgraph(NamedSchema, DocsSchema):
752
760
  for step, index in rank_nodes:
753
761
  node_rank[f'{step}/{index}'] = rank
754
762
 
755
- # TODO: This appears to be unused, legacy from when files were nodes
756
- all_graph_inputs = set()
757
-
758
763
  exit_nodes = [f'{step}/{index}' for step, index in self.get_exit_nodes()]
759
764
 
760
765
  nodes = {}
761
- edges = []
762
766
 
763
767
  def clean_label(label):
764
768
  return label.replace("<", "").replace(">", "")
@@ -770,19 +774,34 @@ class Flowgraph(NamedSchema, DocsSchema):
770
774
 
771
775
  runtime_flow = RuntimeFlowgraph(self)
772
776
 
777
+ root = self._parent()._parent() # Brittle since this relies on location
778
+ if not isinstance(root, Project):
779
+ root = None
780
+ has_io = False
781
+ else:
782
+ has_io = True
783
+
773
784
  for step, index in all_nodes:
774
785
  graph_node = self.get_graph_node(step, index)
775
- tool = graph_node.get("tool")
776
- task = graph_node.get("task")
786
+ tool: str = graph_node.get("tool")
787
+ task: str = graph_node.get("task")
777
788
 
778
789
  inputs = []
779
790
  outputs = []
791
+ if has_io and root:
792
+ try:
793
+ inputs = root.get('tool', tool, 'task', task, 'input', step=step, index=index)
794
+ outputs = root.get('tool', tool, 'task', task, 'output', step=step, index=index)
795
+ except KeyError:
796
+ has_io = False
797
+ if not inputs and not outputs:
798
+ has_io = False
780
799
 
781
800
  node = f'{step}/{index}'
782
801
 
783
802
  nodes[node] = {
784
803
  "node": (step, index),
785
- "file_inputs": inputs,
804
+ "file_inputs": set(inputs),
786
805
  "inputs": {clean_text(f): f'input-{clean_label(f)}' for f in sorted(inputs)},
787
806
  "outputs": {clean_text(f): f'output-{clean_label(f)}' for f in sorted(outputs)},
788
807
  "task": f'{tool}/{task}' if tool != 'builtin' else task,
@@ -800,23 +819,41 @@ class Flowgraph(NamedSchema, DocsSchema):
800
819
  rank_diff[in_node_name] = node_rank[node] - node_rank[in_node_name]
801
820
  nodes[node]["rank_diff"] = rank_diff
802
821
 
822
+ if not has_io:
823
+ for info in nodes.values():
824
+ info["inputs"] = []
825
+ info["outputs"] = []
826
+
827
+ edges = []
828
+ edges_io = []
803
829
  for step, index in all_nodes:
804
830
  node = f'{step}/{index}'
805
831
  all_inputs = []
806
832
  for in_step, in_index in self.get_graph_node(step, index).get_input():
807
833
  all_inputs.append(f'{in_step}/{in_index}')
808
834
  for item in all_inputs:
809
- edges.append((item, node, 1 if node in exit_nodes else 2))
810
-
811
- return all_graph_inputs, nodes, edges
835
+ edges.append((f"{item}{out_label_suffix}",
836
+ f"{node}{in_label_suffix}",
837
+ 1 if node in exit_nodes else 2))
838
+ if has_io:
839
+ for infile in nodes[node]["inputs"]:
840
+ if infile in nodes[item]["outputs"]:
841
+ outlabel = f"{item}:output-{clean_label(infile)}"
842
+ inlabel = f"{node}:input-{clean_label(infile)}"
843
+ edges_io.append((f"{outlabel}{out_label_suffix}",
844
+ f"{inlabel}{in_label_suffix}",
845
+ 1 if node in exit_nodes else 2))
846
+
847
+ return nodes, edges, edges_io, has_io
812
848
 
813
849
  def write_flowgraph(self, filename: str,
814
- fillcolor: Optional[str] = '#ffffff',
815
- fontcolor: Optional[str] = '#000000',
816
- background: Optional[str] = 'transparent',
817
- fontsize: Optional[Union[int, str]] = 14,
818
- border: Optional[bool] = True,
819
- landscape: Optional[bool] = False) -> None:
850
+ fillcolor: str = '#ffffff',
851
+ fontcolor: str = '#000000',
852
+ background: str = 'transparent',
853
+ fontsize: Union[int, str] = 14,
854
+ border: bool = True,
855
+ landscape: bool = False,
856
+ show_io: Optional[bool] = None) -> None:
820
857
  r'''
821
858
  Renders and saves the compilation flowgraph to a file.
822
859
 
@@ -837,6 +874,7 @@ class Flowgraph(NamedSchema, DocsSchema):
837
874
  fontsize (str): Node text font size
838
875
  border (bool): Enables node border if True
839
876
  landscape (bool): Renders graph in landscape layout if True
877
+ show_io (bool): Add file input/outputs to graph
840
878
 
841
879
  Examples:
842
880
  >>> flow.write_flowgraph('mydump.png')
@@ -858,21 +896,22 @@ class Flowgraph(NamedSchema, DocsSchema):
858
896
  # controlling graph direction
859
897
  if landscape:
860
898
  rankdir = 'LR'
861
- out_label_suffix = ':e'
862
- in_label_suffix = ':w'
863
899
  else:
864
900
  rankdir = 'TB'
865
- out_label_suffix = ':s'
866
- in_label_suffix = ':n'
867
901
 
868
- all_graph_inputs, nodes, edges = self.__get_graph_information()
902
+ nodes, edges, edges_io, has_io = self.__get_graph_information(landscape)
869
903
 
870
- out_label_suffix = ''
871
- in_label_suffix = ''
904
+ if show_io is None:
905
+ show_io = has_io
906
+ elif not has_io:
907
+ show_io = False
872
908
 
873
909
  dot = graphviz.Digraph(format=fileformat)
874
910
  dot.graph_attr['rankdir'] = rankdir
875
911
  dot.attr(bgcolor=background)
912
+ if show_io:
913
+ dot.graph_attr['concentrate'] = 'true'
914
+ dot.graph_attr['ranksep'] = '0.75'
876
915
 
877
916
  subgraphs = {
878
917
  "graphs": {},
@@ -901,24 +940,25 @@ class Flowgraph(NamedSchema, DocsSchema):
901
940
 
902
941
  subgraph_temp["nodes"].append(node)
903
942
 
904
- with dot.subgraph(name='inputs') as input_graph:
905
- input_graph.graph_attr['cluster'] = 'true'
906
- input_graph.graph_attr['color'] = background
907
-
908
- # add inputs
909
- for graph_input in sorted(all_graph_inputs):
910
- input_graph.node(
911
- graph_input, label=graph_input, bordercolor=fontcolor, style='filled',
912
- fontcolor=fontcolor, fontsize=fontsize, ordering="in",
913
- penwidth=penwidth, fillcolor=fillcolor, shape="box")
914
-
915
943
  def make_node(graph, node, prefix):
916
944
  '''Helper function to create a node in the graphviz object.'''
917
945
  info = nodes[node]
918
946
 
919
- shape = "oval"
947
+ shape = "oval" if not show_io else "Mrecord"
920
948
  task_label = f"\\n ({info['task']})" if info['task'] is not None else ""
921
- labelname = f"{node.replace(prefix, '')}{task_label}"
949
+ if show_io:
950
+ input_labels = [f"<{ikey}> {ifile}" for ifile, ikey in info['inputs'].items()]
951
+ output_labels = [f"<{okey}> {ofile}" for ofile, okey in info['outputs'].items()]
952
+ center_text = f"\\n {node.replace(prefix, '')} {task_label} \\n\\n"
953
+ labelname = "{"
954
+ if input_labels:
955
+ labelname += f"{{ {' | '.join(input_labels)} }} |"
956
+ labelname += center_text
957
+ if output_labels:
958
+ labelname += f"| {{ {' | '.join(output_labels)} }}"
959
+ labelname += "}"
960
+ else:
961
+ labelname = f"{node.replace(prefix, '')}{task_label}"
922
962
 
923
963
  graph.node(node, label=labelname, bordercolor=fontcolor, style='filled',
924
964
  fontcolor=fontcolor, fontsize=fontsize, ordering="in",
@@ -975,8 +1015,12 @@ class Flowgraph(NamedSchema, DocsSchema):
975
1015
  build_graph(subgraphs, dot, "")
976
1016
 
977
1017
  # Add all the edges
978
- for edge0, edge1, weight in edges:
979
- dot.edge(f'{edge0}{out_label_suffix}', f'{edge1}{in_label_suffix}', weight=str(weight))
1018
+ if show_io:
1019
+ for edge0, edge1, weight in edges_io:
1020
+ dot.edge(edge0, edge1, weight=str(weight))
1021
+ else:
1022
+ for edge0, edge1, weight in edges:
1023
+ dot.edge(edge0, edge1, weight=str(weight))
980
1024
 
981
1025
  dot.render(filename=fileroot, cleanup=True)
982
1026
 
@@ -1270,21 +1314,30 @@ class RuntimeFlowgraph:
1270
1314
  if (step, index) not in self.get_nodes():
1271
1315
  raise ValueError(f"{step}/{index} is not a valid node")
1272
1316
 
1317
+ base_nodes = set(self.__base.get_nodes())
1318
+
1273
1319
  if record is None:
1274
1320
  inputs = set()
1275
1321
  for in_step, in_index in self.__base.get(step, index, "input"):
1276
- if (in_step, in_index) not in self.get_nodes():
1322
+ if (in_step, in_index) not in base_nodes:
1323
+ continue
1324
+ if (in_step, in_index) in self.__prune:
1277
1325
  continue
1278
1326
  inputs.add((in_step, in_index))
1279
1327
  return sorted(inputs)
1280
1328
 
1281
1329
  inputs = set()
1282
1330
  for in_step, in_index in self.__base.get(step, index, "input"):
1283
- if (in_step, in_index) not in self.get_nodes():
1331
+ if (in_step, in_index) not in base_nodes:
1332
+ continue
1333
+ if (in_step, in_index) in self.__prune:
1284
1334
  continue
1285
1335
 
1286
1336
  if record.get("status", step=in_step, index=in_index) == NodeStatus.SKIPPED:
1287
- inputs.update(self.get_node_inputs(in_step, in_index, record=record))
1337
+ if (in_step, in_index) not in self.get_nodes():
1338
+ inputs.update(self.__base.get(in_step, in_index, "input"))
1339
+ else:
1340
+ inputs.update(self.get_node_inputs(in_step, in_index, record=record))
1288
1341
  else:
1289
1342
  inputs.add((in_step, in_index))
1290
1343
  return sorted(inputs)
@@ -74,8 +74,8 @@ class GenerateOpenRCXFlow(Flowgraph):
74
74
  @classmethod
75
75
  def make_docs(cls):
76
76
  from siliconcompiler.tools.builtin.nop import NOPTask
77
- return [GenerateOpenRCXFlow(NOPTask(), corners=3, serial_extraction=False),
78
- GenerateOpenRCXFlow(NOPTask(), corners=3, serial_extraction=True)]
77
+ return [cls(NOPTask(), corners=3, serial_extraction=False),
78
+ cls(NOPTask(), corners=3, serial_extraction=True)]
79
79
 
80
80
 
81
81
  ##################################################
@@ -0,0 +1,37 @@
1
+ from siliconcompiler import Flowgraph
2
+
3
+ from siliconcompiler.tools.builtin import importfiles
4
+ from siliconcompiler.tools.klayout import operations
5
+ from siliconcompiler.tools.klayout import screenshot
6
+ from siliconcompiler.tools.montage import tile
7
+
8
+
9
+ class HighResScreenshotFlow(Flowgraph):
10
+ '''A high resolution screenshot flow.
11
+
12
+ This flow is designed to generate a high resolution design image from a GDS
13
+ or OAS file by preparing the layout, taking tiled screenshots, and merging
14
+ them into a single image.
15
+ '''
16
+ def __init__(self, name: str = "screenshotflow", add_prepare: bool = True):
17
+ super().__init__(name)
18
+
19
+ self.node('import', importfiles.ImportFilesTask())
20
+ if add_prepare:
21
+ self.node('prepare', operations.OperationsTask())
22
+ self.node('screenshot', screenshot.ScreenshotTask())
23
+ self.node('merge', tile.TileTask())
24
+
25
+ if add_prepare:
26
+ self.edge('import', 'prepare')
27
+ self.edge('prepare', 'screenshot')
28
+ else:
29
+ self.edge('import', 'screenshot')
30
+ self.edge('screenshot', 'merge')
31
+
32
+ @classmethod
33
+ def make_docs(cls):
34
+ return [
35
+ cls(add_prepare=True),
36
+ cls(add_prepare=False)
37
+ ]
@@ -276,7 +276,8 @@ class StdCellLibrary(DependencySchema, ToolLibrarySchema):
276
276
  'filler',
277
277
  'tap',
278
278
  'endcap',
279
- 'antenna']:
279
+ 'antenna',
280
+ 'physicalonly']:
280
281
  schema.insert(
281
282
  'asic', 'cells', item,
282
283
  Parameter(
@@ -23,7 +23,7 @@ import uuid
23
23
 
24
24
  import os.path
25
25
 
26
- from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, ClassVar
26
+ from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, Final
27
27
 
28
28
  from fasteners import InterProcessLock
29
29
  from importlib.metadata import distributions, distribution
@@ -32,6 +32,7 @@ from urllib import parse as url_parse
32
32
 
33
33
  from siliconcompiler.utils import get_plugins, default_cache_dir
34
34
  from siliconcompiler.utils.paths import cwdirsafe
35
+ from siliconcompiler.utils.multiprocessing import MPManager
35
36
 
36
37
  if TYPE_CHECKING:
37
38
  from siliconcompiler.project import Project
@@ -54,12 +55,7 @@ class Resolver:
54
55
  source (str): The URI or path specifying the data source.
55
56
  reference (str): A version, commit hash, or tag for remote sources.
56
57
  """
57
- _RESOLVERS_LOCK: ClassVar[threading.Lock] = threading.Lock()
58
- _RESOLVERS: ClassVar[Dict[str, Type["Resolver"]]] = {}
59
- __STORAGE: str = "__Resolver_cache_id"
60
-
61
- __CACHE_LOCK: ClassVar[threading.Lock] = threading.Lock()
62
- __CACHE: ClassVar[Dict[str, Dict[str, str]]] = {}
58
+ __STORAGE: Final[str] = "__Resolver_cache_id"
63
59
 
64
60
  def __init__(self, name: str,
65
61
  root: Optional[Union["Project", "BaseSchema"]],
@@ -89,18 +85,19 @@ class Resolver:
89
85
  built-in resolvers (file, key, python) and any resolvers provided
90
86
  by external plugins.
91
87
  """
92
- with Resolver._RESOLVERS_LOCK:
93
- Resolver._RESOLVERS.clear()
88
+ settings = MPManager().get_transient_settings()
89
+ if settings.get_category("resolvers"):
90
+ # Already populated
91
+ return
94
92
 
95
- Resolver._RESOLVERS.update({
96
- "": FileResolver,
97
- "file": FileResolver,
98
- "key": KeyPathResolver,
99
- "python": PythonPathResolver
100
- })
93
+ settings.set("resolvers", "", FileResolver)
94
+ settings.set("resolvers", "file", FileResolver)
95
+ settings.set("resolvers", "key", KeyPathResolver)
96
+ settings.set("resolvers", "python", PythonPathResolver)
101
97
 
102
- for resolver in get_plugins("path_resolver"):
103
- Resolver._RESOLVERS.update(resolver())
98
+ for resolver in get_plugins("path_resolver"):
99
+ for scheme, res in resolver().items():
100
+ settings.set("resolvers", scheme, res)
104
101
 
105
102
  @staticmethod
106
103
  def find_resolver(source: str) -> Type["Resolver"]:
@@ -119,13 +116,13 @@ class Resolver:
119
116
  if os.path.isabs(source):
120
117
  return FileResolver
121
118
 
122
- if not Resolver._RESOLVERS:
123
- Resolver.populate_resolvers()
119
+ Resolver.populate_resolvers()
124
120
 
125
121
  url = url_parse.urlparse(source)
126
- with Resolver._RESOLVERS_LOCK:
127
- if url.scheme in Resolver._RESOLVERS:
128
- return Resolver._RESOLVERS[url.scheme]
122
+ settings = MPManager().get_transient_settings()
123
+ resolver = settings.get("resolvers", url.scheme, None)
124
+ if resolver:
125
+ return resolver
129
126
 
130
127
  raise ValueError(f"Source URI '{source}' is not supported")
131
128
 
@@ -231,15 +228,14 @@ class Resolver:
231
228
  if root is None:
232
229
  return None
233
230
 
234
- with Resolver.__CACHE_LOCK:
235
- root_id = Resolver.__get_root_id(root)
236
- if root_id not in Resolver.__CACHE:
237
- Resolver.__CACHE[root_id] = {}
231
+ cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
238
232
 
239
- if name:
240
- return Resolver.__CACHE[root_id].get(name, None)
233
+ settings = MPManager().get_transient_settings()
241
234
 
242
- return Resolver.__CACHE[root_id].copy()
235
+ if name is not None:
236
+ return settings.get(cache_id, name, None)
237
+
238
+ return settings.get_category(cache_id)
243
239
 
244
240
  @staticmethod
245
241
  def set_cache(root: Optional[Union["Project", "BaseSchema"]],
@@ -256,11 +252,11 @@ class Resolver:
256
252
  if root is None:
257
253
  return
258
254
 
259
- with Resolver.__CACHE_LOCK:
260
- root_id = Resolver.__get_root_id(root)
261
- if root_id not in Resolver.__CACHE:
262
- Resolver.__CACHE[root_id] = {}
263
- Resolver.__CACHE[root_id][name] = str(path)
255
+ cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
256
+
257
+ settings = MPManager().get_transient_settings()
258
+
259
+ settings.set(cache_id, name, str(path))
264
260
 
265
261
  @staticmethod
266
262
  def reset_cache(root: Optional[Union["Project", "BaseSchema"]]) -> None:
@@ -272,11 +268,11 @@ class Resolver:
272
268
  """
273
269
  if root is None:
274
270
  return
271
+ cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
272
+
273
+ settings = MPManager().get_transient_settings()
275
274
 
276
- with Resolver.__CACHE_LOCK:
277
- root_id = Resolver.__get_root_id(root)
278
- if root_id in Resolver.__CACHE:
279
- del Resolver.__CACHE[root_id]
275
+ settings.delete(cache_id)
280
276
 
281
277
  def get_path(self) -> str:
282
278
  """
@@ -334,9 +330,6 @@ class RemoteResolver(Resolver):
334
330
  both thread-safe and process-safe locking to prevent race conditions when
335
331
  multiple SC instances try to download the same resource simultaneously.
336
332
  """
337
- _CACHE_LOCKS = {}
338
- _CACHE_LOCK = threading.Lock()
339
-
340
333
  def __init__(self, name: str,
341
334
  root: Optional[Union["Project", "BaseSchema"]],
342
335
  source: str,
@@ -429,10 +422,11 @@ class RemoteResolver(Resolver):
429
422
 
430
423
  def thread_lock(self) -> threading.Lock:
431
424
  """Gets a threading.Lock specific to this resolver instance."""
432
- with RemoteResolver._CACHE_LOCK:
433
- if self.name not in RemoteResolver._CACHE_LOCKS:
434
- RemoteResolver._CACHE_LOCKS[self.name] = threading.Lock()
435
- return RemoteResolver._CACHE_LOCKS[self.name]
425
+ settings = MPManager().get_transient_settings()
426
+ locks = settings.get_category("resolver-remote-cache-locks")
427
+ if self.name not in locks:
428
+ settings.set("resolver-remote-cache-locks", self.name, threading.Lock(), keep=True)
429
+ return settings.get("resolver-remote-cache-locks", self.name)
436
430
 
437
431
  @contextlib.contextmanager
438
432
  def __thread_lock(self):
@@ -205,7 +205,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
205
205
  Returns:
206
206
  str: The name of the top-level design.
207
207
  """
208
- return self.get("option", "design")
208
+ return self.option.get_design()
209
209
 
210
210
  @property
211
211
  def design(self) -> Design:
@@ -373,6 +373,9 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
373
373
  """
374
374
  if isinstance(obj, DependencySchema):
375
375
  for dep in obj.get_dep():
376
+ if isinstance(dep, (Design, LibrarySchema)):
377
+ if self._has_library(dep.name):
378
+ continue
376
379
  self.add_dep(dep)
377
380
 
378
381
  # Rebuild dependencies to ensure instances are correct