siliconcompiler 0.35.3__py3-none-any.whl → 0.36.0__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (96)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_issue.py +18 -2
  3. siliconcompiler/checklist.py +2 -1
  4. siliconcompiler/constraints/__init__.py +4 -1
  5. siliconcompiler/constraints/asic_component.py +49 -11
  6. siliconcompiler/constraints/asic_floorplan.py +23 -21
  7. siliconcompiler/constraints/asic_pins.py +55 -17
  8. siliconcompiler/constraints/asic_timing.py +280 -57
  9. siliconcompiler/constraints/fpga_timing.py +212 -18
  10. siliconcompiler/constraints/timing_mode.py +82 -0
  11. siliconcompiler/data/templates/replay/replay.sh.j2 +27 -14
  12. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +0 -6
  13. siliconcompiler/flowgraph.py +95 -42
  14. siliconcompiler/flows/generate_openroad_rcx.py +2 -2
  15. siliconcompiler/flows/highresscreenshotflow.py +37 -0
  16. siliconcompiler/library.py +2 -1
  17. siliconcompiler/package/__init__.py +56 -51
  18. siliconcompiler/project.py +13 -2
  19. siliconcompiler/scheduler/docker.py +24 -25
  20. siliconcompiler/scheduler/scheduler.py +143 -100
  21. siliconcompiler/scheduler/schedulernode.py +138 -22
  22. siliconcompiler/scheduler/slurm.py +120 -35
  23. siliconcompiler/scheduler/taskscheduler.py +19 -23
  24. siliconcompiler/schema/_metadata.py +1 -1
  25. siliconcompiler/schema/editableschema.py +29 -0
  26. siliconcompiler/schema/namedschema.py +2 -4
  27. siliconcompiler/schema/parametervalue.py +14 -2
  28. siliconcompiler/schema_support/cmdlineschema.py +0 -3
  29. siliconcompiler/schema_support/dependencyschema.py +0 -6
  30. siliconcompiler/schema_support/option.py +82 -1
  31. siliconcompiler/schema_support/pathschema.py +7 -13
  32. siliconcompiler/schema_support/record.py +4 -3
  33. siliconcompiler/tool.py +105 -52
  34. siliconcompiler/tools/_common/tcl/sc_schema_access.tcl +0 -6
  35. siliconcompiler/tools/keplerformal/__init__.py +7 -0
  36. siliconcompiler/tools/keplerformal/lec.py +112 -0
  37. siliconcompiler/tools/klayout/__init__.py +3 -0
  38. siliconcompiler/tools/klayout/screenshot.py +66 -1
  39. siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py +1 -0
  40. siliconcompiler/tools/klayout/scripts/klayout_export.py +11 -40
  41. siliconcompiler/tools/klayout/scripts/klayout_operations.py +1 -0
  42. siliconcompiler/tools/klayout/scripts/klayout_show.py +5 -4
  43. siliconcompiler/tools/klayout/scripts/klayout_utils.py +16 -5
  44. siliconcompiler/tools/montage/tile.py +26 -12
  45. siliconcompiler/tools/openroad/__init__.py +27 -1
  46. siliconcompiler/tools/openroad/_apr.py +107 -14
  47. siliconcompiler/tools/openroad/clock_tree_synthesis.py +1 -0
  48. siliconcompiler/tools/openroad/global_placement.py +1 -0
  49. siliconcompiler/tools/openroad/init_floorplan.py +119 -7
  50. siliconcompiler/tools/openroad/power_grid_analysis.py +174 -0
  51. siliconcompiler/tools/openroad/repair_design.py +1 -0
  52. siliconcompiler/tools/openroad/repair_timing.py +1 -0
  53. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +1 -1
  54. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +91 -18
  55. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +148 -0
  56. siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +1 -1
  57. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +8 -10
  58. siliconcompiler/tools/openroad/scripts/common/procs.tcl +15 -6
  59. siliconcompiler/tools/openroad/scripts/common/read_liberty.tcl +2 -2
  60. siliconcompiler/tools/openroad/scripts/common/reports.tcl +7 -4
  61. siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +1 -1
  62. siliconcompiler/tools/openroad/scripts/common/write_data_physical.tcl +8 -0
  63. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +16 -12
  64. siliconcompiler/tools/openroad/scripts/rcx/sc_rcx_bench.tcl +2 -4
  65. siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -1
  66. siliconcompiler/tools/openroad/write_data.py +2 -2
  67. siliconcompiler/tools/opensta/__init__.py +1 -1
  68. siliconcompiler/tools/opensta/scripts/sc_check_library.tcl +2 -2
  69. siliconcompiler/tools/opensta/scripts/sc_report_libraries.tcl +2 -2
  70. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +13 -10
  71. siliconcompiler/tools/opensta/timing.py +6 -2
  72. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +11 -0
  73. siliconcompiler/tools/vivado/scripts/sc_place.tcl +11 -0
  74. siliconcompiler/tools/vivado/scripts/sc_route.tcl +11 -0
  75. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +10 -0
  76. siliconcompiler/tools/vpr/__init__.py +28 -0
  77. siliconcompiler/tools/yosys/scripts/sc_screenshot.tcl +1 -1
  78. siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +40 -4
  79. siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl +15 -5
  80. siliconcompiler/tools/yosys/syn_asic.py +42 -0
  81. siliconcompiler/tools/yosys/syn_fpga.py +8 -0
  82. siliconcompiler/toolscripts/_tools.json +12 -7
  83. siliconcompiler/toolscripts/ubuntu22/install-keplerformal.sh +72 -0
  84. siliconcompiler/toolscripts/ubuntu24/install-keplerformal.sh +72 -0
  85. siliconcompiler/utils/__init__.py +243 -51
  86. siliconcompiler/utils/curation.py +89 -56
  87. siliconcompiler/utils/issue.py +6 -1
  88. siliconcompiler/utils/multiprocessing.py +46 -2
  89. siliconcompiler/utils/paths.py +21 -0
  90. siliconcompiler/utils/settings.py +162 -0
  91. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/METADATA +5 -4
  92. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/RECORD +96 -87
  93. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/WHEEL +0 -0
  94. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/entry_points.txt +0 -0
  95. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/licenses/LICENSE +0 -0
  96. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.36.0.dist-info}/top_level.txt +0 -0
siliconcompiler/flowgraph.py:
@@ -734,7 +734,7 @@ class Flowgraph(NamedSchema, DocsSchema):
         """
         return Flowgraph.__name__
 
-    def __get_graph_information(self):
+    def __get_graph_information(self, landscape):
         '''
         Internal helper to gather all node and edge info for graphviz.
 
@@ -744,6 +744,14 @@ class Flowgraph(NamedSchema, DocsSchema):
             - dict: Information about each node.
             - list: Information about each edge.
         '''
+        from siliconcompiler import Project
+        if landscape:
+            out_label_suffix = ':e'
+            in_label_suffix = ':w'
+        else:
+            out_label_suffix = ':s'
+            in_label_suffix = ':n'
+
         # Setup nodes
         node_exec_order = self.get_execution_order()
 
@@ -752,13 +760,9 @@ class Flowgraph(NamedSchema, DocsSchema):
             for step, index in rank_nodes:
                 node_rank[f'{step}/{index}'] = rank
 
-        # TODO: This appears to be unused, legacy from when files were nodes
-        all_graph_inputs = set()
-
         exit_nodes = [f'{step}/{index}' for step, index in self.get_exit_nodes()]
 
         nodes = {}
-        edges = []
 
         def clean_label(label):
             return label.replace("<", "").replace(">", "")
@@ -770,19 +774,34 @@ class Flowgraph(NamedSchema, DocsSchema):
 
         runtime_flow = RuntimeFlowgraph(self)
 
+        root = self._parent()._parent()  # Brittle since this relies on location
+        if not isinstance(root, Project):
+            root = None
+            has_io = False
+        else:
+            has_io = True
+
         for step, index in all_nodes:
             graph_node = self.get_graph_node(step, index)
-            tool = graph_node.get("tool")
-            task = graph_node.get("task")
+            tool: str = graph_node.get("tool")
+            task: str = graph_node.get("task")
 
             inputs = []
             outputs = []
+            if has_io and root:
+                try:
+                    inputs = root.get('tool', tool, 'task', task, 'input', step=step, index=index)
+                    outputs = root.get('tool', tool, 'task', task, 'output', step=step, index=index)
+                except KeyError:
+                    has_io = False
+                if not inputs and not outputs:
+                    has_io = False
 
             node = f'{step}/{index}'
 
             nodes[node] = {
                 "node": (step, index),
-                "file_inputs": inputs,
+                "file_inputs": set(inputs),
                 "inputs": {clean_text(f): f'input-{clean_label(f)}' for f in sorted(inputs)},
                 "outputs": {clean_text(f): f'output-{clean_label(f)}' for f in sorted(outputs)},
                 "task": f'{tool}/{task}' if tool != 'builtin' else task,
@@ -800,23 +819,41 @@ class Flowgraph(NamedSchema, DocsSchema):
                 rank_diff[in_node_name] = node_rank[node] - node_rank[in_node_name]
             nodes[node]["rank_diff"] = rank_diff
 
+        if not has_io:
+            for info in nodes.values():
+                info["inputs"] = []
+                info["outputs"] = []
+
+        edges = []
+        edges_io = []
         for step, index in all_nodes:
             node = f'{step}/{index}'
             all_inputs = []
             for in_step, in_index in self.get_graph_node(step, index).get_input():
                 all_inputs.append(f'{in_step}/{in_index}')
             for item in all_inputs:
-                edges.append((item, node, 1 if node in exit_nodes else 2))
-
-        return all_graph_inputs, nodes, edges
+                edges.append((f"{item}{out_label_suffix}",
+                              f"{node}{in_label_suffix}",
+                              1 if node in exit_nodes else 2))
+                if has_io:
+                    for infile in nodes[node]["inputs"]:
+                        if infile in nodes[item]["outputs"]:
+                            outlabel = f"{item}:output-{clean_label(infile)}"
+                            inlabel = f"{node}:input-{clean_label(infile)}"
+                            edges_io.append((f"{outlabel}{out_label_suffix}",
+                                             f"{inlabel}{in_label_suffix}",
+                                             1 if node in exit_nodes else 2))
+
+        return nodes, edges, edges_io, has_io
 
     def write_flowgraph(self, filename: str,
-                        fillcolor: Optional[str] = '#ffffff',
-                        fontcolor: Optional[str] = '#000000',
-                        background: Optional[str] = 'transparent',
-                        fontsize: Optional[Union[int, str]] = 14,
-                        border: Optional[bool] = True,
-                        landscape: Optional[bool] = False) -> None:
+                        fillcolor: str = '#ffffff',
+                        fontcolor: str = '#000000',
+                        background: str = 'transparent',
+                        fontsize: Union[int, str] = 14,
+                        border: bool = True,
+                        landscape: bool = False,
+                        show_io: Optional[bool] = None) -> None:
         r'''
         Renders and saves the compilation flowgraph to a file.
 
@@ -837,6 +874,7 @@ class Flowgraph(NamedSchema, DocsSchema):
             fontsize (str): Node text font size
             border (bool): Enables node border if True
             landscape (bool): Renders graph in landscape layout if True
+            show_io (bool): Adds file inputs/outputs to the graph
 
         Examples:
             >>> flow.write_flowgraph('mydump.png')
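
Usage sketch for the new flag, building on the docstring example above (`flow` is assumed to be an already-configured Flowgraph):

    flow.write_flowgraph('mydump.png')                 # default: draw file I/O ports when available
    flow.write_flowgraph('mydump.png', show_io=False)  # classic node-only rendering
    flow.write_flowgraph('mydump.png', show_io=True)   # request ports; downgraded when no I/O data exists
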
@@ -858,21 +896,22 @@ class Flowgraph(NamedSchema, DocsSchema):
         # controlling graph direction
         if landscape:
             rankdir = 'LR'
-            out_label_suffix = ':e'
-            in_label_suffix = ':w'
         else:
             rankdir = 'TB'
-            out_label_suffix = ':s'
-            in_label_suffix = ':n'
 
-        all_graph_inputs, nodes, edges = self.__get_graph_information()
+        nodes, edges, edges_io, has_io = self.__get_graph_information(landscape)
 
-        out_label_suffix = ''
-        in_label_suffix = ''
+        if show_io is None:
+            show_io = has_io
+        elif not has_io:
+            show_io = False
 
         dot = graphviz.Digraph(format=fileformat)
         dot.graph_attr['rankdir'] = rankdir
         dot.attr(bgcolor=background)
+        if show_io:
+            dot.graph_attr['concentrate'] = 'true'
+            dot.graph_attr['ranksep'] = '0.75'
 
         subgraphs = {
             "graphs": {},
@@ -901,24 +940,25 @@ class Flowgraph(NamedSchema, DocsSchema):
 
                 subgraph_temp["nodes"].append(node)
 
-        with dot.subgraph(name='inputs') as input_graph:
-            input_graph.graph_attr['cluster'] = 'true'
-            input_graph.graph_attr['color'] = background
-
-            # add inputs
-            for graph_input in sorted(all_graph_inputs):
-                input_graph.node(
-                    graph_input, label=graph_input, bordercolor=fontcolor, style='filled',
-                    fontcolor=fontcolor, fontsize=fontsize, ordering="in",
-                    penwidth=penwidth, fillcolor=fillcolor, shape="box")
-
         def make_node(graph, node, prefix):
             '''Helper function to create a node in the graphviz object.'''
             info = nodes[node]
 
-            shape = "oval"
+            shape = "oval" if not show_io else "Mrecord"
             task_label = f"\\n ({info['task']})" if info['task'] is not None else ""
-            labelname = f"{node.replace(prefix, '')}{task_label}"
+            if show_io:
+                input_labels = [f"<{ikey}> {ifile}" for ifile, ikey in info['inputs'].items()]
+                output_labels = [f"<{okey}> {ofile}" for ofile, okey in info['outputs'].items()]
+                center_text = f"\\n {node.replace(prefix, '')} {task_label} \\n\\n"
+                labelname = "{"
+                if input_labels:
+                    labelname += f"{{ {' | '.join(input_labels)} }} |"
+                labelname += center_text
+                if output_labels:
+                    labelname += f"| {{ {' | '.join(output_labels)} }}"
+                labelname += "}"
+            else:
+                labelname = f"{node.replace(prefix, '')}{task_label}"
 
             graph.node(node, label=labelname, bordercolor=fontcolor, style='filled',
                        fontcolor=fontcolor, fontsize=fontsize, ordering="in",
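
The labels built above use graphviz record syntax, where nested braces alternate the layout direction and `<name>` declares a port that edges can attach to. A standalone sketch of the same technique (illustrative names, not SiliconCompiler code):

    import graphviz

    dot = graphviz.Digraph(format='png')
    # "{ {inputs} | center | {outputs} }": the outer braces stack the rows
    # vertically (default rankdir), the inner braces lay files out side by
    # side, and <...> declares a port for edge attachment.
    dot.node('syn/0', shape='Mrecord',
             label='{ \\n syn/0 \\n\\n | { <out0> top.vg } }')
    dot.node('floorplan/0', shape='Mrecord',
             label='{ { <in0> top.vg } | \\n floorplan/0 \\n\\n }')
    # Edges target "node:port:compass", mirroring the ':s'/':n' suffixes above
    dot.edge('syn/0:out0:s', 'floorplan/0:in0:n')
    dot.render(filename='io_ports', cleanup=True)
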
@@ -975,8 +1015,12 @@ class Flowgraph(NamedSchema, DocsSchema):
         build_graph(subgraphs, dot, "")
 
         # Add all the edges
-        for edge0, edge1, weight in edges:
-            dot.edge(f'{edge0}{out_label_suffix}', f'{edge1}{in_label_suffix}', weight=str(weight))
+        if show_io:
+            for edge0, edge1, weight in edges_io:
+                dot.edge(edge0, edge1, weight=str(weight))
+        else:
+            for edge0, edge1, weight in edges:
+                dot.edge(edge0, edge1, weight=str(weight))
 
         dot.render(filename=fileroot, cleanup=True)
 
@@ -1270,21 +1314,30 @@ class RuntimeFlowgraph:
         if (step, index) not in self.get_nodes():
             raise ValueError(f"{step}/{index} is not a valid node")
 
+        base_nodes = set(self.__base.get_nodes())
+
         if record is None:
             inputs = set()
             for in_step, in_index in self.__base.get(step, index, "input"):
-                if (in_step, in_index) not in self.get_nodes():
+                if (in_step, in_index) not in base_nodes:
+                    continue
+                if (in_step, in_index) in self.__prune:
                     continue
                 inputs.add((in_step, in_index))
             return sorted(inputs)
 
         inputs = set()
         for in_step, in_index in self.__base.get(step, index, "input"):
-            if (in_step, in_index) not in self.get_nodes():
+            if (in_step, in_index) not in base_nodes:
+                continue
+            if (in_step, in_index) in self.__prune:
                 continue
 
            if record.get("status", step=in_step, index=in_index) == NodeStatus.SKIPPED:
-                inputs.update(self.get_node_inputs(in_step, in_index, record=record))
+                if (in_step, in_index) not in self.get_nodes():
+                    inputs.update(self.__base.get(in_step, in_index, "input"))
+                else:
+                    inputs.update(self.get_node_inputs(in_step, in_index, record=record))
            else:
                inputs.add((in_step, in_index))
        return sorted(inputs)
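
The SKIPPED handling above can be pictured with a toy graph; a minimal standalone sketch (plain dicts, not the RuntimeFlowgraph API): when an input node was skipped, its own inputs are substituted for it.

    graph = {                                    # node -> its input nodes
        ('floorplan', '0'): [('syn', '0')],
        ('place', '0'): [('floorplan', '0')],
    }
    status = {('syn', '0'): 'success', ('floorplan', '0'): 'skipped'}

    def node_inputs(node):
        inputs = set()
        for in_node in graph.get(node, []):
            if status.get(in_node) == 'skipped':
                inputs.update(node_inputs(in_node))  # look through the skipped node
            else:
                inputs.add(in_node)
        return sorted(inputs)

    assert node_inputs(('place', '0')) == [('syn', '0')]
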
siliconcompiler/flows/generate_openroad_rcx.py:
@@ -74,8 +74,8 @@ class GenerateOpenRCXFlow(Flowgraph):
     @classmethod
     def make_docs(cls):
         from siliconcompiler.tools.builtin.nop import NOPTask
-        return [GenerateOpenRCXFlow(NOPTask(), corners=3, serial_extraction=False),
-                GenerateOpenRCXFlow(NOPTask(), corners=3, serial_extraction=True)]
+        return [cls(NOPTask(), corners=3, serial_extraction=False),
+                cls(NOPTask(), corners=3, serial_extraction=True)]
 
 
 ##################################################
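
The switch to `cls` matters for inheritance: a classmethod that instantiates `cls` produces instances of whichever subclass it is called on, where the hard-coded class name would not. A minimal sketch:

    class Base:
        @classmethod
        def make_docs(cls):
            return [cls()]                  # instantiates the calling class

    class Derived(Base):
        pass

    assert type(Derived.make_docs()[0]) is Derived   # a hard-coded Base() would fail this
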
siliconcompiler/flows/highresscreenshotflow.py (new file):
@@ -0,0 +1,37 @@
+from siliconcompiler import Flowgraph
+
+from siliconcompiler.tools.builtin import importfiles
+from siliconcompiler.tools.klayout import operations
+from siliconcompiler.tools.klayout import screenshot
+from siliconcompiler.tools.montage import tile
+
+
+class HighResScreenshotFlow(Flowgraph):
+    '''A high resolution screenshot flow.
+
+    This flow is designed to generate a high resolution design image from a GDS
+    or OAS file by preparing the layout, taking tiled screenshots, and merging
+    them into a single image.
+    '''
+    def __init__(self, name: str = "screenshotflow", add_prepare: bool = True):
+        super().__init__(name)
+
+        self.node('import', importfiles.ImportFilesTask())
+        if add_prepare:
+            self.node('prepare', operations.OperationsTask())
+        self.node('screenshot', screenshot.ScreenshotTask())
+        self.node('merge', tile.TileTask())
+
+        if add_prepare:
+            self.edge('import', 'prepare')
+            self.edge('prepare', 'screenshot')
+        else:
+            self.edge('import', 'screenshot')
+        self.edge('screenshot', 'merge')
+
+    @classmethod
+    def make_docs(cls):
+        return [
+            cls(add_prepare=True),
+            cls(add_prepare=False)
+        ]
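
A usage sketch for the new flow; only API visible in this diff is used, and attaching the flow to a project is outside this hunk:

    from siliconcompiler.flows.highresscreenshotflow import HighResScreenshotFlow

    flow = HighResScreenshotFlow()                    # import -> prepare -> screenshot -> merge
    quick = HighResScreenshotFlow(name='quickshot',
                                  add_prepare=False)  # import -> screenshot -> merge
    flow.write_flowgraph('screenshotflow.png')        # render the graph for inspection
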
siliconcompiler/library.py:
@@ -276,7 +276,8 @@ class StdCellLibrary(DependencySchema, ToolLibrarySchema):
                      'filler',
                      'tap',
                      'endcap',
-                     'antenna']:
+                     'antenna',
+                     'physicalonly']:
             schema.insert(
                 'asic', 'cells', item,
                 Parameter(
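
A hedged example of populating the new cell class; the list-of-cell-names value mirrors how the neighboring 'filler'/'tap' keys are used, and the cell names here are invented:

    lib.set('asic', 'cells', 'physicalonly', ['FILL_X1', 'DECAP_X2'])  # assumed value shape
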
siliconcompiler/package/__init__.py:
@@ -16,20 +16,23 @@ import logging
 import os
 import random
 import re
+import shutil
 import time
 import threading
 import uuid
 
 import os.path
 
-from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, ClassVar
+from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, Final
 
 from fasteners import InterProcessLock
 from importlib.metadata import distributions, distribution
 from pathlib import Path
 from urllib import parse as url_parse
 
-from siliconcompiler.utils import get_plugins
+from siliconcompiler.utils import get_plugins, default_cache_dir
+from siliconcompiler.utils.paths import cwdirsafe
+from siliconcompiler.utils.multiprocessing import MPManager
 
 if TYPE_CHECKING:
     from siliconcompiler.project import Project
@@ -52,12 +55,7 @@ class Resolver:
         source (str): The URI or path specifying the data source.
         reference (str): A version, commit hash, or tag for remote sources.
     """
-    _RESOLVERS_LOCK: ClassVar[threading.Lock] = threading.Lock()
-    _RESOLVERS: ClassVar[Dict[str, Type["Resolver"]]] = {}
-    __STORAGE: str = "__Resolver_cache_id"
-
-    __CACHE_LOCK: ClassVar[threading.Lock] = threading.Lock()
-    __CACHE: ClassVar[Dict[str, Dict[str, str]]] = {}
+    __STORAGE: Final[str] = "__Resolver_cache_id"
 
     def __init__(self, name: str,
                  root: Optional[Union["Project", "BaseSchema"]],
@@ -87,18 +85,19 @@ class Resolver:
        built-in resolvers (file, key, python) and any resolvers provided
        by external plugins.
        """
-        with Resolver._RESOLVERS_LOCK:
-            Resolver._RESOLVERS.clear()
+        settings = MPManager().get_transient_settings()
+        if settings.get_category("resolvers"):
+            # Already populated
+            return
 
-            Resolver._RESOLVERS.update({
-                "": FileResolver,
-                "file": FileResolver,
-                "key": KeyPathResolver,
-                "python": PythonPathResolver
-            })
+        settings.set("resolvers", "", FileResolver)
+        settings.set("resolvers", "file", FileResolver)
+        settings.set("resolvers", "key", KeyPathResolver)
+        settings.set("resolvers", "python", PythonPathResolver)
 
-            for resolver in get_plugins("path_resolver"):
-                Resolver._RESOLVERS.update(resolver())
+        for resolver in get_plugins("path_resolver"):
+            for scheme, res in resolver().items():
+                settings.set("resolvers", scheme, res)
 
     @staticmethod
     def find_resolver(source: str) -> Type["Resolver"]:
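
The plugin contract is unchanged: each "path_resolver" entry point is a callable returning a mapping of URI schemes to Resolver subclasses, now registered one scheme at a time. A sketch of such a plugin (MyResolver is hypothetical):

    def path_resolver():
        from mypackage.resolvers import MyResolver   # hypothetical RemoteResolver subclass
        return {'mypkg': MyResolver}                 # handles mypkg://... sources
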
@@ -117,13 +116,13 @@ class Resolver:
         if os.path.isabs(source):
             return FileResolver
 
-        if not Resolver._RESOLVERS:
-            Resolver.populate_resolvers()
+        Resolver.populate_resolvers()
 
         url = url_parse.urlparse(source)
-        with Resolver._RESOLVERS_LOCK:
-            if url.scheme in Resolver._RESOLVERS:
-                return Resolver._RESOLVERS[url.scheme]
+        settings = MPManager().get_transient_settings()
+        resolver = settings.get("resolvers", url.scheme, None)
+        if resolver:
+            return resolver
 
         raise ValueError(f"Source URI '{source}' is not supported")
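
Following the logic above, resolution behaves roughly as follows (a sketch, not doctest output):

    Resolver.find_resolver('/abs/path/data')    # absolute path -> FileResolver
    Resolver.find_resolver('file://rel/data')   # registered scheme -> FileResolver
    Resolver.find_resolver('bogus://data')      # unknown scheme -> ValueError
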
@@ -229,15 +228,14 @@ class Resolver:
         if root is None:
             return None
 
-        with Resolver.__CACHE_LOCK:
-            root_id = Resolver.__get_root_id(root)
-            if root_id not in Resolver.__CACHE:
-                Resolver.__CACHE[root_id] = {}
+        cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
+
+        settings = MPManager().get_transient_settings()
 
-            if name:
-                return Resolver.__CACHE[root_id].get(name, None)
+        if name is not None:
+            return settings.get(cache_id, name, None)
 
-            return Resolver.__CACHE[root_id].copy()
+        return settings.get_category(cache_id)
 
     @staticmethod
     def set_cache(root: Optional[Union["Project", "BaseSchema"]],
@@ -254,11 +252,11 @@ class Resolver:
         if root is None:
             return
 
-        with Resolver.__CACHE_LOCK:
-            root_id = Resolver.__get_root_id(root)
-            if root_id not in Resolver.__CACHE:
-                Resolver.__CACHE[root_id] = {}
-            Resolver.__CACHE[root_id][name] = str(path)
+        cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
+
+        settings = MPManager().get_transient_settings()
+
+        settings.set(cache_id, name, str(path))
 
     @staticmethod
     def reset_cache(root: Optional[Union["Project", "BaseSchema"]]) -> None:
@@ -270,11 +268,11 @@ class Resolver:
         """
         if root is None:
             return
+        cache_id = f"resolver-cache-{Resolver.__get_root_id(root)}"
 
-        with Resolver.__CACHE_LOCK:
-            root_id = Resolver.__get_root_id(root)
-            if root_id in Resolver.__CACHE:
-                del Resolver.__CACHE[root_id]
+        settings = MPManager().get_transient_settings()
+
+        settings.delete(cache_id)
 
     def get_path(self) -> str:
         """
@@ -332,9 +330,6 @@ class RemoteResolver(Resolver):
     both thread-safe and process-safe locking to prevent race conditions when
     multiple SC instances try to download the same resource simultaneously.
     """
-    _CACHE_LOCKS = {}
-    _CACHE_LOCK = threading.Lock()
-
     def __init__(self, name: str,
                  root: Optional[Union["Project", "BaseSchema"]],
                  source: str,
@@ -370,7 +365,7 @@ class RemoteResolver(Resolver):
         Returns:
             Path: The path to the cache directory.
         """
-        default_path = os.path.join(Path.home(), '.sc', 'cache')
+        default_path = default_cache_dir()
         if not root:
             return Path(default_path)
@@ -380,8 +375,7 @@ class RemoteResolver(Resolver):
         if path:
             path = root.find_files('option', 'cachedir', missing_ok=True)
             if not path:
-                path = os.path.join(getattr(root, "_Project__cwd", os.getcwd()),
-                                    root.get('option', 'cachedir'))
+                path = os.path.join(cwdirsafe(root), root.get('option', 'cachedir'))
         if not path:
             path = default_path
 
@@ -428,10 +422,11 @@ class RemoteResolver(Resolver):
 
     def thread_lock(self) -> threading.Lock:
         """Gets a threading.Lock specific to this resolver instance."""
-        with RemoteResolver._CACHE_LOCK:
-            if self.name not in RemoteResolver._CACHE_LOCKS:
-                RemoteResolver._CACHE_LOCKS[self.name] = threading.Lock()
-            return RemoteResolver._CACHE_LOCKS[self.name]
+        settings = MPManager().get_transient_settings()
+        locks = settings.get_category("resolver-remote-cache-locks")
+        if self.name not in locks:
+            settings.set("resolver-remote-cache-locks", self.name, threading.Lock(), keep=True)
+        return settings.get("resolver-remote-cache-locks", self.name)
 
     @contextlib.contextmanager
     def __thread_lock(self):
@@ -543,7 +538,17 @@ class RemoteResolver(Resolver):
         if self.check_cache():
             return self.cache_path
 
-        self.resolve_remote()
+        try:
+            self.resolve_remote()
+        except BaseException as e:
+            # Exception occurred, so we need to clean up
+            try:
+                shutil.rmtree(self.cache_path)
+            except BaseException as cleane:
+                self.logger.error(f"Exception occurred during cleanup: {cleane} "
+                                  f"({cleane.__class__.__name__})")
+            raise e from None
+
         self.set_changed()
         return self.cache_path
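
Note on the new error path: `raise e from None` re-raises the original download failure with implicit exception chaining suppressed, so the traceback stays focused on the root cause rather than the cleanup handler; the partially populated cache directory has already been removed, so the next run starts clean.
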
@@ -560,7 +565,7 @@ class FileResolver(Resolver):
         if source.startswith("file://"):
             source = source[7:]
         if source[0] != "$" and not os.path.isabs(source):
-            source = os.path.join(getattr(root, "_Project__cwd", os.getcwd()), source)
+            source = os.path.join(cwdirsafe(root), source)
 
         super().__init__(name, root, f"file://{source}", None)
 
siliconcompiler/project.py:
@@ -205,7 +205,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         Returns:
             str: The name of the top-level design.
         """
-        return self.get("option", "design")
+        return self.option.get_design()
 
     @property
     def design(self) -> Design:
@@ -373,6 +373,9 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         """
         if isinstance(obj, DependencySchema):
             for dep in obj.get_dep():
+                if isinstance(dep, (Design, LibrarySchema)):
+                    if self._has_library(dep.name):
+                        continue
                 self.add_dep(dep)
 
         # Rebuild dependencies to ensure instances are correct
@@ -394,13 +397,21 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
             return
 
         edit_schema = EditableSchema(self)
-        edit_schema.insert("flowgraph", flow.name, flow)
 
         # Instantiate tasks
         for task_cls in flow.get_all_tasks():
             task = task_cls()
             if not self.valid("tool", task.tool(), "task", task.task()):
                 edit_schema.insert("tool", task.tool(), "task", task.task(), task)
+            else:
+                existing_task: Task = self.get("tool", task.tool(), "task", task.task(),
+                                               field="schema")
+                if type(existing_task) is not type(task):
+                    raise TypeError(f"Task {task.tool()}/{task.task()} already exists with "
+                                    f"different type {type(existing_task).__name__}, "
+                                    f"imported type is {type(task).__name__}")
+
+        edit_schema.insert("flowgraph", flow.name, flow)
 
     def check_manifest(self) -> bool:
         """
siliconcompiler/scheduler/docker.py:
@@ -3,6 +3,8 @@ import os
 import shlex
 import sys
 
+import docker.errors
+
 from pathlib import Path
 
 import siliconcompiler
@@ -11,10 +13,9 @@ from siliconcompiler.package import RemoteResolver
 from siliconcompiler.utils import default_email_credentials_file
 from siliconcompiler.scheduler import SchedulerNode
 from siliconcompiler.utils.logging import SCBlankLoggerFormatter
-from siliconcompiler.utils.curation import collect
 
 
-def get_image(project, step, index):
+def get_image(project, step, index) -> str:
     """Determines the Docker image to use for a given node.
 
     The image is selected based on the following priority:
@@ -32,7 +33,7 @@ def get_image(project, step, index):
     """
     from siliconcompiler import __version__
 
-    queue = project.get('option', 'scheduler', 'queue', step=step, index=index)
+    queue = project.option.scheduler.get_queue(step=step, index=index)
     if queue:
         return queue
 
@@ -161,24 +162,24 @@ class DockerSchedulerNode(SchedulerNode):
         """
         A static pre-processing hook for the Docker scheduler.
 
-        On Windows, this method forces all file/directory parameters to be
-        copied rather than linked, which avoids issues with differing
-        filesystem types between the host and the Linux-based container.
-        It then triggers :meth:`.collect()` to ensure all files are staged.
-
         Args:
             project (Project): The project object to perform pre-processing on.
         """
-        if sys.platform == 'win32':
-            # this avoids the issue of different file system types
-            project.logger.error('Setting copy field to true for docker run on Windows')
-            for key in project.allkeys():
-                if key[0] == 'history':
-                    continue
-                sc_type = project.get(*key, field='type')
-                if 'dir' in sc_type or 'file' in sc_type:
-                    project.set(*key, True, field='copy')
-            collect(project)
+        try:
+            client = docker.from_env()
+            client.version()
+        except (docker.errors.DockerException, docker.errors.APIError):
+            raise RuntimeError('docker is not available or installed on this machine')
+
+    def mark_copy(self) -> bool:
+        if sys.platform != 'win32':
+            return False
+
+        do_collect = False
+        for key in self.get_required_path_keys():
+            self.project.set(*key, True, field='copy')
+            do_collect = True
+        return do_collect
183
184
  def run(self):
184
185
  """
@@ -196,12 +197,7 @@ class DockerSchedulerNode(SchedulerNode):
196
197
  """
197
198
  self._init_run_logger()
198
199
 
199
- try:
200
- client = docker.from_env()
201
- client.version()
202
- except (docker.errors.DockerException, docker.errors.APIError) as e:
203
- self.logger.error(f'Unable to connect to docker: {e}')
204
- self.halt()
200
+ client = docker.from_env()
205
201
 
206
202
  is_windows = sys.platform == 'win32'
207
203
 
@@ -233,7 +229,7 @@ class DockerSchedulerNode(SchedulerNode):
         email_file = default_email_credentials_file()
         if is_windows:
             # Hack to get around manifest merging
-            self.project.set('option', 'cachedir', None)
+            self.project.option.set_cachedir(None)
             cache_dir = '/sc_cache'
             cwd = '/sc_docker'
             builddir = f'{cwd}/build'
@@ -347,3 +343,6 @@ class DockerSchedulerNode(SchedulerNode):
 
         # Restore working directory
         os.chdir(start_cwd)
+
+    def check_required_paths(self) -> bool:
+        return True