siliconcompiler 0.35.1__py3-none-any.whl → 0.35.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_install.py +1 -1
  3. siliconcompiler/apps/sc_issue.py +8 -16
  4. siliconcompiler/apps/smake.py +106 -100
  5. siliconcompiler/checklist.py +349 -91
  6. siliconcompiler/design.py +8 -1
  7. siliconcompiler/flowgraph.py +419 -130
  8. siliconcompiler/flows/showflow.py +1 -2
  9. siliconcompiler/library.py +6 -5
  10. siliconcompiler/package/https.py +10 -5
  11. siliconcompiler/project.py +87 -37
  12. siliconcompiler/remote/client.py +17 -6
  13. siliconcompiler/scheduler/scheduler.py +284 -59
  14. siliconcompiler/scheduler/schedulernode.py +154 -102
  15. siliconcompiler/schema/__init__.py +3 -2
  16. siliconcompiler/schema/_metadata.py +1 -1
  17. siliconcompiler/schema/baseschema.py +210 -93
  18. siliconcompiler/schema/namedschema.py +21 -13
  19. siliconcompiler/schema/parameter.py +8 -1
  20. siliconcompiler/schema/safeschema.py +18 -7
  21. siliconcompiler/schema_support/dependencyschema.py +23 -3
  22. siliconcompiler/schema_support/filesetschema.py +10 -4
  23. siliconcompiler/schema_support/option.py +37 -34
  24. siliconcompiler/schema_support/pathschema.py +7 -2
  25. siliconcompiler/schema_support/record.py +5 -4
  26. siliconcompiler/targets/asap7_demo.py +4 -1
  27. siliconcompiler/tool.py +100 -8
  28. siliconcompiler/tools/__init__.py +10 -7
  29. siliconcompiler/tools/bambu/convert.py +19 -0
  30. siliconcompiler/tools/builtin/__init__.py +3 -2
  31. siliconcompiler/tools/builtin/filter.py +108 -0
  32. siliconcompiler/tools/builtin/importfiles.py +154 -0
  33. siliconcompiler/tools/execute/exec_input.py +4 -3
  34. siliconcompiler/tools/gtkwave/show.py +6 -2
  35. siliconcompiler/tools/icarus/compile.py +1 -0
  36. siliconcompiler/tools/klayout/scripts/klayout_show.py +1 -1
  37. siliconcompiler/tools/klayout/show.py +17 -5
  38. siliconcompiler/tools/openroad/screenshot.py +0 -1
  39. siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +1 -1
  40. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +2 -0
  41. siliconcompiler/tools/openroad/show.py +10 -0
  42. siliconcompiler/tools/surfer/show.py +7 -2
  43. siliconcompiler/tools/verilator/compile.py +2 -2
  44. siliconcompiler/tools/yosys/prepareLib.py +7 -2
  45. siliconcompiler/tools/yosys/syn_asic.py +20 -2
  46. siliconcompiler/toolscripts/_tools.json +5 -5
  47. siliconcompiler/toolscripts/rhel9/{install-yosys-wildebeest.sh → install-wildebeest.sh} +5 -5
  48. siliconcompiler/toolscripts/ubuntu22/{install-yosys-wildebeest.sh → install-wildebeest.sh} +5 -5
  49. siliconcompiler/toolscripts/ubuntu24/{install-yosys-wildebeest.sh → install-wildebeest.sh} +5 -5
  50. siliconcompiler/utils/__init__.py +1 -2
  51. siliconcompiler/utils/issue.py +38 -45
  52. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/METADATA +4 -4
  53. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/RECORD +57 -55
  54. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/WHEEL +0 -0
  55. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/entry_points.txt +0 -0
  56. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/licenses/LICENSE +0 -0
  57. {siliconcompiler-0.35.1.dist-info → siliconcompiler-0.35.3.dist-info}/top_level.txt +0 -0
siliconcompiler/tool.py CHANGED
@@ -32,10 +32,10 @@ import os.path
32
32
  from packaging.version import Version, InvalidVersion
33
33
  from packaging.specifiers import SpecifierSet, InvalidSpecifier
34
34
 
35
- from typing import List, Dict, Tuple, Union, Optional, Set, TextIO, Type, TYPE_CHECKING
35
+ from typing import List, Dict, Tuple, Union, Optional, Set, TextIO, Type, TypeVar, TYPE_CHECKING
36
36
  from pathlib import Path
37
37
 
38
- from siliconcompiler.schema import BaseSchema, NamedSchema, Journal, DocsSchema
38
+ from siliconcompiler.schema import BaseSchema, NamedSchema, Journal, DocsSchema, LazyLoad
39
39
  from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
40
40
  from siliconcompiler.schema.parametertype import NodeType
41
41
  from siliconcompiler.schema.utils import trim
@@ -53,6 +53,8 @@ if TYPE_CHECKING:
53
53
  from siliconcompiler.scheduler import SchedulerNode
54
54
  from siliconcompiler import Project
55
55
 
56
+ TTask = TypeVar('TTask')
57
+
56
58
 
57
59
  class TaskError(Exception):
58
60
  '''Error indicating that task execution cannot continue and should be terminated.'''
@@ -76,6 +78,15 @@ class TaskExecutableNotFound(TaskError):
76
78
  pass
77
79
 
78
80
 
81
+ class TaskExecutableNotReceived(TaskExecutableNotFound):
82
+ '''Error indicating that the tool executable was not received from a previous step.
83
+
84
+ This exception is raised specifically when a task expects to receive an executable
85
+ from an upstream task but no executable file was provided.
86
+ '''
87
+ pass
88
+
89
+
79
90
  class TaskSkip(TaskError):
80
91
  """
81
92
  Error raised to indicate that the current task should be skipped.
@@ -136,13 +147,14 @@ class Task(NamedSchema, PathSchema, DocsSchema):
136
147
 
137
148
  def _from_dict(self, manifest: Dict,
138
149
  keypath: Union[List[str], Tuple[str, ...]],
139
- version: Optional[Tuple[int, ...]] = None) \
150
+ version: Optional[Tuple[int, ...]] = None,
151
+ lazyload: LazyLoad = LazyLoad.ON) \
140
152
  -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
141
153
  """
142
154
  Populates the schema from a dictionary, dynamically adding 'var'
143
155
  parameters found in the manifest that are not already defined.
144
156
  """
145
- if "var" in manifest:
157
+ if not lazyload.is_enforced and "var" in manifest:
146
158
  # Collect existing and manifest var keys
147
159
  var_keys = [k[0] for k in self.allkeys("var")]
148
160
  manifest_keys = set(manifest["var"].keys())
@@ -160,7 +172,7 @@ class Task(NamedSchema, PathSchema, DocsSchema):
160
172
  if not manifest["var"]:
161
173
  del manifest["var"]
162
174
 
163
- return super()._from_dict(manifest, keypath, version)
175
+ return super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)
164
176
 
165
177
  @contextlib.contextmanager
166
178
  def runtime(self, node: "SchedulerNode",
@@ -802,6 +814,10 @@ class Task(NamedSchema, PathSchema, DocsSchema):
802
814
  schema = root.copy()
803
815
 
804
816
  for keypath in root.allkeys():
817
+ if keypath[0] == "history":
818
+ # Ignore history as this is not relevant to the task
819
+ continue
820
+
805
821
  paramtype: str = schema.get(*keypath, field='type')
806
822
  if 'file' not in paramtype and 'dir' not in paramtype:
807
823
  continue
@@ -1404,7 +1420,7 @@ class Task(NamedSchema, PathSchema, DocsSchema):
1404
1420
  return True
1405
1421
  return False
1406
1422
 
1407
- def set_refdir(self, dir: str, dataroot: Optional[str] = None,
1423
+ def set_refdir(self, dir: Union[Path, str], dataroot: Optional[str] = None,
1408
1424
  step: Optional[str] = None, index: Optional[Union[str, int]] = None,
1409
1425
  clobber: bool = False):
1410
1426
  '''Sets the reference directory for tool scripts and auxiliary files.
@@ -1428,7 +1444,7 @@ class Task(NamedSchema, PathSchema, DocsSchema):
1428
1444
  with self.active_dataroot(self._get_active_dataroot(dataroot)):
1429
1445
  return self.set("refdir", dir, step=step, index=index, clobber=clobber)
1430
1446
 
1431
- def set_script(self, script: str, dataroot: Optional[str] = ...,
1447
+ def set_script(self, script: Union[Path, str], dataroot: Optional[str] = ...,
1432
1448
  step: Optional[str] = None, index: Optional[Union[str, int]] = None,
1433
1449
  clobber: bool = False):
1434
1450
  '''Sets the main entry script for a script-based tool (e.g., a TCL script).
@@ -1691,7 +1707,8 @@ class Task(NamedSchema, PathSchema, DocsSchema):
1691
1707
  return self.add("sbom", version, sbom, step=step, index=index)
1692
1708
 
1693
1709
  def record_metric(self, metric: str, value: Union[int, float],
1694
- source_file: Optional[str] = None, source_unit: Optional[str] = None,
1710
+ source_file: Optional[Union[List[Union[Path, str]], Path, str]] = None,
1711
+ source_unit: Optional[str] = None,
1695
1712
  quiet: bool = False):
1696
1713
  '''
1697
1714
  Records a metric and associates the source file with it.
@@ -1783,6 +1800,81 @@ class Task(NamedSchema, PathSchema, DocsSchema):
1783
1800
  return super().find_files(*keypath, missing_ok=missing_ok,
1784
1801
  step=step, index=index)
1785
1802
 
1803
+ @classmethod
1804
+ def find_task(cls: Type[TTask], project: "Project") -> Union[Set[TTask], TTask]:
1805
+ """Finds registered task(s) in a project that match the calling class.
1806
+
1807
+ This method searches through all tasks configured in the provided `project`
1808
+ and returns those that meet specific criteria derived from the class on
1809
+ which this method is called. The filtering is based on three levels:
1810
+
1811
+ 1. **Class Type**: The primary filter ensures that any found task object
1812
+ is an instance of the calling class (`cls`).
1813
+ 2. **Tool Name**: If the calling class (`cls`) implements the `tool()`
1814
+ method, the search is narrowed to tasks with that specific tool name.
1815
+ 3. **Task Name**: If the calling class (`cls`) implements the `task()`
1816
+ method, the search is further narrowed to tasks with that name.
1817
+
1818
+ The method conveniently returns a single object if only one match is
1819
+ found, or a set of objects if multiple matches are found.
1820
+
1821
+ Args:
1822
+ project (Project): The project instance to search within.
1823
+
1824
+ Returns:
1825
+ Union[Task, Set[Task]]: A single Task instance if exactly one
1826
+ match is found, otherwise a set of matching Task instances.
1827
+
1828
+ Raises:
1829
+ TypeError: If the `project` argument is not a valid `Project` object.
1830
+ ValueError: If no tasks matching the specified criteria are found in
1831
+ the project.
1832
+ """
1833
+
1834
+ from siliconcompiler import Project
1835
+ if not isinstance(project, Project):
1836
+ raise TypeError("project must be a Project")
1837
+
1838
+ task_obj: "Task" = cls()
1839
+ tool, task = None, None
1840
+ try:
1841
+ tool = task_obj.tool()
1842
+ except NotImplementedError:
1843
+ pass
1844
+ try:
1845
+ task = task_obj.task()
1846
+ except NotImplementedError:
1847
+ pass
1848
+
1849
+ all_tasks: Set[Task] = set()
1850
+ for tool_name in project.getkeys("tool"):
1851
+ if tool and tool != tool_name:
1852
+ continue
1853
+ for task_name in project.getkeys("tool", tool_name, "task"):
1854
+ if task and task != task_name:
1855
+ continue
1856
+ all_tasks.add(project.get("tool", tool_name, "task", task_name, field="schema"))
1857
+
1858
+ tasks: Set[Task] = set()
1859
+ for task_obj in all_tasks:
1860
+ if not isinstance(task_obj, cls):
1861
+ continue
1862
+ tasks.add(task_obj)
1863
+
1864
+ if not tasks:
1865
+ parts = []
1866
+ if tool:
1867
+ parts.append(f"tool='{tool}'")
1868
+ if task:
1869
+ parts.append(f"task='{task}'")
1870
+ parts.append(f"class={cls.__name__}")
1871
+ criteria = ", ".join(parts) if parts else "any criteria"
1872
+ raise ValueError(f"No tasks found matching {criteria}")
1873
+
1874
+ if len(tasks) == 1:
1875
+ return next(iter(tasks))
1876
+ return tasks
1877
+
1786
1878
  def _find_files_search_paths(self, key: str,
1787
1879
  step: Optional[str],
1788
1880
  index: Optional[Union[int, str]]) -> List[str]:
@@ -36,6 +36,14 @@ def get_task(
36
36
  ValueError: If no tasks match the specified criteria.
37
37
  TypeError: If project is not a Project instance.
38
38
  """
39
+ import warnings
40
+ warnings.warn("This function is deprecated and will be removed in a future version, "
41
+ "use cls.find_task instead", DeprecationWarning, stacklevel=2)
42
+
43
+ if filter:
44
+ if inspect.isclass(filter):
45
+ return filter.find_task(project)
46
+
39
47
  from siliconcompiler import Project
40
48
  if not isinstance(project, Project):
41
49
  raise TypeError("project must be a Project")
@@ -52,10 +60,7 @@ def get_task(
52
60
  if task and task_obj.task() != task:
53
61
  continue
54
62
  if filter:
55
- if inspect.isclass(filter):
56
- if not isinstance(task_obj, filter):
57
- continue
58
- elif callable(filter):
63
+ if callable(filter):
59
64
  if not filter(task_obj):
60
65
  continue
61
66
  else:
@@ -69,9 +74,7 @@ def get_task(
69
74
  if task:
70
75
  parts.append(f"task='{task}'")
71
76
  if filter:
72
- if inspect.isclass(filter):
73
- parts.append(f"filter={filter.__name__}")
74
- elif callable(filter):
77
+ if callable(filter):
75
78
  filter_name = getattr(filter, '__name__', repr(filter))
76
79
  parts.append(f"filter={filter_name}")
77
80
  criteria = ", ".join(parts) if parts else "any criteria"
@@ -4,6 +4,8 @@ import shutil
4
4
 
5
5
  import os.path
6
6
 
7
+ from typing import Optional
8
+
7
9
  from siliconcompiler.utils import sc_open
8
10
 
9
11
  from siliconcompiler import Task
@@ -17,6 +19,23 @@ class ConvertTask(ASICTask, Task):
17
19
  self.add_parameter("memorychannels", "int", "Number of memory channels available",
18
20
  defvalue=1)
19
21
 
22
+ def set_bambu_memorychannels(self, channels: int,
23
+ step: Optional[str] = None, index: Optional[str] = None) -> None:
24
+ """Sets the number of memory channels for the Bambu synthesizer.
25
+
26
+ This method configures the 'memorychannels' variable within the Bambu
27
+ tool flow. It's used to specify the number of independent memory
28
+ channels the synthesized hardware should have.
29
+
30
+ Args:
31
+ channels: The number of memory channels to configure.
32
+ step: The specific synthesis step to which this setting applies.
33
+ If None, it applies globally. Defaults to None.
34
+ index: The index for the step, used if a step can have multiple
35
+ configurations. Defaults to None.
36
+ """
37
+ self.set("var", "memorychannels", channels, step=step, index=index)
38
+
20
39
  def tool(self):
21
40
  return "bambu"
22
41
 
@@ -7,7 +7,6 @@ from siliconcompiler import NodeStatus
7
7
 
8
8
  from siliconcompiler import Task
9
9
  from siliconcompiler import utils
10
- from siliconcompiler.tools import get_task
11
10
 
12
11
 
13
12
  class BuiltinTask(Task):
@@ -22,6 +21,8 @@ class BuiltinTask(Task):
22
21
 
23
22
  self._set_io_files()
24
23
 
24
+ self.set_threads(1)
25
+
25
26
  def _set_io_files(self):
26
27
  files = sorted(list(self.get_files_from_input_nodes().keys()))
27
28
  self.add_input_file(files)
@@ -58,7 +59,7 @@ class BuiltinTask(Task):
58
59
  flow.set("<step>", "<index>", "args", "errors==0")
59
60
  proj.set_flow(flow)
60
61
 
61
- get_task(proj, filter=NOPTask).add_output_file("<top>.v", step="<in>", index="0")
62
+ NOPTask.find_task(proj).add_output_file("<top>.v", step="<in>", index="0")
62
63
  node = SchedulerNode(proj, "<step>", "<index>")
63
64
  node.setup()
64
65
  return node.task
@@ -0,0 +1,108 @@
1
+ import fnmatch
2
+ import shutil
3
+
4
+ from typing import Optional, List, Union
5
+
6
+ from siliconcompiler import Task
7
+ from siliconcompiler import utils
8
+
9
+
10
+ class FilterTask(Task):
11
+ '''
12
+ A task for filtering files based on specified glob patterns.
13
+
14
+ This task determines which files to "keep" from a given set of inputs,
15
+ passing only those that match the criteria to the outputs.
16
+ '''
17
+ def __init__(self):
18
+ super().__init__()
19
+
20
+ self.add_parameter("keep", "[str]", "Glob of files to keep")
21
+
22
+ def add_filter_keep(self, keep: Union[List[str], str],
23
+ step: Optional[str] = None, index: Optional[str] = None,
24
+ clobber: bool = False) -> None:
25
+ '''
26
+ Adds one or more glob patterns for files to keep.
27
+
28
+ Args:
29
+ keep (Union[List[str], str]): A single glob pattern or a list of
30
+ glob patterns to identify which files should be kept.
31
+ step (Optional[str], optional): The specific workflow step to
32
+ apply this filter to. Defaults to None.
33
+ index (Optional[str], optional): The specific index within the step
34
+ to apply this filter to. Defaults to None.
35
+ clobber (bool, optional): If True, existing 'keep' patterns are
36
+ overwritten with the new value(s). If False, the new patterns
37
+ are appended to the existing list. Defaults to False.
38
+ '''
39
+ if clobber:
40
+ self.set("var", "keep", keep, step=step, index=index)
41
+ else:
42
+ self.add("var", "keep", keep, step=step, index=index)
43
+
44
+ def tool(self):
45
+ return "builtin"
46
+
47
+ def task(self):
48
+ return "filter"
49
+
50
+ def setup(self):
51
+ super().setup()
52
+
53
+ self.set_threads(1)
54
+
55
+ flow = self.project.get("flowgraph", self.project.option.get_flow(), field="schema")
56
+ graph_node = flow.get_graph_node(self.step, self.index)
57
+
58
+ if self.get("var", "keep"):
59
+ self.add_required_key("var", "keep")
60
+ elif graph_node.get_args():
61
+ self.add_required_key(graph_node, "args")
62
+
63
+ files = sorted(list(self.get_files_from_input_nodes().keys()))
64
+ if not files:
65
+ raise ValueError("task receives no files")
66
+
67
+ filters: List[str] = self.get("var", "keep") or graph_node.get_args()
68
+ if not filters:
69
+ filters = ["*"]
70
+
71
+ keep_files = []
72
+ for keep in filters:
73
+ keep_files.extend(fnmatch.filter(files, keep))
74
+
75
+ if not keep_files:
76
+ self.logger.warning(f"Filters ({', '.join(filters)}) removed all incoming files")
77
+ else:
78
+ self.add_input_file(keep_files)
79
+ self.add_output_file(keep_files)
80
+
81
+ def run(self):
82
+ self.logger.info(f"Running builtin task '{self.task()}'")
83
+
84
+ shutil.copytree('inputs', 'outputs', dirs_exist_ok=True,
85
+ copy_function=utils.link_symlink_copy)
86
+
87
+ return 0
88
+
89
+ @classmethod
90
+ def make_docs(cls):
91
+ from siliconcompiler import Flowgraph, Design, Project
92
+ from siliconcompiler.scheduler import SchedulerNode
93
+ from siliconcompiler.tools.builtin.nop import NOPTask
94
+ design = Design("<design>")
95
+ with design.active_fileset("docs"):
96
+ design.set_topmodule("top")
97
+ proj = Project(design)
98
+ proj.add_fileset("docs")
99
+ flow = Flowgraph("docsflow")
100
+ flow.node("<in>", NOPTask())
101
+ flow.node("<step>", cls(), index="<index>")
102
+ flow.edge("<in>", "<step>", head_index="<index>")
103
+ proj.set_flow(flow)
104
+
105
+ NOPTask.find_task(proj).add_output_file("<top>.v", step="<in>", index="0")
106
+ node = SchedulerNode(proj, "<step>", "<index>")
107
+ node.setup()
108
+ return node.task
@@ -0,0 +1,154 @@
1
+ import shutil
2
+
3
+ import os.path
4
+
5
+ from pathlib import Path
6
+
7
+ from typing import Union, List, Optional
8
+
9
+ from siliconcompiler import Task
10
+ from siliconcompiler.utils import link_copy
11
+
12
+
13
+ class ImportFilesTask(Task):
14
+ '''A built-in task to import (copy) files and directories.
15
+
16
+ This task provides a mechanism to copy specified files and directories
17
+ from their source locations into the current task's output directory
18
+ (``outputs/``), making them available for subsequent steps in the tool flow.
19
+ '''
20
+ def __init__(self):
21
+ super().__init__()
22
+
23
+ self.add_parameter("file", "[file]", "input files to import")
24
+ self.add_parameter("dir", "[dir]", "input directories to import")
25
+
26
+ def add_import_file(self, file: Union[List[Union[str, Path]], str, Path],
27
+ dataroot: Optional[str] = None,
28
+ step: Optional[str] = None, index: Optional[str] = None,
29
+ clobber: bool = False) -> None:
30
+ """Adds one or more files to the list of items to import.
31
+
32
+ Args:
33
+ file (Union[List[Union[str, Path]], str, Path]): The path(s) to the file(s)
34
+ to be imported.
35
+ dataroot (Optional[str]): The dataroot to use for resolving relative paths.
36
+ If None, the active dataroot is used. Defaults to None.
37
+ step (Optional[str]): The step to associate this file with.
38
+ Defaults to the current step.
39
+ index (Optional[str]): The index to associate this file with.
40
+ Defaults to the current index.
41
+ clobber (bool): If True, existing file entries for the specified
42
+ step/index will be overwritten. If False, the new file(s)
43
+ will be appended. Defaults to False.
44
+ """
45
+ with self.active_dataroot(self._get_active_dataroot(dataroot)):
46
+ if clobber:
47
+ self.set("var", "file", file, step=step, index=index)
48
+ else:
49
+ self.add("var", "file", file, step=step, index=index)
50
+
51
+ def add_import_dir(self, directory: Union[List[Union[str, Path]], str, Path],
52
+ dataroot: Optional[str] = None,
53
+ step: Optional[str] = None, index: Optional[str] = None,
54
+ clobber: bool = False) -> None:
55
+ """Adds one or more directories to the list of items to import.
56
+
57
+ Args:
58
+ directory (Union[List[Union[str, Path]], str, Path]): The path(s) to the
59
+ directory/directories to be imported.
60
+ dataroot (Optional[str]): The dataroot to use for resolving relative paths.
61
+ If None, the active dataroot is used. Defaults to None.
62
+ step (Optional[str]): The step to associate this directory with.
63
+ Defaults to the current step.
64
+ index (Optional[str]): The index to associate this directory with.
65
+ Defaults to the current index.
66
+ clobber (bool): If True, existing directory entries for the specified
67
+ step/index will be overwritten. If False, the new directory/directories
68
+ will be appended. Defaults to False.
69
+ """
70
+ with self.active_dataroot(self._get_active_dataroot(dataroot)):
71
+ if clobber:
72
+ self.set("var", "dir", directory, step=step, index=index)
73
+ else:
74
+ self.add("var", "dir", directory, step=step, index=index)
75
+
76
+ def tool(self) -> str:
77
+ return "builtin"
78
+
79
+ def task(self) -> str:
80
+ return "importfiles"
81
+
82
+ def setup(self) -> None:
83
+ """Prepares the task for execution by setting up dependencies and requirements.
84
+
85
+ This method validates that the task is configured correctly (i.e., has files
86
+ or directories to import and does not have formal input nodes) and registers
87
+ the items to be imported as task outputs.
88
+
89
+ Raises:
90
+ ValueError: If the task is configured with input nodes or if no
91
+ files or directories are specified for import.
92
+ """
93
+ super().setup()
94
+
95
+ self.set_threads(1)
96
+
97
+ if (self.step, self.index) not in self.schema_flow.get_entry_nodes():
98
+ raise ValueError("task must be an entry node")
99
+
100
+ if not self.get("var", "file") and not self.get("var", "dir"):
101
+ raise ValueError("task requires files or directories to import")
102
+
103
+ if self.get("var", "file"):
104
+ self.add_required_key("var", "file")
105
+
106
+ if self.get("var", "dir"):
107
+ self.add_required_key("var", "dir")
108
+
109
+ for file in self.get("var", "file") + self.get("var", "dir"):
110
+ self.add_output_file(os.path.basename(file))
111
+
112
+ def run(self) -> int:
113
+ """Executes the file and directory import process.
114
+
115
+ Copies all specified files and directories into the task's ``outputs/``
116
+ directory using a copy function that attempts to create hard links
117
+ as an optimization.
118
+
119
+ Returns:
120
+ int: A status code, where 0 indicates successful execution.
121
+ """
122
+ self.logger.info(f"Running builtin task '{self.task()}'")
123
+
124
+ for file in self.find_files("var", "file"):
125
+ self.logger.debug(f"Copying file {file} to outputs")
126
+ link_copy(file, "outputs/")
127
+
128
+ for directory in self.find_files("var", "dir"):
129
+ # For directories, copytree needs the destination to be the specific new directory name
130
+ dest_dir = os.path.join("outputs", os.path.basename(directory))
131
+ self.logger.debug(f"Copying directory {directory} to {dest_dir}")
132
+ shutil.copytree(directory, dest_dir, copy_function=link_copy)
133
+
134
+ return 0
135
+
136
+ @classmethod
137
+ def make_docs(cls):
138
+ from siliconcompiler import Flowgraph, Design, Project
139
+ from siliconcompiler.scheduler import SchedulerNode
140
+ design = Design("<design>")
141
+ with design.active_fileset("docs"):
142
+ design.set_topmodule("top")
143
+ proj = Project(design)
144
+ proj.add_fileset("docs")
145
+ flow = Flowgraph("docsflow")
146
+ flow.node("<step>", cls(), index="<index>")
147
+ proj.set_flow(flow)
148
+
149
+ cls.find_task(proj).add_import_file("import.txt")
150
+ cls.find_task(proj).add_import_dir("/directory")
151
+
152
+ node = SchedulerNode(proj, "<step>", "<index>")
153
+ node.setup()
154
+ return node.task
@@ -4,7 +4,7 @@ import stat
4
4
  import os.path
5
5
 
6
6
  from siliconcompiler import Task
7
- from siliconcompiler.tools import get_task
7
+ from siliconcompiler.tool import TaskExecutableNotReceived
8
8
 
9
9
 
10
10
  class ExecInputTask(Task):
@@ -41,7 +41,8 @@ class ExecInputTask(Task):
41
41
  break
42
42
 
43
43
  if not exec:
44
- raise FileNotFoundError(f'{self.step}/{self.index} did not receive an executable file')
44
+ raise TaskExecutableNotReceived(f'{self.step}/{self.index} did not receive an '
45
+ 'executable file')
45
46
 
46
47
  os.chmod(exec, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
47
48
 
@@ -64,7 +65,7 @@ class ExecInputTask(Task):
64
65
  flow.set("<step>", "<index>", "args", "errors==0")
65
66
  proj.set_flow(flow)
66
67
 
67
- get_task(proj, filter=NOPTask).add_output_file("<top>.exe", step="<in>", index="0")
68
+ NOPTask.find_task(proj).add_output_file("<top>.exe", step="<in>", index="0")
68
69
  node = SchedulerNode(proj, "<step>", "<index>")
69
70
  node.setup()
70
71
  return node.task
@@ -5,13 +5,13 @@ from siliconcompiler import ShowTask
5
5
 
6
6
  class ShowTask(ShowTask):
7
7
  '''
8
- Show a VCD file.
8
+ Show a VCD or FST file.
9
9
  '''
10
10
  def tool(self):
11
11
  return "gtkwave"
12
12
 
13
13
  def get_supported_show_extentions(self):
14
- return ["vcd"]
14
+ return ["vcd", "fst"]
15
15
 
16
16
  def parse_version(self, stdout):
17
17
  # First line: GTKWave Analyzer v3.3.116 (w)1999-2023 BSI
@@ -37,6 +37,8 @@ class ShowTask(ShowTask):
37
37
 
38
38
  if f"{self.design_topmodule}.vcd" in self.get_files_from_input_nodes():
39
39
  self.add_input_file(ext="vcd")
40
+ elif f"{self.design_topmodule}.fst" in self.get_files_from_input_nodes():
41
+ self.add_input_file(ext="fst")
40
42
  else:
41
43
  self.add_required_key("var", "showfilepath")
42
44
 
@@ -48,6 +50,8 @@ class ShowTask(ShowTask):
48
50
 
49
51
  if os.path.exists(f'inputs/{self.design_topmodule}.vcd'):
50
52
  dump = f'inputs/{self.design_topmodule}.vcd'
53
+ elif os.path.exists(f'inputs/{self.design_topmodule}.fst'):
54
+ dump = f'inputs/{self.design_topmodule}.fst'
51
55
  else:
52
56
  dump = self.find_files('var', 'showfilepath')
53
57
  options.append(f'--dump={dump}')
@@ -103,6 +103,7 @@ class CompileTask(Task):
103
103
  options.append('-D' + define)
104
104
 
105
105
  # add siliconcompiler specific defines
106
+ options.append("-DSILICONCOMPILER_TRACE_DIR=\"reports\"")
106
107
  options.append(f"-DSILICONCOMPILER_TRACE_FILE=\"reports/{self.design_topmodule}.vcd\"")
107
108
 
108
109
  #######################
@@ -250,7 +250,7 @@ def main():
250
250
  sc_filename = schema.get('tool', 'klayout', 'task', task, 'var', 'showfilepath',
251
251
  step=step, index=index)
252
252
  else:
253
- for ext in (f'{sc_fileext}.gz', sc_fileext):
253
+ for ext in (f'{sc_fileext}.gz', sc_fileext, 'gds.gz', "gds", 'oas.gz', 'oas'):
254
254
  sc_filename = f"inputs/{design}.{ext}"
255
255
  if os.path.exists(sc_filename):
256
256
  break
@@ -15,6 +15,17 @@ class ShowTask(ShowTask, KLayoutTask):
15
15
 
16
16
  self.set_script("klayout_show.py")
17
17
 
18
+ if f"{self.design_topmodule}.gds.gz" in self.get_files_from_input_nodes():
19
+ self.add_input_file(ext="gds.gz")
20
+ elif f"{self.design_topmodule}.gds" in self.get_files_from_input_nodes():
21
+ self.add_input_file(ext="gds")
22
+ elif f"{self.design_topmodule}.oas.gz" in self.get_files_from_input_nodes():
23
+ self.add_input_file(ext="oas.gz")
24
+ elif f"{self.design_topmodule}.oas" in self.get_files_from_input_nodes():
25
+ self.add_input_file(ext="oas")
26
+ else:
27
+ self.add_required_key("var", "showfilepath")
28
+
18
29
  self.add_commandline_option(["-nc", "-rm"], clobber=True)
19
30
 
20
31
  def get_supported_show_extentions(self):
@@ -23,8 +34,9 @@ class ShowTask(ShowTask, KLayoutTask):
23
34
  def pre_process(self):
24
35
  super().pre_process()
25
36
 
26
- rel_path = os.path.dirname(self.get("var", "showfilepath"))
27
- for ext in ('lyt', 'lyp'):
28
- ext_file = os.path.join(rel_path, f'{self.design_topmodule}.{ext}')
29
- if ext_file and os.path.exists(ext_file):
30
- shutil.copy2(ext_file, f"inputs/{self.design_topmodule}.{ext}")
37
+ if self.get("var", "showfilepath"):
38
+ rel_path = os.path.dirname(self.get("var", "showfilepath"))
39
+ for ext in ('lyt', 'lyp'):
40
+ ext_file = os.path.join(rel_path, f'{self.design_topmodule}.{ext}')
41
+ if ext_file and os.path.exists(ext_file):
42
+ shutil.copy2(ext_file, f"inputs/{self.design_topmodule}.{ext}")
@@ -16,7 +16,6 @@ class ScreenshotTask(ScreenshotTask, ShowTask):
16
16
  def setup(self):
17
17
  super().setup()
18
18
 
19
- self.unset("input")
20
19
  self.add_output_file(ext="png", clobber=True)
21
20
 
22
21
  self.set_script("sc_show.tcl")
@@ -11,5 +11,5 @@ sc_save_image \
11
11
  gui::restore_display_controls
12
12
 
13
13
  if { [sc_cfg_tool_task_get var include_report_images] } {
14
- source "${sc_refdir}/sc_write_images.tcl"
14
+ source "${sc_refdir}/common/write_images.tcl"
15
15
  }