siliconcompiler 0.34.1__py3-none-any.whl → 0.34.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/__init__.py +23 -4
- siliconcompiler/__main__.py +1 -7
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +104 -23
- siliconcompiler/apps/sc.py +4 -8
- siliconcompiler/apps/sc_dashboard.py +6 -4
- siliconcompiler/apps/sc_install.py +10 -6
- siliconcompiler/apps/sc_issue.py +7 -5
- siliconcompiler/apps/sc_remote.py +1 -1
- siliconcompiler/apps/sc_server.py +9 -14
- siliconcompiler/apps/sc_show.py +7 -6
- siliconcompiler/apps/smake.py +130 -94
- siliconcompiler/apps/utils/replay.py +4 -7
- siliconcompiler/apps/utils/summarize.py +3 -5
- siliconcompiler/asic.py +420 -0
- siliconcompiler/checklist.py +25 -2
- siliconcompiler/cmdlineschema.py +534 -0
- siliconcompiler/constraints/__init__.py +17 -0
- siliconcompiler/constraints/asic_component.py +378 -0
- siliconcompiler/constraints/asic_floorplan.py +449 -0
- siliconcompiler/constraints/asic_pins.py +489 -0
- siliconcompiler/constraints/asic_timing.py +517 -0
- siliconcompiler/core.py +10 -35
- siliconcompiler/data/templates/tcl/manifest.tcl.j2 +8 -0
- siliconcompiler/dependencyschema.py +96 -202
- siliconcompiler/design.py +327 -241
- siliconcompiler/filesetschema.py +250 -0
- siliconcompiler/flowgraph.py +298 -106
- siliconcompiler/fpga.py +124 -1
- siliconcompiler/library.py +331 -0
- siliconcompiler/metric.py +327 -92
- siliconcompiler/metrics/__init__.py +7 -0
- siliconcompiler/metrics/asic.py +245 -0
- siliconcompiler/metrics/fpga.py +220 -0
- siliconcompiler/package/__init__.py +391 -67
- siliconcompiler/package/git.py +92 -16
- siliconcompiler/package/github.py +114 -22
- siliconcompiler/package/https.py +79 -16
- siliconcompiler/packageschema.py +341 -16
- siliconcompiler/pathschema.py +255 -0
- siliconcompiler/pdk.py +566 -1
- siliconcompiler/project.py +1460 -0
- siliconcompiler/record.py +38 -1
- siliconcompiler/remote/__init__.py +5 -2
- siliconcompiler/remote/client.py +11 -6
- siliconcompiler/remote/schema.py +5 -23
- siliconcompiler/remote/server.py +41 -54
- siliconcompiler/report/__init__.py +3 -3
- siliconcompiler/report/dashboard/__init__.py +48 -14
- siliconcompiler/report/dashboard/cli/__init__.py +99 -21
- siliconcompiler/report/dashboard/cli/board.py +364 -179
- siliconcompiler/report/dashboard/web/__init__.py +90 -12
- siliconcompiler/report/dashboard/web/components/__init__.py +219 -240
- siliconcompiler/report/dashboard/web/components/flowgraph.py +49 -26
- siliconcompiler/report/dashboard/web/components/graph.py +139 -100
- siliconcompiler/report/dashboard/web/layouts/__init__.py +29 -1
- siliconcompiler/report/dashboard/web/layouts/_common.py +38 -2
- siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph.py +39 -26
- siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_node_tab.py +50 -50
- siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_sac_tabs.py +49 -46
- siliconcompiler/report/dashboard/web/state.py +141 -14
- siliconcompiler/report/dashboard/web/utils/__init__.py +79 -16
- siliconcompiler/report/dashboard/web/utils/file_utils.py +74 -11
- siliconcompiler/report/dashboard/web/viewer.py +25 -1
- siliconcompiler/report/report.py +5 -2
- siliconcompiler/report/summary_image.py +29 -11
- siliconcompiler/scheduler/__init__.py +9 -1
- siliconcompiler/scheduler/docker.py +81 -4
- siliconcompiler/scheduler/run_node.py +37 -20
- siliconcompiler/scheduler/scheduler.py +211 -36
- siliconcompiler/scheduler/schedulernode.py +394 -60
- siliconcompiler/scheduler/send_messages.py +77 -29
- siliconcompiler/scheduler/slurm.py +76 -12
- siliconcompiler/scheduler/taskscheduler.py +142 -21
- siliconcompiler/schema/__init__.py +0 -4
- siliconcompiler/schema/baseschema.py +338 -59
- siliconcompiler/schema/editableschema.py +14 -6
- siliconcompiler/schema/journal.py +28 -17
- siliconcompiler/schema/namedschema.py +22 -14
- siliconcompiler/schema/parameter.py +89 -28
- siliconcompiler/schema/parametertype.py +2 -0
- siliconcompiler/schema/parametervalue.py +258 -15
- siliconcompiler/schema/safeschema.py +25 -2
- siliconcompiler/schema/schema_cfg.py +23 -19
- siliconcompiler/schema/utils.py +2 -2
- siliconcompiler/schema_obj.py +24 -5
- siliconcompiler/tool.py +1131 -265
- siliconcompiler/tools/bambu/__init__.py +41 -0
- siliconcompiler/tools/builtin/concatenate.py +2 -2
- siliconcompiler/tools/builtin/minimum.py +2 -1
- siliconcompiler/tools/builtin/mux.py +2 -1
- siliconcompiler/tools/builtin/nop.py +2 -1
- siliconcompiler/tools/builtin/verify.py +2 -1
- siliconcompiler/tools/klayout/__init__.py +95 -0
- siliconcompiler/tools/openroad/__init__.py +289 -0
- siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +3 -0
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +7 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +8 -4
- siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +9 -5
- siliconcompiler/tools/openroad/scripts/common/write_images.tcl +5 -1
- siliconcompiler/tools/slang/__init__.py +1 -1
- siliconcompiler/tools/slang/elaborate.py +2 -1
- siliconcompiler/tools/vivado/scripts/sc_run.tcl +1 -1
- siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +8 -1
- siliconcompiler/tools/vivado/syn_fpga.py +6 -0
- siliconcompiler/tools/vivado/vivado.py +35 -2
- siliconcompiler/tools/vpr/__init__.py +150 -0
- siliconcompiler/tools/yosys/__init__.py +369 -1
- siliconcompiler/tools/yosys/scripts/procs.tcl +0 -1
- siliconcompiler/toolscripts/_tools.json +5 -10
- siliconcompiler/utils/__init__.py +66 -0
- siliconcompiler/utils/flowgraph.py +2 -2
- siliconcompiler/utils/issue.py +2 -1
- siliconcompiler/utils/logging.py +14 -0
- siliconcompiler/utils/multiprocessing.py +256 -0
- siliconcompiler/utils/showtools.py +10 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/METADATA +6 -6
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/RECORD +122 -115
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/entry_points.txt +3 -0
- siliconcompiler/schema/cmdlineschema.py +0 -250
- siliconcompiler/schema/packageschema.py +0 -101
- siliconcompiler/toolscripts/rhel8/install-slang.sh +0 -40
- siliconcompiler/toolscripts/rhel9/install-slang.sh +0 -40
- siliconcompiler/toolscripts/ubuntu20/install-slang.sh +0 -47
- siliconcompiler/toolscripts/ubuntu22/install-slang.sh +0 -37
- siliconcompiler/toolscripts/ubuntu24/install-slang.sh +0 -37
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1460 @@
|
|
|
1
|
+
import importlib
|
|
2
|
+
import inspect
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import sys
|
|
7
|
+
import uuid
|
|
8
|
+
|
|
9
|
+
import os.path
|
|
10
|
+
|
|
11
|
+
from inspect import getfullargspec
|
|
12
|
+
from typing import Set, Union, List, Tuple, Type, Callable, TextIO
|
|
13
|
+
|
|
14
|
+
from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter, Scope
|
|
15
|
+
from siliconcompiler.schema.parametervalue import NodeListValue, NodeSetValue
|
|
16
|
+
from siliconcompiler.schema.utils import trim
|
|
17
|
+
|
|
18
|
+
from siliconcompiler import DesignSchema, LibrarySchema
|
|
19
|
+
from siliconcompiler import FlowgraphSchema
|
|
20
|
+
from siliconcompiler import RecordSchema
|
|
21
|
+
from siliconcompiler import MetricSchema
|
|
22
|
+
from siliconcompiler import ChecklistSchema
|
|
23
|
+
from siliconcompiler import ToolSchema, TaskSchema
|
|
24
|
+
from siliconcompiler import ShowTaskSchema, ScreenshotTaskSchema
|
|
25
|
+
|
|
26
|
+
from siliconcompiler.cmdlineschema import CommandLineSchema
|
|
27
|
+
from siliconcompiler.dependencyschema import DependencySchema
|
|
28
|
+
from siliconcompiler.pathschema import PathSchemaBase
|
|
29
|
+
|
|
30
|
+
from siliconcompiler.schema.schema_cfg import schema_option_runtime, schema_arg, schema_version
|
|
31
|
+
|
|
32
|
+
from siliconcompiler.report.dashboard.cli import CliDashboard
|
|
33
|
+
from siliconcompiler.scheduler import Scheduler
|
|
34
|
+
from siliconcompiler.utils.logging import SCColorLoggerFormatter, SCLoggerFormatter
|
|
35
|
+
from siliconcompiler.utils import FilterDirectories, get_file_ext
|
|
36
|
+
from siliconcompiler.utils.multiprocessing import MPManager
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
|
|
40
|
+
"""
|
|
41
|
+
The Project class is the core object in SiliconCompiler, representing a
|
|
42
|
+
complete hardware design project. It manages design parameters, libraries,
|
|
43
|
+
flowgraphs, metrics, and provides methods for compilation, data collection,
|
|
44
|
+
and reporting.
|
|
45
|
+
"""
|
|
46
|
+
|
|
47
|
+
def __init__(self, design: Union[DesignSchema, str] = None):
    """
    Initializes a new Project.

    Builds the base schema (checklist, library, flowgraph, metric, record,
    tool sections plus runtime options), creates the project logger, records
    the working directory, and attaches the CLI dashboard.

    Args:
        design (Union[DesignSchema, str], optional): Design to associate with
            this project. A DesignSchema object is registered as a library and
            selected as the active design; a string only sets the design name.
    """
    super().__init__()

    # Initialize schema
    schema = EditableSchema(self)
    schema_version(schema)
    schema_arg(schema)

    schema.insert("checklist", "default", ChecklistSchema())
    schema.insert("library", "default", DesignSchema())
    schema.insert("flowgraph", "default", FlowgraphSchema())
    schema.insert("metric", MetricSchema())
    schema.insert("record", RecordSchema())
    schema.insert("tool", "default", ToolSchema())

    # Add options
    schema_option_runtime(schema)
    schema.insert(
        "option", "env", "default",
        Parameter(
            "str",
            scope=Scope.GLOBAL,
            shorthelp="Option: environment variables",
            example=["api: project.set('option', 'env', 'PDK_HOME', '/disk/mypdk')"],
            help=trim("""
            Certain tools and reference flows require global environment
            variables to be set. These variables can be managed externally or
            specified through the env variable.""")))

    schema.insert(
        "option", "design",
        Parameter(
            "str",
            scope=Scope.GLOBAL,
            shorthelp="Option: Design library name",
            example=["cli: -design hello_world",
                     "api: project.set('option', 'design', 'hello_world')"],
            switch=["-design <str>"],
            help="Name of the top level library"))
    schema.insert(
        "option", "alias",
        Parameter(
            "[(str,str,str,str)]",
            scope=Scope.GLOBAL,
            shorthelp="Option: Fileset alias mapping",
            # Fixed example: the original was missing the closing parenthesis
            # of the set() call.
            example=["api: project.set('option', 'alias', "
                     "('design', 'rtl', 'lambda', 'rtl'))"],
            help=trim("""List of filesets to alias during a run. When an alias is specific
            it will be used instead of the source fileset. It is useful when you
            want to substitute a fileset from one library with a fileset from another,
            without changing the original design's code.
            For example, you might use it to swap in a different version of an IP
            block or a specific test environment.""")))
    schema.insert(
        "option", "fileset",
        Parameter(
            "[str]",
            scope=Scope.GLOBAL,
            shorthelp="Option: Selected design filesets",
            example=["api: project.set('option', 'fileset', 'rtl')"],
            help=trim("""List of filesets to use from the selected design library""")))

    schema.insert(
        "option", "nodashboard",
        Parameter(
            "bool",
            defvalue=False,
            scope=Scope.GLOBAL,
            switch=["-nodashboard <bool>"],
            shorthelp="Option: Disables the dashboard",
            example=["api: project.set('option', 'nodashboard', True)"],
            help=trim("""Disables the dashboard during execution""")))

    # Add history
    schema.insert("history", BaseSchema())

    # Init logger
    self.__init_logger()

    # Init fields
    self.__cwd = os.getcwd()

    if design:
        if isinstance(design, str):
            self.set("option", "design", design)
        else:
            self.set_design(design)

    self.__init_dashboard()
|
|
135
|
+
|
|
136
|
+
def __init_logger(self):
    """Create and configure this project's private logger.

    The logger is a uniquely-named child of the MPManager root logger with a
    stdout stream handler; color formatting is used when the terminal
    supports it.
    """
    child_name = f"project_{uuid.uuid4().hex}"
    self.__logger = MPManager.logger().getChild(child_name)
    self.__logger.setLevel(logging.INFO)

    console = logging.StreamHandler(stream=sys.stdout)
    formatter = SCLoggerFormatter()
    if SCColorLoggerFormatter.supports_color(sys.stdout):
        formatter = SCColorLoggerFormatter(formatter)
    console.setFormatter(formatter)

    self._logger_console = console
    self.__logger.addHandler(console)
|
|
150
|
+
|
|
151
|
+
def __init_dashboard(self):
    """Create the CLI dashboard, or tear it down when disabled.

    When [option,nodashboard] is True, any running dashboard instance is
    stopped and the handle cleared; otherwise a fresh CliDashboard is
    attached to the project.
    """
    if not self.get("option", "nodashboard"):
        self.__dashboard = CliDashboard(self)
        return

    # Dashboard disabled: stop any running instance and clear the handle.
    # The attribute may not exist yet on first call during __init__.
    try:
        active = self.__dashboard
    except AttributeError:
        active = None
    if active:
        active.stop()
    self.__dashboard = None
|
|
168
|
+
|
|
169
|
+
def set(self, *args, field='value', clobber=True, step=None, index=None):
    """Set a schema value, refreshing the dashboard when it is toggled.

    Delegates to the base schema setter, then reinitializes the dashboard
    if the [option,nodashboard] key was the one written.
    """
    result = super().set(*args, field=field, clobber=clobber, step=step, index=index)

    # Special handling: toggling the dashboard option takes effect immediately.
    if args[0:2] == ("option", "nodashboard"):
        self.__init_dashboard()

    return result
|
|
177
|
+
|
|
178
|
+
@property
def logger(self) -> logging.Logger:
    """logging.Logger: The logger associated with this project."""
    return self.__logger
|
|
184
|
+
|
|
185
|
+
@property
def name(self) -> str:
    """str: The design name, as stored in [option,design]."""
    return self.get("option", "design")
|
|
191
|
+
|
|
192
|
+
@property
def design(self) -> DesignSchema:
    """DesignSchema: The active design library object.

    Raises:
        ValueError: if [option,design] has not been set.
        KeyError: if the named design library has not been loaded.
    """
    top = self.name
    if not top:
        raise ValueError("design name is not set")
    if not self.valid("library", top):
        raise KeyError(f"{top} design has not been loaded")

    return self.get("library", top, field="schema")
|
|
204
|
+
|
|
205
|
+
@property
def cwd(self) -> str:
    """str: The working directory captured when the project was created."""
    return self.__cwd
|
|
211
|
+
|
|
212
|
+
@classmethod
def convert(cls, obj: "Project") -> "Project":
    """
    Create a new project of this class from an existing project.

    Only the top-level schema sections shared by both classes are carried
    over. When converting between unrelated project classes, run-specific
    sections (checklist, flowgraph, metric, record, tool, schemaversion)
    are dropped rather than copied.

    Args:
        obj: Source project to convert from.

    Returns:
        A new instance of ``cls`` populated from ``obj``.

    Raises:
        TypeError: if ``obj`` is not a Project.
    """
    if not isinstance(obj, Project):
        raise TypeError("source object must be a Project")

    converted = cls()

    shared_keys = set(converted.getkeys()).intersection(obj.getkeys())

    # Drop run-state sections when the target class is not a subtype of
    # the source class (i.e. a genuine cross-type conversion).
    if not issubclass(cls, obj.__class__):
        shared_keys.difference_update(
            ("checklist", "flowgraph", "metric", "record", "tool", "schemaversion"))

    manifest = {key: value for key, value in obj.getdict().items()
                if key in shared_keys}

    converted._from_dict(manifest, [])

    return converted
|
|
246
|
+
|
|
247
|
+
@classmethod
def _getdict_type(cls) -> str:
    """Return the type tag recorded in getdict metadata.

    Always reports the base ``Project`` name, regardless of subclass.
    """
    return Project.__name__
|
|
254
|
+
|
|
255
|
+
def __populate_deps(self, obj: DependencySchema = None):
    """
    Re-link dependency pointers for all libraries in the project.

    Keeps the schema graph navigable by making every loaded library that is
    a DependencySchema point back at the project's library instances.

    Args:
        obj (DependencySchema, optional): dependency object to reset before
            repopulating. If None, only the existing libraries are
            processed. Defaults to None.
    """
    if obj:
        obj._reset_deps()

    library_map = {name: self.get("library", name, field="schema")
                   for name in self.getkeys("library")}
    for library in library_map.values():
        if isinstance(library, DependencySchema):
            library._populate_deps(library_map)
|
|
273
|
+
|
|
274
|
+
def _from_dict(self, manifest, keypath, version=None):
    """
    Populate the project's schema from a dictionary representation.

    Used during deserialization or when loading a project state from a
    manifest; after the data is loaded, library dependency pointers are
    re-established so the schema graph stays consistent.

    Args:
        manifest (dict): dictionary containing the schema data.
        keypath (list): current keypath being processed (used internally
            for recursive loading).
        version (str, optional): schema version of the manifest.
            Defaults to None.

    Returns:
        Any: result of the superclass's `_from_dict`.
    """
    loaded = super()._from_dict(manifest, keypath, version)

    # Restore dependencies
    self.__populate_deps()

    return loaded
|
|
297
|
+
|
|
298
|
+
def load_target(self, target: Union[str, Callable[["Project"], None]], **kwargs):
    """
    Loads and executes a target function or method within the project context.

    This method allows dynamically loading a Python function (e.g., a target
    defined in a separate module) and executing it. It performs type checking
    to ensure the target function accepts a Project object as its first
    required argument and that the current project instance is compatible
    with the target's expected Project type.

    Args:
        target (Union[str, Callable[["Project"], None]]):
            The target to load. This can be:
            - A string in the format "module.submodule.function_name"
            - A callable Python function that accepts a Project object as its
              first argument.
        **kwargs: Arbitrary keyword arguments to pass to the target function.

    Raises:
        ValueError: If the target string path is incomplete, if the target
            signature does not meet the requirements (e.g., no required
            arguments, or more than one required argument).
        TypeError: If the target does not take a Project object as its
            first argument, or if the current project instance is not
            compatible with the target's required Project type.
    """
    if isinstance(target, str):
        if "." not in target:
            raise ValueError("unable to process incomplete function path")

        *module, func = target.split(".")
        module = ".".join(module)

        mod = importlib.import_module(module)
        target = getattr(mod, func)

    func_spec = getfullargspec(target)

    # Number of positional arguments with no default value
    args_len = len(func_spec.args or []) - len(func_spec.defaults or [])

    if args_len == 0 and not func_spec.args:
        # Fixed message: original read "cannot must take at least one argument"
        raise ValueError('target signature must take at least one argument')
    if args_len > 1:
        raise ValueError('target signature cannot have more than one required argument')

    # The first argument must accept this project (annotation defaults to Project)
    proj_arg = func_spec.args[0]
    required_type = func_spec.annotations.get(proj_arg, Project)

    if not issubclass(required_type, Project):
        raise TypeError("target must take in a Project object")

    if not isinstance(self, required_type):
        raise TypeError(f"target requires a {required_type.__name__} project")

    target(self, **kwargs)
|
|
353
|
+
|
|
354
|
+
def add_dep(self, obj):
    """
    Register a dependency object with the project.

    Accepts designs, flowgraphs, libraries, and checklists, or any
    list/set/tuple of them (handled recursively). After the object itself
    is inserted, its own dependencies are imported as well when it is a
    DependencySchema.

    Args:
        obj (Union[DesignSchema, FlowgraphSchema, LibrarySchema,
            ChecklistSchema, List, Set, Tuple]): dependency object(s) to add.

    Raises:
        NotImplementedError: If the type of the object is not supported.
    """
    if isinstance(obj, (list, set, tuple)):
        for item in obj:
            self.add_dep(item)
        return

    if isinstance(obj, DesignSchema):
        if not self.has_library(obj.name):
            EditableSchema(self).insert("library", obj.name, obj)
    elif isinstance(obj, FlowgraphSchema):
        self.__import_flow(obj)
    elif isinstance(obj, LibrarySchema):
        if not self.has_library(obj.name):
            EditableSchema(self).insert("library", obj.name, obj)
    elif isinstance(obj, ChecklistSchema):
        if obj.name not in self.getkeys("checklist"):
            EditableSchema(self).insert("checklist", obj.name, obj)
    else:
        raise NotImplementedError

    # Copy dependencies into project
    self._import_dep(obj)
|
|
393
|
+
|
|
394
|
+
def _import_dep(self, obj: DependencySchema):
    """Import an object's dependency tree, then relink library pointers."""
    if isinstance(obj, DependencySchema):
        for dependency in obj.get_dep():
            self.add_dep(dependency)

    # Rebuild dependencies to ensure instances are correct
    self.__populate_deps(obj)
|
|
402
|
+
|
|
403
|
+
def __import_flow(self, flow: FlowgraphSchema):
    """
    Import a FlowgraphSchema into the project.

    No-op if a flowgraph with the same name is already present. Otherwise
    the flowgraph is inserted and every task it references is instantiated
    and registered under the matching tool entry.

    Args:
        flow (FlowgraphSchema): The flowgraph schema object to import.
    """
    if flow.name in self.getkeys("flowgraph"):
        return

    editor = EditableSchema(self)
    editor.insert("flowgraph", flow.name, flow)

    # Instantiate tasks
    for task_cls in flow.get_all_tasks():
        task = task_cls()
        tool_name = task.tool()
        task_name = task.task()
        # TODO: this is not needed once tool moves
        if not self.valid("tool", tool_name):
            editor.insert("tool", tool_name, ToolSchema())
        if not self.valid("tool", tool_name, "task", task_name):
            editor.insert("tool", tool_name, "task", task_name, task)
|
|
429
|
+
|
|
430
|
+
def check_manifest(self) -> bool:
    """
    Performs a comprehensive check of the project's manifest (configuration)
    for consistency and validity.

    Verifies that [option,design], [option,fileset], and [option,flow] are
    set and reference loaded objects, that the selected filesets exist in
    the design and the first one defines a top module, and that any
    [option,alias] entries point at valid libraries and filesets. Error
    messages are logged for any detected inconsistencies.

    Returns:
        bool: True if the manifest is valid and all checks pass, False otherwise.
    """
    error = False

    # Assert design is set
    design = self.get("option", "design")
    if not design:
        self.logger.error("[option,design] has not been set")
        error = True
    elif not self.has_library(design):
        # Assert design is a loaded library
        self.logger.error(f"{design} has not been loaded")
        error = True

    # Assert fileset is set
    filesets = self.get("option", "fileset")
    if not filesets:
        self.logger.error("[option,fileset] has not been set")
        error = True
    elif design and self.has_library(design):
        # Only inspect filesets when the design library is actually loaded;
        # accessing self.design for an unloaded design would raise KeyError
        # instead of reporting the error and returning False.
        design_obj = self.design
        for fileset in filesets:
            if not design_obj.has_fileset(fileset):
                self.logger.error(f"{fileset} is not a valid fileset in {design}")
                error = True

        # Assert design has topmodule in the primary fileset
        if design_obj.has_fileset(filesets[0]):
            if not design_obj.get_topmodule(filesets[0]):
                self.logger.error(f"topmodule has not been set in {design}/{filesets[0]}")
                error = True

    # Assert flow is set
    flow = self.get("option", "flow")
    if not flow:
        self.logger.error("[option,flow] has not been set")
        error = True
    elif flow not in self.getkeys("flowgraph"):
        self.logger.error(f"{flow} has not been loaded")
        error = True

    # Check that alias libraries exist
    # Default to an empty list if 'alias' is not set, to avoid TypeError
    aliases = self.get("option", "alias") or []
    for src_lib, src_fileset, dst_lib, dst_fileset in aliases:
        if not src_lib:
            self.logger.error("source library in [option,alias] must be set")
            error = True
            continue

        # Unloaded source libraries are skipped here; a missing design is
        # already reported by the design checks above.
        if not self.has_library(src_lib):
            continue

        if not self.get("library", src_lib, field="schema").has_fileset(src_fileset):
            self.logger.error(f"{src_fileset} is not a valid fileset in {src_lib}")
            error = True
            continue

        if not dst_lib:
            continue

        if not self.has_library(dst_lib):
            self.logger.error(f"{dst_lib} has not been loaded")
            error = True
            continue

        if dst_fileset and \
                not self.get("library", dst_lib, field="schema").has_fileset(dst_fileset):
            self.logger.error(f"{dst_fileset} is not a valid fileset in {dst_lib}")
            error = True
            continue

    # Check flowgraph
    # Check tasks have classes, cannot check post setup that is a runtime check

    return not error
|
|
527
|
+
|
|
528
|
+
def _init_run(self):
    """Hook invoked before :meth:`.check_manifest` during :meth:`run`.

    Subclasses may override this to finish configuring the project;
    the base implementation does nothing.
    """
    pass
|
|
534
|
+
|
|
535
|
+
def run(self, raise_exception=False):
    '''
    Executes tasks in a flowgraph.

    The run function sets up tools and launches runs for every node
    in the flowgraph starting with 'from' steps and ending at 'to' steps.
    From/to are taken from the schema from/to parameters if defined,
    otherwise from/to are defined as the entry/exit steps of the flowgraph.
    Before starting the process, tool modules are loaded and set up for each
    step and index based on the schema eda dictionary settings.
    Once the tools have been set up, the manifest is checked using the
    check_manifest() function and files in the manifest are hashed based
    on the 'hashmode' schema setting.

    Once launched, each process waits for preceding steps to complete,
    as defined by the flowgraph 'inputs' parameter. Once all inputs
    are ready, previous steps are checked for errors before the
    process enters a local working directory and starts to run
    a tool or to execute a built-in Chip function.

    Fatal errors within a step/index process cause all subsequent
    processes to exit before start, returning control to the main
    program which can then exit.

    Args:
        raise_exception (bool): if True, will rethrow errors that the flow raises,
            otherwise will report the error and return False

    Returns:
        bool: True on success, False if a flow error was reported.

    Examples:
        >>> run()
        Runs the execution flow defined by the flowgraph dictionary.
    '''
    # Imported locally to avoid a circular import at module load time.
    from siliconcompiler.remote import ClientScheduler

    # Start dashboard
    if self.__dashboard:
        if not self.__dashboard.is_running():
            self.__dashboard.open_dashboard()
        # Attach logger
        self.__dashboard.set_logger(self.logger)

    try:
        if self.get('option', 'remote'):
            scheduler = ClientScheduler(self)
        else:
            scheduler = Scheduler(self)
        scheduler.run()
    except Exception as e:
        if raise_exception:
            # Bare raise preserves the original traceback and context.
            raise
        self.logger.error(str(e))
        return False
    finally:
        if self.__dashboard:
            # Update dashboard
            self.__dashboard.update_manifest()
            self.__dashboard.end_of_run()

    return True
|
|
594
|
+
|
|
595
|
+
def _getbuilddir(self) -> str:
    """
    Returns the absolute path to the project's build directory.

    This directory is where all intermediate and final compilation
    artifacts are stored.

    Returns:
        str: The absolute path to the build directory.
    """
    path = self.get('option', 'builddir')
    # A relative [option,builddir] is anchored at the project's cwd
    if not os.path.isabs(path):
        path = os.path.join(self.cwd, path)
    return path
def getworkdir(self, step: str = None, index: Union[int, str] = None) -> str:
    """
    Returns the absolute path to the working directory for a given
    step and index within the project's job structure.

    The directory structure is typically:
    `<build_dir>/<design_name>/<job_name>/<step>/<index>/`

    If `step` and `index` are not provided, the job directory is returned.
    If `step` is provided but `index` is not, index '0' is assumed.

    Args:
        step (str, optional): The name of the flowgraph step (e.g., 'syn', 'place').
            Defaults to None.
        index (Union[int, str], optional): The index of the task within the step.
            Defaults to None (implies '0' if step is set).

    Returns:
        str: The absolute path to the specified working directory.

    Raises:
        ValueError: If the design name is not set in the project.
    """
    if not self.name:
        raise ValueError("name has not been set")

    # <build_dir>/<design_name>/<job_name> is the job directory
    parts = [self._getbuilddir(),
             self.name,
             self.get('option', 'jobname')]

    # Append step/index only when a step is requested; index defaults to '0'
    if step is not None:
        parts.append(step)
        parts.append('0' if index is None else str(index))

    return os.path.join(*parts)
def getcollectiondir(self):
    """
    Returns the absolute path to the directory where collected files are stored.

    This directory is typically located within the project's working directory
    and is used to consolidate files marked for collection.

    Returns:
        str: The absolute path to the collected files directory.
    """
    # Collected files live in a fixed subdirectory of the job directory
    jobdir = self.getworkdir()
    return os.path.join(jobdir, "sc_collected_files")
def collect(self,
            directory: str = None,
            verbose: bool = True,
            whitelist: List[str] = None):
    '''
    Collects files found in the configuration dictionary and places
    them in :meth:`.getcollectiondir`. The function only copies in files that have the 'copy'
    field set as true.

    Args:
        directory (filepath): Output filepath
        verbose (bool): Flag to indicate if logging should be used
        whitelist (list[path]): List of directories that are allowed to be
            collected. If a directory is found that is not on this list
            a RuntimeError will be raised.
    '''

    if not directory:
        directory = self.getcollectiondir()
    directory = os.path.abspath(directory)

    # Start from a clean collection area so stale files never leak in
    if os.path.exists(directory):
        shutil.rmtree(directory)
    os.makedirs(directory)

    if verbose:
        self.logger.info(f'Collecting files to: {directory}')

    # Gather every file/dir parameter marked copy=true, keyed by
    # (keypath, step, index), split into directories and plain files
    dirs = {}
    files = {}

    for key in self.allkeys():
        if key[0] == 'history':
            # skip history
            continue

        # Skip runtime directories
        if key == ('option', 'builddir'):
            # skip builddir
            continue
        if key == ('option', 'cachedir'):
            # skip cache
            continue

        if key[0] == 'tool' and key[2] == 'task' and key[4] in ('input',
                                                                'report',
                                                                'output'):
            # skip flow files from builds
            continue

        leaftype = self.get(*key, field='type')
        is_dir = "dir" in leaftype
        is_file = "file" in leaftype

        if not is_dir and not is_file:
            continue

        if not self.get(*key, field='copy'):
            continue

        for values, step, index in self.get(*key, field=None).getvalues(return_values=False):
            if not values.has_value:
                continue

            # Normalize container values into a plain list of node values
            if isinstance(values, (NodeSetValue, NodeListValue)):
                values = values.values
            else:
                values = [values]

            if is_dir:
                dirs[(key, step, index)] = values
            else:
                files[(key, step, index)] = values

    path_filter = FilterDirectories(self)
    for key, step, index in sorted(dirs.keys()):
        abs_paths = self.find_files(*key, step=step, index=index)

        # Roots already copied for this key; used to skip nested directories
        new_paths = set()

        if not isinstance(abs_paths, (list, tuple, set)):
            abs_paths = [abs_paths]

        abs_paths = zip(abs_paths, dirs[(key, step, index)])
        abs_paths = sorted(abs_paths, key=lambda p: p[0])

        for abs_path, value in abs_paths:
            if not abs_path:
                raise FileNotFoundError(f"{value.get()} could not be copied")

            if abs_path.startswith(directory):
                # File already imported in directory
                continue

            # BUGFIX: was `abs_path.startwith(...)` (AttributeError at
            # runtime); skip paths nested under an already-collected root
            if any(abs_path.startswith(root) for root in new_paths):
                continue

            new_paths.add(abs_path)

            import_path = os.path.join(directory, value.get_hashed_filename())
            if os.path.exists(import_path):
                continue

            if whitelist is not None and abs_path not in whitelist:
                raise RuntimeError(f'{abs_path} is not on the approved collection list.')

            if verbose:
                self.logger.info(f"  Collecting directory: {abs_path}")
            path_filter.abspath = abs_path
            shutil.copytree(abs_path, import_path, ignore=path_filter.filter)
            path_filter.abspath = None

    for key, step, index in sorted(files.keys()):
        abs_paths = self.find_files(*key, step=step, index=index)

        if not isinstance(abs_paths, (list, tuple, set)):
            abs_paths = [abs_paths]

        abs_paths = zip(abs_paths, files[(key, step, index)])
        abs_paths = sorted(abs_paths, key=lambda p: p[0])

        for abs_path, value in abs_paths:
            if not abs_path:
                raise FileNotFoundError(f"{value.get()} could not be copied")

            if abs_path.startswith(directory):
                # File already imported in directory
                continue

            import_path = os.path.join(directory, value.get_hashed_filename())
            if os.path.exists(import_path):
                continue

            if verbose:
                self.logger.info(f"  Collecting file: {abs_path}")
            shutil.copy2(abs_path, import_path)
def history(self, job: str) -> "Project":
    '''
    Returns a *mutable* reference to ['history', job] as a Project object.

    Raises:
        KeyError: if job does not currently exist in history

    Args:
        job (str): Name of historical job to return.
    '''
    # Guard clause inverted: return directly on the valid path
    if job in self.getkeys("history"):
        return self.get("history", job, field="schema")
    raise KeyError(f"{job} is not a valid job")
def _record_history(self):
    '''
    Copies the current project into the history
    '''
    jobname = self.get("option", "jobname")
    snapshot = self.copy()

    # Strip the nested history from the snapshot so history entries
    # do not recursively contain earlier history
    EditableSchema(snapshot).insert("history", BaseSchema(), clobber=True)

    if jobname in self.getkeys("history"):
        self.logger.warning(f"Overwriting job {jobname}")

    EditableSchema(self).insert("history", jobname, snapshot, clobber=True)
def __getstate__(self):
    """Return a picklable copy of the instance state.

    Loggers and the dashboard are not serializable, so they are removed
    here and rebuilt in ``__setstate__``.
    """
    # Work on a shallow copy so the live object keeps its attributes
    state = self.__dict__.copy()

    # Strip non-serializable members (logger objects and the dashboard);
    # keys are the name-mangled attribute names of the Project class
    for unpicklable in ("_Project__logger", "_logger_console", "_Project__dashboard"):
        del state[unpicklable]

    return state
def __setstate__(self, state):
    """Restore instance state from pickle.

    The logger and dashboard were stripped by ``__getstate__`` (they are
    not serializable) and are reinitialized here.
    """
    self.__dict__ = state

    # Reinitialize logger on restore
    self.__init_logger()

    # Restore dashboard
    self.__init_dashboard()
def get_filesets(self) -> List[Tuple[NamedSchema, str]]:
    """
    Returns the filesets selected for this project
    """
    # Translate [option,alias] entries into the mapping consumed by the
    # design's get_fileset(): (src_lib, src_fileset) -> (dst_obj, dst_fileset)
    remap = {}
    for src_lib, src_fileset, dst_lib, dst_fileset in self.get("option", "alias"):
        if not dst_lib:
            dst_obj = None
        else:
            if not self.has_library(dst_lib):
                raise KeyError(f"{dst_lib} is not a loaded library")
            dst_obj = self.get("library", dst_lib, field="schema")
        # Empty destination fileset means "no fileset" (deletion)
        remap[(src_lib, src_fileset)] = (dst_obj, dst_fileset if dst_fileset else None)

    return self.design.get_fileset(self.get("option", "fileset"), alias=remap)
def get_task(self,
             tool: str = None,
             task: str = None,
             filter: Union[Type[TaskSchema], Callable[[TaskSchema], bool]] = None) -> \
        Union[Set[TaskSchema], TaskSchema]:
    """Retrieves tasks based on specified criteria.

    This method allows you to fetch tasks by tool name, task name, or by applying a custom
    filter. If a single task matches the criteria, that task object is returned directly.
    If multiple tasks match, a set of :class:`TaskSchema` objects is returned.
    If no criteria are provided, all available tasks are returned.

    Args:
        tool (str, optional): The name of the tool to filter tasks by. Defaults to None.
        task (str, optional): The name of the task to filter by. Defaults to None.
        filter (Union[Type[TaskSchema], Callable[[TaskSchema], bool]], optional):
            A filter to apply to the tasks. This can be:
            - A `Type[TaskSchema]`: Only tasks that are instances of this type will be returned.
            - A `Callable[[TaskSchema], bool]`: A function that takes a `TaskSchema` object
              and returns `True` if the task should be included, `False` otherwise.
            Defaults to None.

    Returns:
        Union[Set[TaskSchema], TaskSchema]:
            - If exactly one task matches the criteria, returns that single `TaskSchema` object.
            - If multiple tasks match or no specific tool/task is provided (and thus all tasks
              are considered), returns a `Set[TaskSchema]` containing the matching tasks.
    """
    def _accept(candidate) -> bool:
        # A candidate passes when it matches tool, task, and the filter
        if tool and candidate.tool() != tool:
            return False
        if task and candidate.task() != task:
            return False
        if filter:
            if inspect.isclass(filter):
                return isinstance(candidate, filter)
            if callable(filter):
                return bool(filter(candidate))
        return True

    matched = set()
    for tool_name in self.getkeys("tool"):
        for task_name in self.getkeys("tool", tool_name, "task"):
            candidate = self.get("tool", tool_name, "task", task_name, field="schema")
            if _accept(candidate):
                matched.add(candidate)

    # Unwrap a unique match for caller convenience
    if len(matched) == 1:
        return matched.pop()
    return matched
def set_design(self, design: Union[DesignSchema, str]):
    """
    Sets the active design for this project.

    This method allows you to specify the primary design that the project
    will operate on. If a `DesignSchema` object is provided, it is first
    added as a dependency.

    Args:
        design (Union[DesignSchema, str]): The design object or its name (string)
            to be set as the current design.

    Raises:
        TypeError: If the provided `design` is not a string or a `DesignSchema` object.
    """
    if isinstance(design, DesignSchema):
        # Register the object as a dependency, then record it by name
        self.add_dep(design)
        return self.set("option", "design", design.name)
    if isinstance(design, str):
        return self.set("option", "design", design)
    raise TypeError("design must be string or Design object")
def set_flow(self, flow: Union[FlowgraphSchema, str]):
    """
    Sets the active flowgraph for this project.

    This method allows you to specify the sequence of steps and tasks
    (the flow) that the project will execute. If a `FlowgraphSchema` object
    is provided, it is first added as a dependency.

    Args:
        flow (Union[FlowgraphSchema, str]): The flowgraph object or its name (string)
            to be set as the current flow.

    Raises:
        TypeError: If the provided `flow` is not a string or a `FlowgraphSchema` object.
    """
    if isinstance(flow, FlowgraphSchema):
        # Register the object as a dependency, then record it by name
        self.add_dep(flow)
        return self.set("option", "flow", flow.name)
    if isinstance(flow, str):
        return self.set("option", "flow", flow)
    raise TypeError("flow must be string or Flowgraph object")
def add_fileset(self, fileset: Union[List[str], str], clobber: bool = False):
    """
    Adds one or more filesets to be used in this project.

    Filesets are collections of related files within a design. This method
    allows you to specify which filesets from the selected design library
    should be included in the current project context.

    Args:
        fileset (Union[List[str], str]): The name(s) of the fileset(s) to add.
            Can be a single string or a list of strings.
        clobber (bool): If True, existing filesets will be replaced by the new ones.
            If False, new filesets will be added to the existing list.
            Defaults to False.

    Raises:
        TypeError: If `fileset` is not a string or a list/tuple/set of strings.
        ValueError: If any of the specified filesets are not found in the currently
            selected design.
    """
    # Normalize to a collection of strings, rejecting anything else
    if isinstance(fileset, str):
        filesets = [fileset]
    elif isinstance(fileset, (list, tuple, set)) and \
            all(isinstance(entry, str) for entry in fileset):
        filesets = fileset
    else:
        raise TypeError("fileset must be a string or a list/tuple/set of strings")

    # Every requested fileset must exist in the active design
    for name in filesets:
        if not self.design.has_fileset(name):
            raise ValueError(f"{name} is not a valid fileset in {self.design.name}")

    if clobber:
        return self.set("option", "fileset", filesets)
    return self.add("option", "fileset", filesets)
def add_alias(self,
              src_dep: Union[DesignSchema, str],
              src_fileset: str,
              alias_dep: Union[DesignSchema, str],
              alias_fileset: str,
              clobber: bool = False):
    """
    Adds an aliased fileset mapping to the project.

    This method allows you to redirect a fileset reference from a source
    library/fileset to a different destination library/fileset. This is
    useful for substituting design components or test environments without
    modifying the original design.

    Args:
        src_dep (Union[DesignSchema, str]): The source design library (object or name)
            from which the fileset is being aliased.
        src_fileset (str): The name of the source fileset to alias.
        alias_dep (Union[DesignSchema, str]): The destination design library (object or name)
            to which the fileset is being redirected.
            Can be None or an empty string to indicate deletion.
        alias_fileset (str): The name of the destination fileset. Can be None or an empty string
            to indicate deletion of the fileset reference.
        clobber (bool): If True, any existing alias for `(src_dep, src_fileset)` will be
            overwritten. If False, the alias will be added (or updated if it's
            the same source). Defaults to False.

    Raises:
        TypeError: If `src_dep` or `alias_dep` are not valid types (string or DesignSchema).
        KeyError: If `alias_dep` is a string but the corresponding library is not loaded.
        ValueError: If `src_fileset` is not found in `src_dep`, or if `alias_fileset` is
            not found in `alias_dep` (when `alias_fileset` is not None).
    """

    # Resolve a source given by name: use the loaded library object when
    # available, otherwise keep only the name (source need not be loaded).
    if isinstance(src_dep, str):
        if self.has_library(src_dep):
            src_dep = self.get("library", src_dep, field="schema")
        else:
            src_dep_name = src_dep
            src_dep = None

    if src_dep is not None:
        if isinstance(src_dep, DesignSchema):
            src_dep_name = src_dep.name
            # Register the object as a dependency if not already loaded
            if not self.has_library(src_dep_name):
                self.add_dep(src_dep)
        else:
            raise TypeError("source dep is not a valid type")

        # Fileset existence can only be verified on a resolved object
        if not src_dep.has_fileset(src_fileset):
            raise ValueError(f"{src_dep_name} does not have {src_fileset} as a fileset")

    # Normalize "no destination" spellings: None -> "", "" fileset -> None
    if alias_dep is None:
        alias_dep = ""

    if alias_fileset == "":
        alias_fileset = None

    if isinstance(alias_dep, str):
        if alias_dep == "":
            # Empty destination means the alias deletes the reference
            alias_dep = None
            alias_dep_name = None
            alias_fileset = None
        else:
            # Destination given by name must already be loaded
            if not self.has_library(alias_dep):
                raise KeyError(f"{alias_dep} has not been loaded")

            alias_dep = self.get("library", alias_dep, field="schema")

    if alias_dep is not None:
        if isinstance(alias_dep, DesignSchema):
            alias_dep_name = alias_dep.name
            # Register the object as a dependency if not already loaded
            if not self.has_library(alias_dep_name):
                self.add_dep(alias_dep)
        else:
            raise TypeError("alias dep is not a valid type")

        if alias_fileset is not None and not alias_dep.has_fileset(alias_fileset):
            raise ValueError(f"{alias_dep_name} does not have {alias_fileset} as a fileset")

    # Aliases are stored as 4-tuples in [option,alias]
    alias = (src_dep_name, src_fileset, alias_dep_name, alias_fileset)
    if clobber:
        return self.set("option", "alias", alias)
    else:
        return self.add("option", "alias", alias)
def has_library(self, library: str) -> bool:
    """
    Checks if a library with the given name exists and is loaded in the project.

    Args:
        library (Union[str, NamedSchema]): The name of the library (string)
            or a `NamedSchema` object representing the library.

    Returns:
        bool: True if the library exists, False otherwise.
    """
    # Accept either a schema object or its name
    name = library.name if isinstance(library, NamedSchema) else library
    return name in self.getkeys("library")
def _summary_headers(self) -> List[Tuple[str, str]]:
    """
    Generates a list of key-value pairs representing project-specific headers
    to be included in the summary report.

    This method provides information about the selected design, filesets,
    any active aliases, and the job directory. Projects can extend this
    method to add custom information to their summaries.

    Returns:
        List[Tuple[str, str]]: A list of tuples, where each tuple contains
            a header name (str) and its corresponding value (str).
    """
    # Render each alias as "src (fs) -> dst (fs)" or "... -> deleted",
    # skipping entries whose libraries are not loaded
    alias_entries = []
    for src, src_fs, dst, dst_fs in self.get("option", "alias"):
        if not self.has_library(src):
            continue
        if dst and not self.has_library(dst):
            continue

        if dst and dst_fs:
            target = f"{dst} ({dst_fs})"
        else:
            target = "deleted"
        alias_entries.append(f"{src} ({src_fs}) -> {target}")

    headers = [
        ("design", self.get("option", "design"))
    ]

    selected_filesets = self.get("option", "fileset")
    if selected_filesets:
        headers.append(("filesets", ", ".join(selected_filesets)))
    if alias_entries:
        headers.append(("alias", ", ".join(alias_entries)))
    headers.append(("jobdir", self.getworkdir()))

    return headers
def _snapshot_info(self) -> List[Tuple[str, str]]:
    """
    Generates a list of key-value pairs representing project-specific
    information to be included in snapshots.

    This method provides basic information about the design used in the
    snapshot. Projects can extend this method to add custom information.

    Returns:
        List[Tuple[str, str]]: A list of tuples, where each tuple contains
            an information label (str) and its corresponding value (str).
    """
    # Base implementation reports only the design name
    return [("Design", self.get("option", "design"))]
def summary(self, jobname: str = None, fd: TextIO = None) -> None:
    '''
    Prints a summary of the compilation manifest.

    Metrics from the flowgraph nodes, or from/to parameter if
    defined, are printed out on a per step basis.

    Args:
        jobname (str): If provided prints uses this job to print summary,
            otherwise the value in :keypath:`option,jobname` will be used.
        fd (TextIO): If provided prints to this file descriptor instead of stdout.

    Examples:
        >>> chip.summary()
        Prints out a summary of the run to stdout.
    '''
    jobs = self.getkeys("history")

    if not jobs:
        raise ValueError("no history to summarize")

    # Fall back to the current jobname, then to the first recorded job
    if jobname is None:
        jobname = self.get("option", "jobname")
    if jobname not in jobs:
        requested = jobname
        jobname = jobs[0]
        self.logger.warning(f"{requested} not found in history, picking {jobname}")

    job = self.history(jobname)
    job.get("metric", field='schema').summary(
        headers=job._summary_headers(),
        fd=fd)
def find_result(self,
                filetype: str = None, step: str = None,
                index: str = "0", directory: str = "outputs",
                filename: str = None) -> str:
    """
    Returns the absolute path of a compilation result file.

    This utility function constructs and returns the absolute path to a
    result file based on the provided arguments. The typical result
    directory structure is:
    `<build_dir>/<design_name>/<job_name>/<step>/<index>/<directory>/<design>.<filetype>`

    Args:
        filetype (str, optional): The file extension (e.g., 'v', 'def', 'gds').
            Required if `filename` is not provided.
        step (str, optional): The name of the task step (e.g., 'syn', 'place').
            Required.
        index (str, optional): The task index within the step. Defaults to "0".
        directory (str, optional): The node directory within the step to search
            (e.g., 'outputs', 'reports'). Defaults to "outputs".
        filename (str, optional): The exact filename to search for. If provided,
            `filetype` is ignored for constructing the path.
            Defaults to None.

    Returns:
        str: The absolute path to the found file, or None if the file is not found.

    Raises:
        ValueError: If `step` is not provided, or if `[option,fileset]` is not set
            when `filename` is not provided.

    Examples:
        >>> vg_filepath = chip.find_result('vg', 'syn')
        Returns the absolute path to the gate level verilog.
    """

    # Backwards compatibility: when an explicit filename is given, allow the
    # first positional argument to carry the step instead of the filetype
    if filename and step is None:
        step = filetype

    if step is None:
        raise ValueError("step is required")

    workdir = self.getworkdir(step, index)

    if not filename:
        fileset = self.get("option", "fileset")
        if not fileset:
            raise ValueError("[option,fileset] is not set")
        design_name = self.design.get_topmodule(fileset[0])

        # Probe both the plain and gzip-compressed variants
        checkfiles = [
            os.path.join(workdir, directory, f'{design_name}.{filetype}'),
            os.path.join(workdir, directory, f'{design_name}.{filetype}.gz')
        ]
    else:
        checkfiles = [
            os.path.join(workdir, directory, filename)
        ]

    for checkfile in checkfiles:
        # BUGFIX: the debug message had no interpolation; log the path probed
        self.logger.debug(f"Finding node file: {checkfile}")
        if os.path.exists(checkfile):
            return os.path.abspath(checkfile)

    return None
def snapshot(self, path: str = None, display: bool = True) -> None:
    '''
    Creates a snapshot image summarizing the job's progress and key information.

    This function generates a PNG image that provides a visual overview
    of the compilation job. The image can be saved to a specified path
    and optionally displayed after generation.

    Args:
        path (str, optional): The file path where the snapshot image should be saved.
            If not provided, it defaults to
            `<job_directory>/<design_name>.png`.
        display (bool, optional): If True, the generated image will be opened for viewing
            if the system supports it and `option,nodisplay` is False.
            Defaults to True.

    Examples:
        >>> chip.snapshot()
        Creates a snapshot image in the default location.
    '''
    from siliconcompiler.report import generate_summary_image, _open_summary_image

    # Default location: <jobdir>/<design>.png
    if not path:
        path = os.path.join(self.getworkdir(), f'{self.design.name}.png')

    # Regenerate from scratch if an older image exists
    if os.path.exists(path):
        os.remove(path)

    generate_summary_image(self, path, self._snapshot_info())

    # Open the image only when it was produced, displaying is allowed,
    # and the caller asked for it
    if os.path.isfile(path) and not self.get('option', 'nodisplay') and display:
        _open_summary_image(path)
def show(self, filename=None, screenshot=False, extension=None) -> str:
    '''
    Opens a graphical viewer for a specified file or the last generated layout.

    The `show` function identifies an appropriate viewer tool based on the
    file's extension and the registered showtools. Display settings and
    technology-specific viewing configurations are read from the project's
    in-memory schema. All temporary rendering and display files are stored
    in a dedicated `_show_<jobname>` directory within the build directory.

    If no `filename` is provided, the method attempts to automatically find
    the last generated layout file in the build directory based on supported
    extensions from registered showtools.

    Args:
        filename (path, optional): The path to the file to display. If None,
                                   the system attempts to find the most recent
                                   layout file. Defaults to None.
        screenshot (bool, optional): If True, the operation is treated as a
                                     screenshot request, using `ScreenshotTaskSchema`
                                     instead of `ShowTaskSchema`. Defaults to False.
        extension (str, optional): The specific file extension to search for when
                                   automatically finding a file (e.g., 'gds', 'lef').
                                   Used only if `filename` is None. Defaults to None.

    Returns:
        str: The path to the generated screenshot file if `screenshot` is True,
             otherwise None.

    Examples:
        >>> show('build/oh_add/job0/write.gds/0/outputs/oh_add.gds')
        Displays a GDS file using a viewer assigned by the showtool.
    '''

    # Screenshot mode swaps in the screenshot task class; both share the
    # same lookup/setup flow below.
    tool_cls = ScreenshotTaskSchema if screenshot else ShowTaskSchema

    sc_jobname = self.get("option", "jobname")
    # Step/index of the node whose result we end up showing; stay None when
    # the caller passed an explicit filename.
    sc_step = None
    sc_index = None

    # Remember whether the caller supplied a file, since `filename` is
    # reassigned by the auto-search below.
    has_filename = filename is not None
    # Finding last layout if no argument specified
    if filename is None:
        # Prefer searching the recorded history for this jobname; fall back
        # to the live project if that job has no history entry.
        try:
            search_obj = self.history(sc_jobname)
        except KeyError:
            search_obj = self

        self.logger.info('Searching build directory for layout to show.')

        # Walk the flowgraph in reverse execution order so the most
        # downstream (latest) results are checked first.
        search_nodes = []
        flow = search_obj.get("option", "flow")
        if flow:
            flow_obj = search_obj.get("flowgraph", flow, field="schema")
            for nodes in flow_obj.get_execution_order(reverse=True):
                search_nodes.extend(nodes)

        # Collect every extension any registered show/screenshot task claims
        # to support; tasks without that API are silently skipped.
        exts = set()
        for cls in tool_cls.get_task(None):
            try:
                exts.update(cls().get_supported_show_extentions())
            except NotImplementedError:
                pass

        for ext in exts:
            # Honor an explicit extension filter from the caller.
            if extension and extension != ext:
                continue

            for step, index in search_nodes:
                filename = search_obj.find_result(ext,
                                                  step=step,
                                                  index=index)
                if filename:
                    # Record which node produced the file for later use in
                    # the jobname and task configuration.
                    sc_step = step
                    sc_index = index
                    break
            if filename:
                break

        if filename is None:
            self.logger.error('Unable to automatically find layout in build directory.')
            self.logger.error('Try passing in a full path to show() instead.')
            return

    filepath = os.path.abspath(filename)

    # Only announce auto-discovered files; an explicit path needs no echo.
    if not has_filename:
        self.logger.info(f'Showing file (unknown)')

    # Check that file exists
    if not os.path.exists(filepath):
        self.logger.error(f"Invalid filepath {filepath}.")
        return

    filetype = get_file_ext(filepath)

    # Resolve the concrete task able to display this filetype.
    # NOTE(review): this failure path returns False while the ones above
    # return None — confirm callers treat both as falsy.
    task = tool_cls.get_task(filetype)
    if task is None:
        self.logger.error(f"Filetype '{filetype}' not available in the registered showtools.")
        return False

    # Create copy of project to avoid changing user project
    proj = self.copy()

    nodename = "screenshot" if screenshot else "show"

    class ShowFlow(FlowgraphSchema):
        """
        Small auto created flow to build a single node show/screenshot flow
        """
        def __init__(self, nodename, task):
            super().__init__()
            self.set_name("showflow")

            self.node(nodename, task)

    proj.set_flow(ShowFlow(nodename, task))

    # Setup options: force a quiet, clean, display-enabled run and clear any
    # step/index/from/to/prune restrictions inherited from the user project.
    for option, value in [
            ("track", False),
            ("hash", False),
            ("nodisplay", False),
            ("continue", True),
            ("quiet", False),
            ("clean", True)]:
        proj.set("option", option, value)
    proj.unset("arg", "step")
    proj.unset("arg", "index")
    proj.unset("option", "to")
    proj.unset("option", "prune")
    proj.unset("option", "from")

    # Unique jobname keyed on the source node and tool so repeated show()
    # calls do not collide in the build directory.
    jobname = f"_{nodename}_{sc_jobname}_{sc_step}_{sc_index}_{task.tool()}"
    proj.set("option", "jobname", jobname)

    # Setup in task variables
    # Rebind `task` to the copied project's instance of the same task class
    # so the settings below land in `proj`, not the original project.
    task: ShowTaskSchema = proj.get_task(filter=task.__class__)
    task.set_showfilepath(filename)
    task.set_showfiletype(filetype)
    task.set_shownode(jobname=sc_jobname, step=sc_step, index=sc_index)

    # run show flow
    proj.run(raise_exception=True)
    if screenshot:
        return proj.find_result('png', step=nodename)