siliconcompiler 0.34.0__py3-none-any.whl → 0.34.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/__init__.py +14 -2
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +1 -1
- siliconcompiler/apps/sc.py +1 -1
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +3 -3
- siliconcompiler/apps/sc_show.py +3 -3
- siliconcompiler/apps/utils/replay.py +4 -4
- siliconcompiler/checklist.py +203 -2
- siliconcompiler/constraints/__init__.py +17 -0
- siliconcompiler/constraints/asic_component.py +378 -0
- siliconcompiler/constraints/asic_floorplan.py +449 -0
- siliconcompiler/constraints/asic_pins.py +489 -0
- siliconcompiler/constraints/asic_timing.py +517 -0
- siliconcompiler/core.py +31 -249
- siliconcompiler/data/templates/email/general.j2 +3 -3
- siliconcompiler/data/templates/email/summary.j2 +1 -1
- siliconcompiler/data/templates/issue/README.txt +1 -1
- siliconcompiler/data/templates/report/sc_report.j2 +7 -7
- siliconcompiler/dependencyschema.py +10 -174
- siliconcompiler/design.py +325 -114
- siliconcompiler/flowgraph.py +63 -15
- siliconcompiler/library.py +133 -0
- siliconcompiler/metric.py +94 -72
- siliconcompiler/metrics/__init__.py +7 -0
- siliconcompiler/metrics/asic.py +245 -0
- siliconcompiler/metrics/fpga.py +220 -0
- siliconcompiler/optimizer/vizier.py +2 -2
- siliconcompiler/package/__init__.py +138 -35
- siliconcompiler/package/github.py +6 -10
- siliconcompiler/packageschema.py +256 -12
- siliconcompiler/pathschema.py +226 -0
- siliconcompiler/pdk.py +5 -5
- siliconcompiler/project.py +459 -0
- siliconcompiler/remote/client.py +18 -12
- siliconcompiler/remote/server.py +2 -2
- siliconcompiler/report/dashboard/cli/__init__.py +6 -6
- siliconcompiler/report/dashboard/cli/board.py +3 -3
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -5
- siliconcompiler/report/dashboard/web/components/flowgraph.py +4 -4
- siliconcompiler/report/dashboard/web/components/graph.py +2 -2
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +5 -5
- siliconcompiler/report/html_report.py +1 -1
- siliconcompiler/report/report.py +4 -4
- siliconcompiler/report/summary_table.py +2 -2
- siliconcompiler/report/utils.py +5 -5
- siliconcompiler/scheduler/docker.py +4 -10
- siliconcompiler/scheduler/run_node.py +4 -8
- siliconcompiler/scheduler/scheduler.py +18 -24
- siliconcompiler/scheduler/schedulernode.py +161 -143
- siliconcompiler/scheduler/send_messages.py +3 -3
- siliconcompiler/scheduler/slurm.py +5 -3
- siliconcompiler/scheduler/taskscheduler.py +10 -8
- siliconcompiler/schema/__init__.py +0 -2
- siliconcompiler/schema/baseschema.py +148 -26
- siliconcompiler/schema/editableschema.py +14 -6
- siliconcompiler/schema/journal.py +23 -15
- siliconcompiler/schema/namedschema.py +30 -4
- siliconcompiler/schema/parameter.py +34 -19
- siliconcompiler/schema/parametertype.py +2 -0
- siliconcompiler/schema/parametervalue.py +198 -15
- siliconcompiler/schema/schema_cfg.py +18 -14
- siliconcompiler/schema_obj.py +5 -3
- siliconcompiler/tool.py +591 -179
- siliconcompiler/tools/__init__.py +2 -0
- siliconcompiler/tools/builtin/_common.py +5 -5
- siliconcompiler/tools/builtin/concatenate.py +5 -5
- siliconcompiler/tools/builtin/minimum.py +4 -4
- siliconcompiler/tools/builtin/mux.py +4 -4
- siliconcompiler/tools/builtin/nop.py +4 -4
- siliconcompiler/tools/builtin/verify.py +7 -7
- siliconcompiler/tools/execute/exec_input.py +1 -1
- siliconcompiler/tools/genfasm/genfasm.py +1 -6
- siliconcompiler/tools/openroad/_apr.py +5 -1
- siliconcompiler/tools/openroad/antenna_repair.py +1 -1
- siliconcompiler/tools/openroad/macro_placement.py +1 -1
- siliconcompiler/tools/openroad/power_grid.py +1 -1
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +5 -0
- siliconcompiler/tools/opensta/timing.py +26 -3
- siliconcompiler/tools/slang/__init__.py +2 -2
- siliconcompiler/tools/surfer/__init__.py +0 -0
- siliconcompiler/tools/surfer/show.py +53 -0
- siliconcompiler/tools/surfer/surfer.py +30 -0
- siliconcompiler/tools/vpr/route.py +27 -14
- siliconcompiler/tools/vpr/vpr.py +23 -6
- siliconcompiler/tools/yosys/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/procs.tcl +143 -0
- siliconcompiler/tools/yosys/{sc_synth_asic.tcl → scripts/sc_synth_asic.tcl} +4 -0
- siliconcompiler/tools/yosys/{sc_synth_fpga.tcl → scripts/sc_synth_fpga.tcl} +24 -77
- siliconcompiler/tools/yosys/syn_fpga.py +14 -0
- siliconcompiler/toolscripts/_tools.json +9 -13
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +33 -0
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +33 -0
- siliconcompiler/utils/__init__.py +2 -1
- siliconcompiler/utils/flowgraph.py +24 -23
- siliconcompiler/utils/issue.py +23 -29
- siliconcompiler/utils/logging.py +35 -6
- siliconcompiler/utils/showtools.py +6 -1
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/METADATA +15 -25
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/RECORD +109 -97
- siliconcompiler/schema/packageschema.py +0 -101
- siliconcompiler/tools/yosys/procs.tcl +0 -71
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +0 -68
- /siliconcompiler/tools/yosys/{sc_lec.tcl → scripts/sc_lec.tcl} +0 -0
- /siliconcompiler/tools/yosys/{sc_screenshot.tcl → scripts/sc_screenshot.tcl} +0 -0
- /siliconcompiler/tools/yosys/{syn_strategies.tcl → scripts/syn_strategies.tcl} +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import contextlib
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
import os.path
|
|
5
|
+
|
|
6
|
+
from siliconcompiler.schema.baseschema import BaseSchema
|
|
7
|
+
from siliconcompiler.schema.editableschema import EditableSchema
|
|
8
|
+
from siliconcompiler.schema.parameter import Parameter, Scope
|
|
9
|
+
from siliconcompiler.schema.utils import trim
|
|
10
|
+
|
|
11
|
+
from siliconcompiler.package import Resolver
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class PathSchemaBase(BaseSchema):
    '''
    Schema extension that simplifies find_files and check_filepaths by
    pulling the working directory, logger, and collection directory from
    the root schema object automatically.
    '''

    def find_files(self, *keypath,
                   missing_ok=False,
                   step=None, index=None):
        """
        Returns absolute paths to files or directories based on the keypath
        provided.

        The keypath provided must point to a schema parameter of type file,
        dir, or lists of either. Otherwise, it will trigger an error.

        Args:
            keypath (list of str): Variable length schema key list.
            missing_ok (bool): If True, silently return None when files aren't
                found. If False, print an error and set the error flag.
            step (str): Step name to access for parameters that may be
                specified on a per-node basis.
            index (str): Index name to access for parameters that may be
                specified on a per-node basis.

        Returns:
            If keys point to a scalar entry, returns an absolute path to that
            file/directory, or None if not found. If keys point to a list
            entry, returns a list of either the absolute paths or None for
            each entry, depending on whether it is found.

        Examples:
            >>> schema.find_files('input', 'verilog')
            Returns a list of absolute paths to source files, as specified in
            the schema.
        """
        root = self._parent(root=True)

        # Project-style roots expose "cwd" and a "collection_dir" callable;
        # fall back to the process working directory when they don't.
        working_dir = getattr(root, "cwd", os.getcwd())
        collect = getattr(root, "collection_dir", None)
        collect_dir = collect() if collect else None

        return super().find_files(*keypath,
                                  missing_ok=missing_ok,
                                  step=step,
                                  index=index,
                                  collection_dir=collect_dir,
                                  cwd=working_dir)

    def check_filepaths(self, ignore_keys=None):
        '''
        Verifies that paths to all files in the manifest are valid.

        Args:
            ignore_keys (list of keypaths): list of keypaths to ignore while
                checking.

        Returns:
            True if all file paths are valid, otherwise False.
        '''
        root = self._parent(root=True)

        working_dir = getattr(root, "cwd", os.getcwd())
        # Use the root's logger when available so messages carry project
        # context; otherwise fall back to a module-scoped logger.
        log = getattr(root,
                      "logger",
                      logging.getLogger("siliconcompiler.check_filepaths"))
        collect = getattr(root, "collection_dir", None)
        collect_dir = collect() if collect else None

        return super().check_filepaths(ignore_keys=ignore_keys,
                                       logger=log,
                                       collection_dir=collect_dir,
                                       cwd=working_dir)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class PathSchema(PathSchemaBase):
    '''
    Schema extension to add support for path handling with dataroots.

    A "dataroot" is a named external source of files (local directory, git
    repository, archive URL, python package, ...) recorded under the
    ['dataroot', <name>, ...] keys and resolved lazily via the package
    Resolver machinery.
    '''

    def __init__(self):
        super().__init__()

        schema = EditableSchema(self)

        # ['dataroot', <name>, 'path']: where the named data source lives.
        schema.insert(
            'dataroot', 'default', 'path',
            Parameter(
                'str',
                scope=Scope.GLOBAL,
                shorthelp="Data directory path",
                example=[
                    "api: chip.set('dataroot', "
                    "'freepdk45_data', 'path', 'ssh://git@github.com/siliconcompiler/freepdk45/')"],
                help=trim("""
                Data directory path, this points the location where the data can be
                retrieved or accessed.
                Allowed roots:

                * /path/on/network/drive
                * file:///path/on/network/drive
                * git+https://github.com/xyz/xyz
                * git://github.com/xyz/xyz
                * git+ssh://github.com/xyz/xyz
                * ssh://github.com/xyz/xyz
                * https://github.com/xyz/xyz/archive
                * https://zeroasic.com/xyz.tar.gz
                * github://siliconcompiler/lambdapdk/v1.0/asap7.tar.gz
                * python://siliconcompiler
                """)))

        # ['dataroot', <name>, 'tag']: version/revision selector for the path.
        schema.insert(
            'dataroot', 'default', 'tag',
            Parameter(
                'str',
                scope=Scope.GLOBAL,
                shorthelp="Data directory reference tag/version",
                example=[
                    "api: chip.set('dataroot', 'freepdk45_data', 'tag', '07ec4aa')"],
                help=trim("""
                Data directory reference tag. The meaning of the this tag depends on the
                context of the path.
                For git, this can be a tag, branch, or commit id. For https this is the version
                of the file that will be downloaded.
                """)))

    def set_dataroot(self, name: str, path: str, tag: str = None):
        """
        Registers a data directory by its name with the root and associated tag. If the path
        provided is a file, the path recorded will be the directory the file is located in.

        Args:
            name (str): Data directory name
            path (str): Path to the root of the data directory, can be directory, git url,
                archive url, or path to a file
            tag (str): Reference of the sources, can be commitid, branch name, tag

        Examples:
            >>> schema.set_dataroot('siliconcompiler_data',
                                    'git+https://github.com/siliconcompiler/siliconcompiler',
                                    'v1.0.0')
            Records the data directory for siliconcompiler_data as a git clone for tag v1.0.0
            >>> schema.set_dataroot('file_data', __file__)
            Records the data directory for file_data as the directory that __file__ is found in.
        """

        # Convenience: passing __file__ (or any file) records its directory.
        if os.path.isfile(path):
            path = os.path.dirname(os.path.abspath(path))

        # Call BaseSchema.set explicitly so subclass set() overrides
        # (e.g. active-dataroot handling) are bypassed.
        BaseSchema.set(self, "dataroot", name, "path", path)
        if tag:
            BaseSchema.set(self, "dataroot", name, "tag", tag)

    def get_dataroot(self, name: str) -> str:
        """
        Returns absolute path to the data directory.

        Raises:
            ValueError: if the data directory is not found

        Args:
            name (str): name of the data directory to find.

        Returns:
            Path to the directory root.

        Examples:
            >>> schema.get_dataroot('siliconcompiler')
            Returns the path to the root of the siliconcompiler data directory.
        """

        if not BaseSchema.valid(self, "dataroot", name):
            raise ValueError(f"{name} is not a recognized source")

        path = BaseSchema.get(self, "dataroot", name, "path")
        tag = BaseSchema.get(self, "dataroot", name, "tag")

        # Resolver type is picked from the path scheme (git+, https, ...);
        # get_path() may fetch/clone the source on first access.
        resolver = Resolver.find_resolver(path)
        return resolver(name, self._parent(root=True), path, tag).get_path()

    def _find_files_dataroot_resolvers(self):
        """
        Returns a dictionary of path resolvers for data directory handling
        in find_files.

        Returns:
            dictionary mapping dataroot name to a get_path callable
        """
        schema_root = self._parent(root=True)
        resolver_map = {}
        for dataroot in self.getkeys("dataroot"):
            path = BaseSchema.get(self, "dataroot", dataroot, "path")
            tag = BaseSchema.get(self, "dataroot", dataroot, "tag")
            resolver = Resolver.find_resolver(path)
            # Store the bound get_path method (not its result) so resolution
            # stays lazy until find_files actually needs the path.
            resolver_map[dataroot] = resolver(dataroot, schema_root, path, tag).get_path
        return resolver_map

    @contextlib.contextmanager
    def active_dataroot(self, dataroot: str = None):
        '''
        Use this context to set the dataroot parameter on files and directory parameters.

        Args:
            dataroot (str): name of the dataroot

        Example:
            >>> with schema.active_dataroot("lambdalib"):
            ...     schema.set("file", "top.v")
            Sets the file to top.v and associates lambdalib as the dataroot.
        '''

        if dataroot and dataroot not in self.getkeys("dataroot"):
            raise ValueError(f"{dataroot} is not a recognized dataroot")

        with self._active(package=dataroot):
            yield
|
siliconcompiler/pdk.py
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
|
-
from siliconcompiler.schema import NamedSchema
|
|
1
|
+
from siliconcompiler.schema import NamedSchema
|
|
2
2
|
from siliconcompiler.schema import EditableSchema, Parameter, Scope
|
|
3
3
|
from siliconcompiler.schema.utils import trim
|
|
4
4
|
|
|
5
5
|
|
|
6
|
-
class PDKSchema(NamedSchema
|
|
7
|
-
def __init__(self, name
|
|
8
|
-
|
|
9
|
-
|
|
6
|
+
class PDKSchema(NamedSchema):
|
|
7
|
+
def __init__(self, name=None):
|
|
8
|
+
super().__init__()
|
|
9
|
+
self.set_name(name)
|
|
10
10
|
|
|
11
11
|
schema_pdk(self)
|
|
12
12
|
|
|
@@ -0,0 +1,459 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import sys
|
|
4
|
+
import uuid
|
|
5
|
+
|
|
6
|
+
import os.path
|
|
7
|
+
|
|
8
|
+
from typing import Union, List, Tuple
|
|
9
|
+
|
|
10
|
+
from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter
|
|
11
|
+
|
|
12
|
+
from siliconcompiler import DesignSchema
|
|
13
|
+
from siliconcompiler import FlowgraphSchema
|
|
14
|
+
from siliconcompiler import RecordSchema
|
|
15
|
+
from siliconcompiler import MetricSchema
|
|
16
|
+
from siliconcompiler import ChecklistSchema
|
|
17
|
+
from siliconcompiler import ToolSchema, TaskSchema
|
|
18
|
+
|
|
19
|
+
from siliconcompiler.pathschema import PathSchemaBase
|
|
20
|
+
|
|
21
|
+
from siliconcompiler.schema.schema_cfg import schema_option_runtime, schema_arg, schema_version
|
|
22
|
+
|
|
23
|
+
from siliconcompiler.scheduler.scheduler import Scheduler
|
|
24
|
+
from siliconcompiler.utils.logging import SCColorLoggerFormatter, SCLoggerFormatter
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Project(PathSchemaBase, BaseSchema):
    """
    Container schema for a single compilation project.

    Holds design libraries, flowgraphs, tool/task settings, metrics,
    records, checklists, run options and job history, and provides the
    entry point (:meth:`run`) for executing a flow.
    """

    def __init__(self, design: Union[DesignSchema, str] = None):
        """
        Args:
            design (:class:`DesignSchema` or str): design object to import
                into the project, or the name of a design to select.
        """
        super().__init__()

        # Initialize schema
        schema = EditableSchema(self)
        schema_version(schema)
        schema_arg(schema)

        schema.insert("checklist", "default", ChecklistSchema())
        schema.insert("library", "default", DesignSchema())
        schema.insert("flowgraph", "default", FlowgraphSchema())
        schema.insert("metric", MetricSchema())
        schema.insert("record", RecordSchema())
        schema.insert("tool", "default", ToolSchema())

        # Add options
        schema_option_runtime(schema)
        schema.insert("option", "env", "default", Parameter("str"))

        # alias entries are (src_lib, src_fileset, dst_lib, dst_fileset)
        schema.insert("option", "alias", Parameter("[(str,str,str,str)]"))
        schema.insert("option", "fileset", Parameter("[str]"))
        schema.insert("option", "design", Parameter("str"))

        # Add history
        schema.insert("history", BaseSchema())

        # Init logger
        self.__init_logger()

        # Init fields
        self.__cwd = os.getcwd()

        if design:
            # A string only records the design name; a DesignSchema object
            # is imported as a library and then selected.
            if isinstance(design, str):
                self.set("option", "design", design)
            else:
                self.set_design(design)

    def __init_logger(self):
        """
        Creates a per-instance child logger (unique suffix via uuid) with a
        console handler; color formatting is used when stdout supports it.
        """
        sc_logger = logging.getLogger("siliconcompiler")
        sc_logger.propagate = False
        self.__logger = sc_logger.getChild(f"project_{uuid.uuid4().hex}")
        self.__logger.propagate = False
        self.__logger.setLevel(logging.INFO)

        self._logger_console = logging.StreamHandler(stream=sys.stdout)
        if SCColorLoggerFormatter.supports_color(sys.stdout):
            self._logger_console.setFormatter(SCColorLoggerFormatter(SCLoggerFormatter()))
        else:
            self._logger_console.setFormatter(SCLoggerFormatter())

        self.__logger.addHandler(self._logger_console)

    @property
    def logger(self) -> logging.Logger:
        """
        Returns the logger for this project
        """
        return self.__logger

    @property
    def name(self) -> str:
        """
        Returns the name of the design
        """
        return self.get("option", "design")

    @property
    def design(self) -> DesignSchema:
        """
        Returns the design object

        Raises:
            ValueError: if ['option', 'design'] is not set
            KeyError: if the named design has not been loaded as a library
        """
        design_name = self.name
        if not design_name:
            raise ValueError("design name is not set")
        if not self.valid("library", design_name):
            raise KeyError(f"{design_name} design has not been loaded")

        return self.get("library", design_name, field="schema")

    @property
    def cwd(self) -> str:
        """
        Returns the working directory for the project
        """
        return self.__cwd

    def add_dep(self, obj):
        # Dispatch on object type; only designs and flowgraphs are
        # currently importable.
        if isinstance(obj, DesignSchema):
            self.__import_design(obj)
        elif isinstance(obj, FlowgraphSchema):
            self.__import_flow(obj)
        else:
            raise NotImplementedError

    def __import_design(self, design: DesignSchema):
        """Inserts a design as a library and recursively imports its deps."""
        edit_schema = EditableSchema(self)
        edit_schema.insert("library", design.name(), design, clobber=True)

        # Copy dependencies into project
        for dep in design.get_dep():
            self.add_dep(dep)

    def __import_flow(self, flow: FlowgraphSchema):
        """Inserts a flowgraph and instantiates the task objects it uses."""
        edit_schema = EditableSchema(self)
        edit_schema.insert("flowgraph", flow.name(), flow, clobber=True)

        # Instantiate tasks
        for task_cls in flow.get_all_tasks():
            task = task_cls()
            # TODO: this is not needed once tool moves
            edit_schema.insert("tool", task.tool(), ToolSchema(), clobber=True)
            edit_schema.insert("tool", task.tool(), "task", task.task(), task, clobber=True)

    def check_manifest(self):
        """
        Validates the project manifest.

        NOTE(review): currently a stub — the checks below are TODOs and the
        method unconditionally returns True.
        """
        # Assert design is set
        # Assert fileset is set
        # Assert flow is set

        # Assert design is a library
        # Assert fileset is in design
        # Assert design has topmodule

        # Check that alias libraries exist

        # Check flowgraph
        # Check tasks have classes, cannot check post setup that is a runtime check

        return True

    def run(self, raise_exception=False):
        '''
        Executes tasks in a flowgraph.

        The run function sets up tools and launches runs for every node
        in the flowgraph starting with 'from' steps and ending at 'to' steps.
        From/to are taken from the schema from/to parameters if defined,
        otherwise from/to are defined as the entry/exit steps of the flowgraph.
        Before starting the process, tool modules are loaded and setup up for each
        step and index based on the schema eda dictionary settings.
        Once the tools have been set up, the manifest is checked using the
        check_manifest() function and files in the manifest are hashed based
        on the 'hashmode' schema setting.

        Once launched, each process waits for preceding steps to complete,
        as defined by the flowgraph 'inputs' parameter. Once all inputs
        are ready, previous steps are checked for errors before the
        process enters a local working directory and starts to run
        a tool or to execute a built in Chip function.

        Fatal errors within a step/index process cause all subsequent
        processes to exit before start, returning control to the main
        program which can then exit.

        Args:
            raise_exception (bool): if True, will rethrow errors that the flow raises,
                otherwise will report the error and return False

        Examples:
            >>> run()
            Runs the execution flow defined by the flowgraph dictionary.
        '''
        # Imported locally to avoid a circular import with the remote client.
        from siliconcompiler.remote.client import ClientScheduler

        try:
            if self.get('option', 'remote'):
                scheduler = ClientScheduler(self)
            else:
                scheduler = Scheduler(self)
            scheduler.run()
        except Exception as e:
            if raise_exception:
                raise e
            self.logger.error(str(e))
            return False
        finally:
            pass
            # Update dashboard if running
            # if self._dash:
            #     self._dash.update_manifest()
            #     self._dash.end_of_run()

        return True

    def __getbuilddir(self) -> str:
        """
        Returns the path to the build directory
        """
        builddir = self.get('option', 'builddir')
        if os.path.isabs(builddir):
            return builddir

        # Relative builddir is anchored at the project cwd, not the
        # process's current directory.
        return os.path.join(self.cwd, builddir)

    def getworkdir(self, step: str = None, index: Union[int, str] = None) -> str:
        """
        Returns absolute path to the work directory for a given step/index,
        if step/index not given, job directory is returned.

        Raises:
            ValueError: if the design name has not been set

        Args:
            step (str): Node step name
            index (str/int): Node index
        """
        if not self.name:
            raise ValueError("name has not been set")

        # Layout: <builddir>/<design>/<jobname>[/<step>/<index>]
        dirlist = [self.__getbuilddir(),
                   self.name,
                   self.get('option', 'jobname')]

        # Return jobdirectory if no step defined
        # Return index 0 by default
        if step is not None:
            dirlist.append(step)

            if index is None:
                index = '0'

            dirlist.append(str(index))
        return os.path.join(*dirlist)

    def getcollectiondir(self):
        """
        Returns absolute path to collected files directory
        """
        return os.path.join(self.getworkdir(), "sc_collected_files")

    def collect(self, **kwargs):
        # NOTE(review): stub — file collection is not implemented yet.
        pass

    def history(self, job: str) -> "Project":
        '''
        Returns a *mutable* reference to ['history', job] as a Project object.

        Raises:
            KeyError: if job does not currently exist in history

        Args:
            job (str): Name of historical job to return.
        '''

        if job not in self.getkeys("history"):
            raise KeyError(f"{job} is not a valid job")

        return self.get("history", job, field="schema")

    def _record_history(self):
        '''
        Copies the current project into the history
        '''

        job = self.get("option", "jobname")
        proj = self.copy()

        # Remove history from proj so snapshots don't nest recursively
        EditableSchema(proj).insert("history", BaseSchema(), clobber=True)

        if job in self.getkeys("history"):
            self.logger.warning(f"Overwriting job {job}")

        EditableSchema(self).insert("history", job, proj, clobber=True)

    def __getstate__(self):
        """Pickle support: drops the non-serializable logger objects."""
        # Ensure a copy of the state is used
        state = self.__dict__.copy()

        # Remove logger objects since they are not serializable.
        # "_Project__logger" is the name-mangled form of self.__logger.
        del state["_Project__logger"]
        del state["_logger_console"]

        return state

    def __setstate__(self, state):
        """Pickle support: restores state and rebuilds the logger."""
        self.__dict__ = state

        # Reinitialize logger on restore
        self.__init_logger()

    def get_filesets(self) -> List[Tuple[NamedSchema, str]]:
        """
        Returns the filesets selected for this project

        Raises:
            KeyError: if an alias destination library has not been loaded
        """
        # Build alias mapping: (src_lib, src_fileset) -> (dst_obj, dst_fileset)
        alias = {}
        for src_lib, src_fileset, dst_lib, dst_fileset in self.get("option", "alias"):
            if dst_lib:
                if not self.valid("library", dst_lib):
                    raise KeyError(f"{dst_lib} is not a loaded library")
                dst_obj = self.get("library", dst_lib, field="schema")
            else:
                # Empty destination drops the source fileset entirely.
                dst_obj = None
            if not dst_fileset:
                dst_fileset = None
            alias[(src_lib, src_fileset)] = (dst_obj, dst_fileset)

        return self.design.get_fileset(self.get("option", "fileset"), alias=alias)

    def get_task(self,
                 tool: str,
                 task: str,
                 step: str = None,
                 index: Union[str, int] = None) -> TaskSchema:
        """
        Returns the task object for the given tool/task, optionally bound
        to a step/index runtime context.

        Raises:
            KeyError: if the tool/task has not been loaded

        Args:
            tool (str): tool name
            task (str): task name
            step (str): optional step to bind the task's runtime context to
            index (str/int): optional index to bind the task's runtime
                context to
        """
        if self.valid("tool", tool, "task", task):
            obj: TaskSchema = self.get("tool", tool, "task", task, field="schema")
            if step or index:
                # NOTE(review): the context manager exits before the object
                # is handed to the caller — confirm the runtime binding is
                # intended to outlive the `with` block.
                with obj.runtime(None, step, index) as obj:
                    return obj
            return obj
        raise KeyError(f"{tool}/{task} has not been loaded")

    def set_design(self, design: Union[DesignSchema, str]):
        """
        Set the design for this project

        Args:
            design (:class:`DesignSchema` or str): design object or name

        Raises:
            TypeError: if design is neither a string nor a DesignSchema
        """
        if isinstance(design, DesignSchema):
            # Importing also pulls in the design's dependencies.
            self.add_dep(design)
            design = design.name()
        elif not isinstance(design, str):
            raise TypeError("design must be string or Design object")

        return self.set("option", "design", design)

    def set_flow(self, flow: Union[FlowgraphSchema, str]):
        """
        Set the flow for this project

        Args:
            flow (:class:`FlowgraphSchema` or str): flow object or name

        Raises:
            TypeError: if flow is neither a string nor a FlowgraphSchema
        """
        if isinstance(flow, FlowgraphSchema):
            # Importing also instantiates the flow's tasks.
            self.add_dep(flow)
            flow = flow.name()
        elif not isinstance(flow, str):
            raise TypeError("flow must be string or Flowgraph object")

        return self.set("option", "flow", flow)

    def add_fileset(self, fileset: Union[List[str], str], clobber: bool = False):
        """
        Add a fileset to use in this project

        Args:
            fileset (list of str): name of fileset from the design
            clobber (bool): if True, replace the filesets

        Raises:
            TypeError: if fileset is not a string or collection of strings
            ValueError: if a fileset is not defined in the current design
        """
        if not isinstance(fileset, str):
            if isinstance(fileset, (list, tuple, set)):
                if not all([isinstance(v, str) for v in fileset]):
                    raise TypeError("fileset must be a string")
            else:
                raise TypeError("fileset must be a string")

        # Normalize a single name into a list for uniform handling below.
        if isinstance(fileset, str):
            fileset = [fileset]

        # Validate every fileset against the active design before touching
        # the option so a bad entry leaves the project unchanged.
        for fs in fileset:
            if fs not in self.design.getkeys("fileset"):
                raise ValueError(f"{fs} is not a valid fileset in {self.design.name()}")

        if clobber:
            return self.set("option", "fileset", fileset)
        else:
            return self.add("option", "fileset", fileset)

    def add_alias(self,
                  src_dep: Union[DesignSchema, str],
                  src_fileset: str,
                  alias_dep: Union[DesignSchema, str],
                  alias_fileset: str,
                  clobber: bool = False):
        """
        Add an aliased fileset.

        Args:
            src_dep (:class:`DesignSchema` or str): source design to alias
            src_fileset (str): source fileset to alias
            alias_dep (:class:`DesignSchema` or str): replacement design
            alias_fileset (str): replacement fileset
            clobber (bool): overwrite existing values

        Raises:
            KeyError: if a referenced library has not been loaded
            TypeError: if a dep argument has an unsupported type
            ValueError: if a referenced fileset does not exist
        """

        # Resolve the source library: a name must already be loaded; it is
        # then normalized into its schema object.
        if isinstance(src_dep, str):
            if src_dep not in self.getkeys("library"):
                raise KeyError(f"{src_dep} has not been loaded")

            src_dep = self.get("library", src_dep, field="schema")
        if isinstance(src_dep, DesignSchema):
            src_dep_name = src_dep.name()
            if src_dep_name not in self.getkeys("library"):
                raise KeyError(f"{src_dep_name} has not been loaded")
        else:
            raise TypeError("source dep is not a valid type")

        if src_fileset not in src_dep.getkeys("fileset"):
            raise ValueError(f"{src_dep_name} does not have {src_fileset} as a fileset")

        # An empty/None alias destination records a "remove" alias.
        if alias_dep is None:
            alias_dep = ""

        if isinstance(alias_dep, str):
            if alias_dep == "":
                alias_dep = None
                alias_dep_name = ""
                alias_fileset = ""
            else:
                if alias_dep not in self.getkeys("library"):
                    raise KeyError(f"{alias_dep} has not been loaded")

                alias_dep = self.get("library", alias_dep, field="schema")

        if alias_dep is not None:
            if isinstance(alias_dep, DesignSchema):
                alias_dep_name = alias_dep.name()
                # Unlike the source, an unloaded alias object is imported
                # on the fly.
                if alias_dep_name not in self.getkeys("library"):
                    self.add_dep(alias_dep)
            else:
                raise TypeError("alias dep is not a valid type")

            if alias_fileset != "" and alias_fileset not in alias_dep.getkeys("fileset"):
                raise ValueError(f"{alias_dep_name} does not have {alias_fileset} as a fileset")

        alias = (src_dep_name, src_fileset, alias_dep_name, alias_fileset)
        if clobber:
            return self.set("option", "alias", alias)
        else:
            return self.add("option", "alias", alias)
|