siliconcompiler 0.36.2__py3-none-any.whl → 0.36.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc_install.py +23 -2
- siliconcompiler/apps/sc_issue.py +4 -0
- siliconcompiler/flows/dvflow.py +13 -0
- siliconcompiler/package/__init__.py +17 -12
- siliconcompiler/package/git.py +2 -1
- siliconcompiler/project.py +6 -4
- siliconcompiler/schema/baseschema.py +18 -6
- siliconcompiler/schema/docschema.py +3 -3
- siliconcompiler/schema/editableschema.py +1 -1
- siliconcompiler/schema/namedschema.py +6 -6
- siliconcompiler/schema_support/cmdlineschema.py +8 -5
- siliconcompiler/tool.py +4 -2
- siliconcompiler/tools/_common/cocotb/__init__.py +0 -0
- siliconcompiler/tools/_common/cocotb/cocotb_task.py +286 -0
- siliconcompiler/tools/builtin/wait.py +152 -0
- siliconcompiler/tools/icarus/cocotb_exec.py +53 -0
- siliconcompiler/tools/icarus/compile.py +47 -1
- siliconcompiler/tools/klayout/export.py +0 -2
- siliconcompiler/tools/klayout/merge.py +95 -0
- siliconcompiler/tools/klayout/scripts/klayout_merge.py +79 -0
- siliconcompiler/tools/openroad/_apr.py +13 -0
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +9 -2
- siliconcompiler/tools/verilator/cocotb_compile.py +55 -0
- siliconcompiler/tools/verilator/cocotb_exec.py +52 -0
- siliconcompiler/tools/verilator/compile.py +12 -8
- siliconcompiler/tools/vpr/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +1 -1
- siliconcompiler/tools/yosys/syn_asic.py +3 -3
- siliconcompiler/toolscripts/_tools.json +4 -4
- siliconcompiler/toolscripts/rhel8/install-icarus.sh +2 -2
- siliconcompiler/toolscripts/rhel8/install-magic.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-verilator.sh +2 -2
- siliconcompiler/toolscripts/rhel8/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/rhel9/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-gtkwave.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-icarus.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-magic.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/rhel9/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-verilator.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/rhel9/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-yosys.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu20/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu22/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-keplerformal.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-nextpnr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu22/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu22/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-yosys.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-keplerformal.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-nextpnr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu24/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu24/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-yosys.sh +1 -1
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/METADATA +4 -2
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/RECORD +120 -112
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.36.2.dist-info → siliconcompiler-0.36.4.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Optional, Union, Dict
|
|
5
|
+
import xml.etree.ElementTree as ET
|
|
6
|
+
|
|
7
|
+
from siliconcompiler import Task
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
import cocotb_tools.config
|
|
11
|
+
from cocotb_tools.runner import Runner
|
|
12
|
+
_has_cocotb = True
|
|
13
|
+
except ModuleNotFoundError:
|
|
14
|
+
_has_cocotb = False
|
|
15
|
+
Runner = object
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def get_cocotb_config(sim="icarus"):
    """Return the cocotb installation paths needed to drive a simulator.

    Args:
        sim (str): Simulator name used to select the VPI library
            (defaults to "icarus").

    Returns:
        tuple: (libs_dir, lib_name, share_dir) from cocotb's config module.

    Raises:
        NotImplementedError: If cocotb is not installed.
    """
    if not _has_cocotb:
        raise NotImplementedError("COCOTB must be installed to use get_cocotb_config")

    cfg = cocotb_tools.config
    return cfg.libs_dir, cfg.lib_name("vpi", sim), cfg.share_dir
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class CocotbRunnerDummy(Runner):
    """
    Minimal cocotb Runner used only to look up the libpython path.

    cocotb's Runner already knows how to locate libpython (its
    ``_set_env()`` calls ``find_libpython`` internally), so subclassing it
    avoids taking ``find_libpython`` on as a direct dependency. All of the
    abstract simulator hooks are therefore stubbed out — either as no-ops
    or as ``NotImplementedError`` — because nothing here ever builds or
    runs HDL. Instantiate it solely to call ``get_libpython_path()``.

    Example:
        >>> libpython = CocotbRunnerDummy().get_libpython_path()
        >>> print(libpython)
        /usr/lib/x86_64-linux-gnu/libpython3.10.so
    """

    def __init__(self):
        if not _has_cocotb:
            raise NotImplementedError("COCOTB must be installed to use get_cocotb_config")

        super().__init__()
        # _set_env() reads these attributes while building the environment
        # map, so they must exist even though their values are irrelevant.
        self.sim_hdl_toplevel = ""
        self.test_module = ""
        self.hdl_toplevel_lang = ""

    def _simulator_in_path(self):
        # No simulator executable is needed for this dummy runner.
        pass

    def _build_command(self):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for building HDL sources")

    def _test_command(self):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for running tests")

    def _get_define_options(self, defines):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def _get_include_options(self, includes):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def _get_parameter_options(self, parameters):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def get_libpython_path(self):
        """Return the absolute path of the libpython shared library.

        Delegates discovery to ``Runner._set_env()``, which uses
        ``find_libpython.find_libpython()`` under the hood and records the
        result in ``self.env["LIBPYTHON_LOC"]``.

        Returns:
            str: Absolute path to the libpython shared library.

        Raises:
            ValueError: If libpython cannot be found.
        """
        self._set_env()
        return self.env["LIBPYTHON_LOC"]
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class CocotbTask(Task):
    """
    Base task for running cocotb Python testbenches against a simulator.

    Handles the simulator-independent cocotb plumbing: the random-seed
    parameter, the environment variables cocotb reads at runtime
    (LIBPYTHON_LOC, COCOTB_TOPLEVEL, COCOTB_TEST_MODULES, TOPLEVEL_LANG,
    COCOTB_RESULTS_FILE, and optionally COCOTB_RANDOM_SEED), and parsing
    of the xUnit results file cocotb writes. Simulator-specific setup is
    left to subclasses.
    """

    def __init__(self):
        super().__init__()

        self.add_parameter("cocotb_random_seed", "int",
                           'Random seed for cocotb test reproducibility. '
                           'If not set, cocotb will generate a random seed.')

    def set_cocotb_randomseed(
            self, seed: int,
            step: Optional[str] = None,
            index: Optional[Union[str, int]] = None
    ):
        """
        Sets the random seed for cocotb tests.

        Args:
            seed (int): The random seed value.
            step (str, optional): The specific step to apply this configuration to.
            index (str, optional): The specific index to apply this configuration to.
        """
        self.set("var", "cocotb_random_seed", seed, step=step, index=index)

    def task(self):
        """Return the task name."""
        return "exec_cocotb"

    def _get_test_modules(self):
        """
        Get cocotb test module names from Python files in filesets.

        Returns:
            tuple: (module_names, module_dirs) where module_names is a
                comma-separated string and module_dirs is a list of
                directories containing the modules.
        """
        module_names = []
        module_dirs = []
        seen_dirs = set()

        for lib, fileset in self.project.get_filesets():
            for pyfile in lib.get_file(fileset=fileset, filetype="python"):
                path = Path(pyfile)
                # Module name is the filename without the .py extension
                module_names.append(path.stem)
                # Record each containing directory once, in first-seen
                # order, for PYTHONPATH construction.
                dir_path = str(path.parent.resolve())
                if dir_path not in seen_dirs:
                    seen_dirs.add(dir_path)
                    module_dirs.append(dir_path)

        return ",".join(module_names), module_dirs

    def _get_toplevel_lang(self):
        """
        Determine the HDL toplevel language for cocotb's TOPLEVEL_LANG.

        Returns:
            str: Always "verilog" — cocotb uses "verilog" for both Verilog
                and SystemVerilog, and the supported simulators here do not
                take VHDL toplevels.
        """
        # Icarus only supports Verilog/SystemVerilog, not VHDL
        # cocotb uses "verilog" for both Verilog and SystemVerilog
        return "verilog"

    def __setup_cocotb_environment(self):
        """
        Set up all required environment variables for cocotb execution.
        """
        test_modules, _ = self._get_test_modules()
        libpython_path = CocotbRunnerDummy().get_libpython_path()

        # LIBPYTHON_LOC: path to libpython shared library
        self.set_environmentalvariable("LIBPYTHON_LOC", libpython_path)
        self.add_required_key("env", "LIBPYTHON_LOC")

        # COCOTB_TOPLEVEL: the HDL toplevel module name
        self.set_environmentalvariable("COCOTB_TOPLEVEL", self.design_topmodule)
        self.add_required_key("env", "COCOTB_TOPLEVEL")

        # COCOTB_TEST_MODULES: comma-separated list of Python test modules
        self.set_environmentalvariable("COCOTB_TEST_MODULES", test_modules)
        self.add_required_key("env", "COCOTB_TEST_MODULES")

        # TOPLEVEL_LANG: HDL language of the toplevel
        self.set_environmentalvariable("TOPLEVEL_LANG", self._get_toplevel_lang())
        self.add_required_key("env", "TOPLEVEL_LANG")

        # COCOTB_RESULTS_FILE: path to xUnit XML results
        self.set_environmentalvariable("COCOTB_RESULTS_FILE", "outputs/results.xml")
        self.add_required_key("env", "COCOTB_RESULTS_FILE")

        # COCOTB_RANDOM_SEED: optional random seed for reproducibility
        random_seed = self.get("var", "cocotb_random_seed")
        if random_seed is not None:
            self.set_environmentalvariable("COCOTB_RANDOM_SEED", str(random_seed))
            self.add_required_key("env", "COCOTB_RANDOM_SEED")

    def setup(self):
        """Declare outputs, required keys, and the cocotb environment."""
        super().setup()

        if not _has_cocotb:
            raise RuntimeError("Cocotb is not installed; cannot run test.")

        # Output: xUnit XML results file
        self.add_output_file(file="results.xml")

        self.add_required_key("option", "design")
        self.add_required_key("option", "fileset")
        if self.project.get("option", "alias"):
            self.add_required_key("option", "alias")

        # Require the Python test modules from every fileset that has them
        for lib, fileset in self.project.get_filesets():
            if lib.has_file(fileset=fileset, filetype="python"):
                self.add_required_key(lib, "fileset", fileset, "file", "python")

        if self.get("var", "cocotb_random_seed") is not None:
            self.add_required_key("var", "cocotb_random_seed")

        # Set up cocotb environment variables
        self.__setup_cocotb_environment()

    def get_runtime_environmental_variables(self, include_path: bool = True) -> Dict[str, str]:
        """Extend the runtime environment with cocotb-specific entries.

        Prepends cocotb's libs directory to PATH, the test-module
        directories to PYTHONPATH, and points PYGPI_PYTHON_BIN at the
        current interpreter.
        """
        envs = super().get_runtime_environmental_variables(include_path)

        _, module_dirs = self._get_test_modules()
        libs_dir = cocotb_tools.config.libs_dir

        # PATH: prepend the cocotb libs directory
        current_path = os.environ.get("PATH", "")
        envs["PATH"] = f"{libs_dir}{os.pathsep}{current_path}"

        # PYTHONPATH: prepend directories containing the test modules
        current_pythonpath = os.environ.get("PYTHONPATH", "")
        pythonpath_parts = module_dirs + ([current_pythonpath] if current_pythonpath else [])
        envs["PYTHONPATH"] = os.pathsep.join(pythonpath_parts)

        # PYGPI_PYTHON_BIN: Python executable cocotb should embed
        envs["PYGPI_PYTHON_BIN"] = sys.executable

        return envs

    def _parse_cocotb_results(self, results_file: Path):
        """
        Parse the cocotb xUnit XML results file and extract metrics.

        Args:
            results_file: Path to the results.xml file.
        """
        # Keep the try body minimal and the exception types narrow: only
        # reading/parsing the XML can legitimately fail here. The previous
        # broad `except Exception` also swallowed failures from logging and
        # record_metric(), hiding programming errors.
        try:
            root = ET.parse(results_file).getroot()
            tests = len(root.findall(".//testcase"))
            failures = len(root.findall(".//failure"))
            errors = len(root.findall(".//error"))
        except (ET.ParseError, OSError) as e:
            self.logger.warning(f"Failed to parse cocotb results: {e}")
            return

        passed = tests - failures - errors

        self.logger.info(f"Cocotb results: {passed}/{tests} tests passed")
        if failures > 0:
            self.logger.warning(f"Cocotb: {failures} test(s) failed")
        if errors > 0:
            self.logger.warning(f"Cocotb: {errors} test(s) had errors")

        self.record_metric("errors", errors + failures, source_file=results_file)

    def post_process(self):
        """Parse the cocotb results XML (if present) and report metrics."""
        super().post_process()

        results_file = Path("outputs/results.xml")
        if results_file.exists():
            self._parse_cocotb_results(results_file)
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING
|
|
2
|
+
|
|
3
|
+
from siliconcompiler.tools.builtin import BuiltinTask
|
|
4
|
+
|
|
5
|
+
if TYPE_CHECKING:
|
|
6
|
+
from siliconcompiler import Flowgraph
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Wait(BuiltinTask):
    '''
    A wait task that stalls the flow until all inputs are available.
    '''
    def __init__(self):
        super().__init__()

    def _set_io_files(self):
        # No file IO needed for wait task
        return

    def task(self):
        return "wait"

    @staticmethod
    def __has_path(flowgraph: "Flowgraph", source: tuple, target: tuple) -> bool:
        '''
        Helper method to check if there's any path from source to target node.

        Performs a breadth-first search along node outputs.

        Args:
            flowgraph: The flowgraph to search.
            source: Tuple of (step, index) for the source node.
            target: Tuple of (step, index) for the target node.

        Returns:
            bool: True if there's a path from source to target, False otherwise.
        '''
        if source == target:
            return True

        visited = set()
        to_visit = [source]

        while to_visit:
            current = to_visit.pop(0)
            if current == target:
                return True
            if current in visited:
                continue
            visited.add(current)

            # Add all downstream nodes to the queue
            to_visit.extend(flowgraph.get_node_outputs(current[0], current[1]))

        return False

    @staticmethod
    def serialize_tool_tasks(flowgraph: "Flowgraph", tool_name: str) -> None:
        '''
        Adds wait tasks between nodes of the same tool that could execute in parallel.

        This method inserts `Wait` task nodes between nodes that use the same
        tool, but only when those nodes could execute in parallel (i.e., there's
        no dependency path between them). This ensures that tool instances don't
        execute in parallel, while avoiding unnecessary wait tasks for nodes
        that already have a dependency relationship.

        The method modifies the flowgraph in-place by:
        1. Finding all nodes that use the specified tool
        2. Sorting them by execution order
        3. For each consecutive pair with no dependency path, inserting a wait
           task named {target_step}.wait (after the node being delayed)

        Args:
            flowgraph: The flowgraph to modify.
            tool_name (str): The name of the tool (e.g., 'openroad', 'yosys').
                All nodes using this tool will be serialized.

        Raises:
            ValueError: If the flowgraph is invalid or tool_name is empty.

        Example:
            >>> flow = Flowgraph("myflow")
            >>> flow.node("place1", openroad.Place, index=0)
            >>> flow.node("syn", yosys.Syn, index=0)
            >>> flow.node("place2", openroad.Place, index=1)
            >>> flow.edge("place1", "syn")
            >>> flow.edge("syn", "place2")
            >>> Wait.serialize_tool_tasks(flow, "openroad")
            >>> # No wait task added - place1 and place2 already have a dependency path
        '''
        if not tool_name or not isinstance(tool_name, str):
            raise ValueError(f"tool_name must be a non-empty string, not {tool_name}")

        if not flowgraph.validate():
            raise ValueError(f"Flowgraph '{flowgraph.name}' is invalid before serialization")

        # Find all nodes using the specified tool
        tool_nodes = [
            (step, index)
            for step, index in flowgraph.get_nodes()
            if flowgraph.get_graph_node(step, index).get_tool() == tool_name
        ]

        # Need at least 2 nodes for any parallelism to be possible.
        # (len < 2 also covers the empty case.)
        if len(tool_nodes) < 2:
            return

        # Sort nodes by execution order to establish a consistent serialization
        node_order_map = {}
        for level_idx, level in enumerate(flowgraph.get_execution_order()):
            for node_idx, node in enumerate(level):
                node_order_map[node] = (level_idx, node_idx)

        sorted_tool_nodes = sorted(
            tool_nodes, key=lambda n: node_order_map.get(n, (float('inf'), 0)))

        # Chain consecutive tool nodes: tool[0] -> wait -> tool[1] -> ...
        # Reachability is transitive, so if every consecutive pair already
        # has a dependency path then every pair does — checking consecutive
        # pairs here subsumes the previous O(n^2) all-pairs pre-scan.
        for curr_node, next_node in zip(sorted_tool_nodes, sorted_tool_nodes[1:]):
            if Wait.__has_path(flowgraph, curr_node, next_node):
                # Already serialized, skip
                continue

            # Create wait node named after the next node (the one being delayed)
            wait_step = f"{next_node[0]}.wait"
            wait_index = next_node[1]
            flowgraph.node(wait_step, Wait(), index=wait_index)

            # Connect: curr_node -> wait_node -> next_node
            flowgraph.edge(curr_node[0], wait_step,
                           tail_index=curr_node[1], head_index=wait_index)
            flowgraph.edge(wait_step, next_node[0],
                           tail_index=wait_index, head_index=next_node[1])
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from siliconcompiler.tools._common.cocotb.cocotb_task import (
|
|
2
|
+
CocotbTask,
|
|
3
|
+
get_cocotb_config
|
|
4
|
+
)
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class CocotbExecTask(CocotbTask):
    '''
    Run a cocotb testbench against a compiled Icarus Verilog simulation.

    This task takes a compiled .vvp file from the icarus compile task and
    executes it with the cocotb VPI module loaded, enabling Python-based
    testbenches to interact with the simulation.

    The task requires cocotb to be installed in the Python environment.
    Test modules are specified by adding Python files to the fileset using
    the "python" filetype.
    '''

    def tool(self):
        return "icarus"

    def parse_version(self, stdout):
        # vvp reports: "Icarus Verilog runtime version 13.0 (devel) ..."
        # so the version is the fifth whitespace-separated token.
        tokens = stdout.split()
        return tokens[4]

    def setup(self):
        super().setup()

        # vvp is the Icarus Verilog runtime
        self.set_exe("vvp", vswitch="-V")
        self.add_version(">=10.3")

        self.set_threads()

        # Input: compiled .vvp file produced by the compile task
        self.add_input_file(ext="vvp")

    def runtime_options(self):
        options = super().runtime_options()

        libs_dir, lib_name, _ = get_cocotb_config("icarus")

        # -M: VPI module search path, -m: VPI module to load
        options += ["-M", str(libs_dir)]
        options += ["-m", lib_name]

        # Compiled simulation snapshot to execute
        options += [f"inputs/{self.design_topmodule}.vvp"]

        return options
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from typing import Optional
|
|
1
|
+
from typing import Optional, Union
|
|
2
2
|
from siliconcompiler import Task
|
|
3
3
|
|
|
4
4
|
|
|
@@ -15,6 +15,11 @@ class CompileTask(Task):
|
|
|
15
15
|
'See the corresponding "-g" flags in the Icarus manual for more "'
|
|
16
16
|
'"information.')
|
|
17
17
|
|
|
18
|
+
self.add_parameter("trace", "bool",
|
|
19
|
+
'Enable waveform tracing. When enabled, a VCD dump module is '
|
|
20
|
+
'auto-generated and compiled with the design to capture all signals.',
|
|
21
|
+
defvalue=False)
|
|
22
|
+
|
|
18
23
|
def set_icarus_veriloggeneration(self, gen: str,
|
|
19
24
|
step: Optional[str] = None,
|
|
20
25
|
index: Optional[str] = None):
|
|
@@ -28,6 +33,22 @@ class CompileTask(Task):
|
|
|
28
33
|
"""
|
|
29
34
|
self.set("var", "verilog_generation", gen, step=step, index=index)
|
|
30
35
|
|
|
36
|
+
def set_trace_enabled(self, enabled: bool = True,
                      step: Optional[str] = None,
                      index: Optional[Union[str, int]] = None):
    """
    Turn waveform tracing on or off.

    With tracing on, an auto-generated VCD dump module is compiled
    alongside the design and the waveform is written to
    reports/<topmodule>.vcd.

    Args:
        enabled (bool): Whether to enable tracing. Defaults to True.
        step (str, optional): The specific step to apply this configuration to.
        index (str, optional): The specific index to apply this configuration to.
    """
    # Store the flag in the task's "trace" variable.
    self.set("var", "trace", enabled, step=step, index=index)
|
|
51
|
+
|
|
31
52
|
def tool(self):
|
|
32
53
|
return "icarus"
|
|
33
54
|
|
|
@@ -74,6 +95,26 @@ class CompileTask(Task):
|
|
|
74
95
|
if self.get("var", "verilog_generation"):
|
|
75
96
|
self.add_required_key("var", "verilog_generation")
|
|
76
97
|
|
|
98
|
+
def pre_process(self):
    """Run standard pre-processing, then emit the VCD dump helper if tracing is on."""
    super().pre_process()

    trace_enabled = self.get("var", "trace")
    if not trace_enabled:
        return
    self._generate_trace_module()
|
|
104
|
+
|
|
105
|
+
def _generate_trace_module(self):
    """Write sc_trace_dump.v, a helper module that dumps all design signals to VCD.

    The generated module calls $dumpfile/$dumpvars on the design toplevel;
    the waveform lands in reports/<topmodule>.vcd.
    """
    trace_file = f"reports/{self.design_topmodule}.vcd"
    dump_module = f"""// Auto-generated waveform dump module
module sc_trace_dump();
    initial begin
        $dumpfile("{trace_file}");
        $dumpvars(0, {self.design_topmodule});
    end
endmodule
"""
    # Emit into the working directory; runtime_options() adds it to the
    # compile command when tracing is enabled.
    with open("sc_trace_dump.v", "w") as f:
        f.write(dump_module)
|
|
117
|
+
|
|
77
118
|
def runtime_options(self):
|
|
78
119
|
options = super().runtime_options()
|
|
79
120
|
|
|
@@ -137,4 +178,9 @@ class CompileTask(Task):
|
|
|
137
178
|
for value in lib.get_file(fileset=fileset, filetype="verilog"):
|
|
138
179
|
options.append(value)
|
|
139
180
|
|
|
181
|
+
# Add trace dump module if tracing is enabled
|
|
182
|
+
if self.get("var", "trace"):
|
|
183
|
+
options.append("sc_trace_dump.v")
|
|
184
|
+
options.extend(["-s", "sc_trace_dump"])
|
|
185
|
+
|
|
140
186
|
return options
|
|
@@ -79,8 +79,6 @@ class ExportTask(KLayoutTask, ScreenshotParams):
|
|
|
79
79
|
self.add_output_file(ext="lyt")
|
|
80
80
|
self.add_output_file(ext="lyp")
|
|
81
81
|
|
|
82
|
-
self.add_required_key("var", "stream")
|
|
83
|
-
|
|
84
82
|
sc_stream_order = [default_stream, *[s for s in ("gds", "oas") if s != default_stream]]
|
|
85
83
|
req_set = False
|
|
86
84
|
for s in sc_stream_order:
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
from typing import Optional, Union
|
|
2
|
+
|
|
3
|
+
from siliconcompiler.tools.klayout import KLayoutTask
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class Merge(KLayoutTask):
    """
    Klayout task to merge multiple GDS files and provide prefixing for cell names.

    A single "reference" layout (from a library fileset or an upstream
    input node) is merged with one or more additional layouts, each of
    which has a cell-name prefix applied.
    """
    def __init__(self):
        super().__init__()

        self.add_parameter("reference", "(<fs,input>,str,str)",
                           "Reference fileset or input node for merge operation, structured as "
                           "(fs, library name, fileset) or (input, step, index)")
        self.add_parameter("merge", "[(<fs,input>,str,str,str)]",
                           "Fileset or input node to be merged with prefix, structured as "
                           "(fs, library name, fileset) or (input, step, index) along with prefix "
                           "string")

    def __fix_type(self, type: str) -> str:
        # Accept the long-form "fileset" as an alias for the schema's "fs".
        if type == "fileset":
            return "fs"
        return type

    def set_klayout_reference(self, type: str, source0: str, source1: str,
                              step: Optional[str] = None,
                              index: Optional[Union[str, int]] = None):
        """
        Sets the reference file for the merge operation.

        Args:
            type (str): Source kind: "fs" (or "fileset") for a library
                fileset, or "input" for an upstream node.
            source0 (str): The first part of the source (library name or step).
            source1 (str): The second part of the source (fileset name or index).
            step (Optional[str]): The specific step to apply this configuration to.
            index (Optional[Union[str, int]]): The specific index to apply this configuration to.
        """
        self.set("var", "reference", (self.__fix_type(type), source0, source1),
                 step=step, index=index)

    def add_klayout_merge(self, type: str, source0: str, source1: str, prefix: str,
                          step: Optional[str] = None,
                          index: Optional[Union[str, int]] = None, clobber: bool = False):
        """
        Adds a file to be merged with the reference file.

        Args:
            type (str): Source kind: "fs" (or "fileset") for a library
                fileset, or "input" for an upstream node.
            source0 (str): The first part of the source (library name or step).
            source1 (str): The second part of the source (fileset name or index).
            prefix (str): The cell-name prefix to apply during the merge.
            step (Optional[str]): The specific step to apply this configuration to.
            index (Optional[Union[str, int]]): The specific index to apply this configuration to.
            clobber (bool, optional): If True, overwrites the existing list of merge files.
                If False, appends to the list. Defaults to False.
        """
        entry = (self.__fix_type(type), source0, source1, prefix)
        if clobber:
            self.set("var", "merge", entry, step=step, index=index)
        else:
            self.add("var", "merge", entry, step=step, index=index)

    def task(self) -> str:
        return 'merge'

    def setup(self) -> None:
        """Declare the merge script, required keys, inputs and outputs."""
        super().setup()

        self.set_script("klayout_merge.py")

        self.add_required_key("var", "reference")
        self.add_required_key("var", "merge")

        if self.get("var", "reference"):
            ref_type, ref_source0, ref_source1 = self.get("var", "reference")
            if ref_type == 'input':
                # Reference comes from an upstream node's output GDS
                step, index = ref_source0, ref_source1
                self.add_input_file(
                    self.compute_input_file_node_name(f"{self.design_topmodule}.gds",
                                                      step, index))
            else:
                # Reference comes from a library fileset
                lib_name, fileset = ref_source0, ref_source1
                self.add_required_key("library", lib_name, "fileset", fileset, "file", "gds")
        for merge_entry in self.get("var", "merge"):
            merge_type, merge_source0, merge_source1, _ = merge_entry
            if merge_type == 'input':
                step, index = merge_source0, merge_source1
                self.add_input_file(
                    self.compute_input_file_node_name(f"{self.design_topmodule}.gds",
                                                      step, index))
            else:
                lib_name, fileset = merge_source0, merge_source1
                self.add_required_key("library", lib_name, "fileset", fileset, "file", "gds")

        self.add_output_file(ext="gds")
|