siliconcompiler 0.36.3__py3-none-any.whl → 0.36.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc_install.py +23 -2
- siliconcompiler/apps/sc_issue.py +4 -0
- siliconcompiler/flows/dvflow.py +13 -0
- siliconcompiler/package/__init__.py +17 -12
- siliconcompiler/package/git.py +2 -1
- siliconcompiler/schema/baseschema.py +11 -2
- siliconcompiler/schema_support/cmdlineschema.py +3 -2
- siliconcompiler/tools/_common/cocotb/__init__.py +0 -0
- siliconcompiler/tools/_common/cocotb/cocotb_task.py +286 -0
- siliconcompiler/tools/builtin/wait.py +136 -0
- siliconcompiler/tools/icarus/cocotb_exec.py +53 -0
- siliconcompiler/tools/icarus/compile.py +47 -1
- siliconcompiler/tools/openroad/_apr.py +13 -0
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +9 -2
- siliconcompiler/tools/verilator/cocotb_compile.py +55 -0
- siliconcompiler/tools/verilator/cocotb_exec.py +52 -0
- siliconcompiler/tools/verilator/compile.py +12 -8
- siliconcompiler/tools/vpr/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +1 -1
- siliconcompiler/tools/yosys/syn_asic.py +3 -3
- siliconcompiler/toolscripts/_tools.json +4 -4
- siliconcompiler/toolscripts/rhel8/install-icarus.sh +2 -2
- siliconcompiler/toolscripts/rhel8/install-magic.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-verilator.sh +2 -2
- siliconcompiler/toolscripts/rhel8/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/rhel9/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-gtkwave.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-icarus.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-magic.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/rhel9/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-verilator.sh +2 -2
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/rhel9/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-yosys.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu20/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu22/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-keplerformal.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-nextpnr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu22/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu22/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-yosys.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-bambu.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-bluespec.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-ghdl.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-gtkwave.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-icarus.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-icepack.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-keplerformal.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-magic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-nextpnr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-openroad.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-opensta.sh +5 -5
- siliconcompiler/toolscripts/ubuntu24/install-slurm.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-surelog.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-verilator.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-wildebeest.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-xyce.sh +3 -3
- siliconcompiler/toolscripts/ubuntu24/install-yosys-moosic.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-yosys-slang.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-yosys.sh +1 -1
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/METADATA +3 -1
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/RECORD +112 -107
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.36.3.dist-info → siliconcompiler-0.36.4.dist-info}/top_level.txt +0 -0
siliconcompiler/_metadata.py
CHANGED
siliconcompiler/apps/sc_install.py
CHANGED
@@ -92,7 +92,8 @@ class ChoiceOptional(Container):
         return sorted(items)
 
 
-def install_tool(tool: str, script: str, build_dir: str, prefix: str) -> bool:
+def install_tool(tool: str, script: str, build_dir: str, prefix: str,
+                 jobs: Optional[int] = None) -> bool:
     """
     Prepare a build directory, configure the environment, and execute an install script for a tool.
 
@@ -102,6 +103,8 @@ def install_tool(tool: str, script: str, build_dir: str, prefix: str) -> bool:
         build_dir (str): Base directory where a per-tool build directory will be created.
         prefix (str): Installation prefix; added to PATH and used to determine whether
             sudo is required.
+        jobs (Optional[int]): Maximum number of parallel jobs to use during build. If None,
+            defaults to the number of CPU cores.
 
     Returns:
         bool: `True` if the install script exited with status 0, `False` otherwise.
@@ -117,6 +120,8 @@ def install_tool(tool: str, script: str, build_dir: str, prefix: str) -> bool:
     env["PATH"] = ":".join(path)
     env["PREFIX"] = prefix
     env["USE_SUDO_INSTALL"] = "no"
+    if jobs is not None:
+        env["NPROC"] = str(jobs)
     try:
         os.makedirs(prefix, exist_ok=True)
     except PermissionError:
@@ -312,6 +317,12 @@ To install tools in a different location:
 To build tools in a different location:
     sc-install -build_dir /tmp yosys
 
+To limit parallel build jobs (useful for memory-constrained systems):
+    sc-install yosys -jobs 4
+
+To combine options (custom location with limited parallelism):
+    sc-install -prefix /opt/tools -jobs 8 openroad yosys
+
 To show the install script:
     sc-install -show openroad
 
@@ -359,6 +370,13 @@ Tool groups:
         help="Directory to build the tool in",
         metavar="<path>")
 
+    parser.add_argument(
+        "-jobs",
+        type=int,
+        default=None,
+        help="Maximum number of parallel build jobs (default: number of CPU cores)",
+        metavar="<int>")
+
     parser.add_argument(
         "-show",
         action="store_true",
@@ -389,6 +407,9 @@ Tool groups:
 
     tools_handled = set()
    tools_completed = set()
+    if args.jobs is not None and args.jobs < 1:
+        print("Error: -jobs must be a positive integer", file=sys.stderr)
+        return 1
     for tool in args.tool:
         if tool in tools_handled:
             continue
@@ -396,7 +417,7 @@ Tool groups:
         if args.show:
             show_tool(tool, tools[tool])
         else:
-            if not install_tool(tool, tools[tool], args.build_dir, args.prefix):
+            if not install_tool(tool, tools[tool], args.build_dir, args.prefix, args.jobs):
                 notstarted = set(args.tool) - tools_completed - tools_handled
                 __print_summary(tools_completed, tool, notstarted)
                 return 1
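For orientation, a minimal sketch of the new argument in use; only the signature and the NPROC export come from the diff above, while the script name and directories below are placeholders.

# Illustrative call only: script name and paths are placeholders.
from siliconcompiler.apps.sc_install import install_tool

ok = install_tool(
    tool="yosys",
    script="install-yosys.sh",   # placeholder install script
    build_dir="/tmp/sc_build",
    prefix="/opt/tools",
    jobs=4,                      # exported to the install script as NPROC=4
)
print("installed" if ok else "install failed")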
siliconcompiler/apps/sc_issue.py
CHANGED
@@ -70,6 +70,10 @@ To run a testcase, use:
         switchlist=switchlist)
 
     if not issue.get("cmdarg", "run"):
+        if not issue.get("cmdarg", "cfg"):
+            issue.logger.error('-cfg must be provided')
+            return 1
+
         project: Project = Project.from_manifest(filepath=issue.get("cmdarg", "cfg"))
 
         # Determine abs path for build dir
siliconcompiler/flows/dvflow.py
CHANGED
@@ -1,5 +1,8 @@
 from siliconcompiler.tools.icarus import compile as icarus_compile
+from siliconcompiler.tools.icarus import cocotb_exec as icarus_cocotb
 from siliconcompiler.tools.verilator import compile as verilator_compile
+from siliconcompiler.tools.verilator import cocotb_compile as verilator_cocotb_compile
+from siliconcompiler.tools.verilator import cocotb_exec as verilator_cocotb
 from siliconcompiler.tools.execute import exec_input
 from siliconcompiler.tools.xyce import simulate as xyce_simulate
 from siliconcompiler.tools.xdm import convert as xdm_convert
@@ -24,7 +27,9 @@ class DVFlow(Flowgraph):
     Supported tools are:
 
     * 'icarus': Compiles and simulates with the Icarus Verilog simulator.
+    * 'icarus-cocotb': Compiles with Icarus and runs cocotb Python testbenches.
     * 'verilator': Compiles and simulates with Verilator.
+    * 'verilator-cocotb': Compiles with Verilator and runs cocotb Python testbenches.
     * 'xyce': Simulates a netlist with the Xyce circuit simulator.
     * 'xdm-xyce': Converts a design to a Xyce-compatible format and simulates.
     '''
@@ -50,10 +55,18 @@ class DVFlow(Flowgraph):
             self.node("compile", icarus_compile.CompileTask())
             sim_task = exec_input.ExecInputTask()
             com_name = "compile"
+        elif tool == "icarus-cocotb":
+            self.node("compile", icarus_compile.CompileTask())
+            sim_task = icarus_cocotb.CocotbExecTask()
+            com_name = "compile"
         elif tool == "verilator":
             self.node("compile", verilator_compile.CompileTask())
             sim_task = exec_input.ExecInputTask()
             com_name = "compile"
+        elif tool == "verilator-cocotb":
+            self.node("compile", verilator_cocotb_compile.CocotbCompileTask())
+            sim_task = verilator_cocotb.CocotbExecTask()
+            com_name = "compile"
         elif tool == "xyce":
             sim_task = xyce_simulate.SimulateTask()
             com_name = None
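A hedged sketch of selecting one of the new flow variants; it assumes the tool name is passed to the DVFlow constructor, as the tool == ... branches above suggest.

from siliconcompiler.flows.dvflow import DVFlow

# Assumed constructor argument; compiles with Verilator and runs cocotb testbenches.
flow = DVFlow(tool="verilator-cocotb")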
siliconcompiler/package/__init__.py
CHANGED
@@ -16,6 +16,7 @@ import logging
 import os
 import random
 import re
+import site
 import shutil
 import time
 import threading
@@ -27,7 +28,7 @@ from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, Final
 
 from fasteners import InterProcessLock
 from importlib.metadata import distributions, distribution
-from pathlib import Path
+from pathlib import Path, PureWindowsPath
 from urllib import parse as url_parse
 
 from siliconcompiler.utils import get_plugins, default_cache_dir
@@ -647,21 +648,25 @@ class PythonPathResolver(Resolver):
             return False
         dist_name = dist_map[module_name][0]
 
-        is_editable = False
         dist_obj = distribution(dist_name)
-        if not dist_obj
+        if not dist_obj:
             return False
 
-
-
-
-
-
-
-
-
+        direct_url_content = dist_obj.read_text('direct_url.json')
+        if direct_url_content:
+            direct_url = json.loads(direct_url_content)
+            return direct_url.get('dir_info', {}).get('editable', False)
+
+        dist_loc = dist_obj.locate_file('')
+        site_paths = site.getsitepackages()
+        user_site_path = site.getusersitepackages()
+        if user_site_path:
+            site_paths.append(user_site_path)
+        if not dist_loc or not site_paths:
+            return False
 
-
+        dist_loc = PureWindowsPath(dist_loc).as_posix()
+        return dist_loc not in [PureWindowsPath(site_path).as_posix() for site_path in site_paths]
 
     @staticmethod
     def set_dataroot(root: "PathSchema",
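The editable-install detection added above can be read in isolation as a small stdlib-only sketch (PEP 610 direct_url.json first, site-packages location as a fallback); this mirrors the added lines rather than the exact helper in the package.

import json
import site
from importlib.metadata import distribution
from pathlib import PureWindowsPath

def is_editable(dist_name: str) -> bool:
    dist = distribution(dist_name)
    # PEP 610: pip records editable installs in direct_url.json.
    direct_url = dist.read_text('direct_url.json')
    if direct_url:
        return json.loads(direct_url).get('dir_info', {}).get('editable', False)
    # Fallback: a distribution located outside any site-packages directory
    # is treated as an editable (in-tree) install.
    site_paths = site.getsitepackages() + [site.getusersitepackages()]
    dist_loc = PureWindowsPath(dist.locate_file('')).as_posix()
    return dist_loc not in [PureWindowsPath(p).as_posix() for p in site_paths]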
siliconcompiler/package/git.py
CHANGED
@@ -151,7 +151,8 @@ class GitResolver(RemoteResolver):
             for submodule in repo.submodules:
                 submodule.update(recursive=True, init=True, force=True)
         except GitCommandError as e:
-
+            error_msg = str(e)
+            if 'Permission denied' in error_msg or 'could not read Username' in error_msg:
                 if self.urlscheme in ('ssh', 'git+ssh'):
                     raise RuntimeError('Failed to authenticate with Git. Please ensure your SSH '
                                        'keys are set up correctly.')
siliconcompiler/schema/baseschema.py
CHANGED
@@ -8,6 +8,7 @@ import contextlib
 import copy
 import importlib
 import logging
+import pathlib
 
 try:
     import gzip
@@ -385,10 +386,18 @@ class BaseSchema:
         fout = BaseSchema.__open_file(filepath, is_read=False)
 
         try:
+            def default(obj: Any) -> Any:
+                if isinstance(obj, pathlib.PurePath):
+                    # Cast everything to a windows path and convert to posix.
+                    # https://stackoverflow.com/questions/73682260
+                    return pathlib.PureWindowsPath(obj).as_posix()
+                raise TypeError
+
             if _has_orjson:
-                manifest_str = json.dumps(self.getdict(), option=json.OPT_INDENT_2).decode()
+                manifest_str = json.dumps(self.getdict(), option=json.OPT_INDENT_2,
+                                           default=default).decode()
             else:
-                manifest_str = json.dumps(self.getdict(), indent=2)
+                manifest_str = json.dumps(self.getdict(), indent=2, default=default)
             fout.write(manifest_str)
         finally:
             fout.close()
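Standalone illustration of the path-aware default hook added above, shown with the stdlib json module (the diff wires the same hook into orjson when it is available):

import json
import pathlib

def default(obj):
    if isinstance(obj, pathlib.PurePath):
        # Normalize to forward slashes regardless of host OS.
        return pathlib.PureWindowsPath(obj).as_posix()
    raise TypeError

print(json.dumps({"src": pathlib.PureWindowsPath(r"rtl\top.v")}, default=default))
# {"src": "rtl/top.v"}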
siliconcompiler/schema_support/cmdlineschema.py
CHANGED
@@ -143,7 +143,7 @@ class CommandLineSchema(BaseSchema):
         # Grab config from argv
         try:
             cfg_index = sys.argv.index("-cfg", 1)
-            if cfg_index < len(sys.argv):
+            if cfg_index + 1 < len(sys.argv):
                 cfg_file = sys.argv[cfg_index + 1]
         except ValueError:
             pass
@@ -161,7 +161,8 @@
         # Add commandline key for input files
         if not isinstance(schema, CommandLineSchema):
             raise TypeError("Schema is not a commandline class")
-
+
+        if not schema.valid("cmdarg", "input"):
             schema._add_commandline_argument("input", "[file]", "input files", ...)
             keyschema._add_commandline_argument("input", "[file]", "input files", ...)
 
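Why the + 1 matters: when -cfg is the last argument there is no value to read, and the old bound check still passed before indexing past the end of argv.

argv = ["sc", "-cfg"]               # flag given with no value
cfg_index = argv.index("-cfg", 1)   # == 1
print(cfg_index < len(argv))        # True  -> old check would then read argv[2] (IndexError)
print(cfg_index + 1 < len(argv))    # False -> new check safely skips the missing value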
siliconcompiler/tools/_common/cocotb/__init__.py
File without changes
siliconcompiler/tools/_common/cocotb/cocotb_task.py
ADDED
@@ -0,0 +1,286 @@
import os
import sys
from pathlib import Path
from typing import Optional, Union, Dict
import xml.etree.ElementTree as ET

from siliconcompiler import Task

try:
    import cocotb_tools.config
    from cocotb_tools.runner import Runner
    _has_cocotb = True
except ModuleNotFoundError:
    _has_cocotb = False
    Runner = object


def get_cocotb_config(sim="icarus"):
    if not _has_cocotb:
        raise NotImplementedError("COCOTB must be installed to use get_cocotb_config")

    libs_dir = cocotb_tools.config.libs_dir
    lib_name = cocotb_tools.config.lib_name("vpi", sim)
    share_dir = cocotb_tools.config.share_dir

    return libs_dir, lib_name, share_dir


class CocotbRunnerDummy(Runner):
    """
    A minimal Runner subclass used solely to retrieve the libpython path.

    This class provides access to the libpython shared library location
    without adding ``find_libpython`` as a direct dependency. It leverages
    cocotb's existing Runner infrastructure, which handles libpython
    discovery internally via the ``_set_env()`` method.

    The abstract methods required by the Runner base class are implemented
    as no-ops or raise NotImplementedError, as they are not intended to be
    called. This class should only be instantiated to call
    ``get_libpython_path()``.

    Example:
        >>> libpython = CocotbRunnerDummy().get_libpython_path()
        >>> print(libpython)
        /usr/lib/x86_64-linux-gnu/libpython3.10.so
    """

    def __init__(self):
        if not _has_cocotb:
            raise NotImplementedError("COCOTB must be installed to use get_cocotb_config")

        super().__init__()
        # These attributes are required by _set_env() which uses them to
        # populate environment variables.
        self.sim_hdl_toplevel = ""
        self.test_module = ""
        self.hdl_toplevel_lang = ""

    def _simulator_in_path(self):
        # No-op: This dummy class doesn't require any simulator executable.
        pass

    def _build_command(self):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for building HDL sources")

    def _test_command(self):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for running tests")

    def _get_define_options(self, defines):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def _get_include_options(self, includes):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def _get_parameter_options(self, parameters):
        raise NotImplementedError(
            "CocotbRunnerDummy is not intended for HDL compilation")

    def get_libpython_path(self):
        """
        Retrieve the path to the libpython shared library.

        This method uses cocotb's ``Runner._set_env()`` which internally
        calls ``find_libpython.find_libpython()`` to locate the library.

        Returns:
            str: Absolute path to the libpython shared library.

        Raises:
            ValueError: If libpython cannot be found.
        """
        self._set_env()
        return self.env["LIBPYTHON_LOC"]


class CocotbTask(Task):

    def __init__(self):
        super().__init__()

        self.add_parameter("cocotb_random_seed", "int",
                           'Random seed for cocotb test reproducibility. '
                           'If not set, cocotb will generate a random seed.')

    def set_cocotb_randomseed(
            self, seed: int,
            step: Optional[str] = None,
            index: Optional[Union[str, int]] = None
    ):
        """
        Sets the random seed for cocotb tests.

        Args:
            seed (int): The random seed value.
            step (str, optional): The specific step to apply this configuration to.
            index (str, optional): The specific index to apply this configuration to.
        """
        self.set("var", "cocotb_random_seed", seed, step=step, index=index)

    def task(self):
        return "exec_cocotb"

    def _get_test_modules(self):
        """
        Get cocotb test module names from Python files in filesets.

        Returns:
            tuple: (module_names, module_dirs) where module_names is a
                comma-separated string and module_dirs is a list of
                directories containing the modules.
        """
        module_names = []
        module_dirs = []
        seen_dirs = set()

        for lib, fileset in self.project.get_filesets():
            for pyfile in lib.get_file(fileset=fileset, filetype="python"):
                path = Path(pyfile)
                # Module name is the filename without .py extension
                module_name = path.stem
                module_names.append(module_name)
                # Track the directory for PYTHONPATH
                dir_path = str(path.parent.resolve())
                if dir_path not in seen_dirs:
                    seen_dirs.add(dir_path)
                    module_dirs.append(dir_path)

        return ",".join(module_names), module_dirs

    def _get_toplevel_lang(self):
        """
        Determine the HDL toplevel language from the design schema.

        For Icarus Verilog, this is always "verilog" since Icarus
        doesn't support VHDL. SystemVerilog is treated as Verilog
        for cocotb's TOPLEVEL_LANG.

        Returns:
            str: The toplevel language ("verilog").
        """
        # Icarus only supports Verilog/SystemVerilog, not VHDL
        # cocotb uses "verilog" for both Verilog and SystemVerilog
        return "verilog"

    def __setup_cocotb_environment(self):
        """
        Set up all required environment variables for cocotb execution.
        """

        test_modules, _ = self._get_test_modules()
        libpython_path = CocotbRunnerDummy().get_libpython_path()

        # LIBPYTHON_LOC: path to libpython shared library
        self.set_environmentalvariable("LIBPYTHON_LOC", libpython_path)
        self.add_required_key("env", "LIBPYTHON_LOC")

        # COCOTB_TOPLEVEL: the HDL toplevel module name
        self.set_environmentalvariable("COCOTB_TOPLEVEL", self.design_topmodule)
        self.add_required_key("env", "COCOTB_TOPLEVEL")

        # COCOTB_TEST_MODULES: comma-separated list of Python test modules
        self.set_environmentalvariable("COCOTB_TEST_MODULES", test_modules)
        self.add_required_key("env", "COCOTB_TEST_MODULES")

        # TOPLEVEL_LANG: HDL language of the toplevel
        self.set_environmentalvariable("TOPLEVEL_LANG", self._get_toplevel_lang())
        self.add_required_key("env", "TOPLEVEL_LANG")

        # COCOTB_RESULTS_FILE: path to xUnit XML results
        self.set_environmentalvariable("COCOTB_RESULTS_FILE", "outputs/results.xml")
        self.add_required_key("env", "COCOTB_RESULTS_FILE")

        # COCOTB_RANDOM_SEED: optional random seed for reproducibility
        random_seed = self.get("var", "cocotb_random_seed")
        if random_seed is not None:
            self.set_environmentalvariable("COCOTB_RANDOM_SEED", str(random_seed))
            self.add_required_key("env", "COCOTB_RANDOM_SEED")

    def setup(self):
        super().setup()

        if not _has_cocotb:
            raise RuntimeError("Cocotb is not installed; cannot run test.")

        # Output: xUnit XML results file
        self.add_output_file(file="results.xml")

        self.add_required_key("option", "design")
        self.add_required_key("option", "fileset")
        if self.project.get("option", "alias"):
            self.add_required_key("option", "alias")

        # Require Python test modules
        for lib, fileset in self.project.get_filesets():
            if lib.has_file(fileset=fileset, filetype="python"):
                self.add_required_key(lib, "fileset", fileset, "file", "python")

        if self.get("var", "cocotb_random_seed") is not None:
            self.add_required_key("var", "cocotb_random_seed")

        # Set up cocotb environment variables
        self.__setup_cocotb_environment()

    def get_runtime_environmental_variables(self, include_path: bool = True) -> Dict[str, str]:
        envs = super().get_runtime_environmental_variables(include_path)

        _, module_dirs = self._get_test_modules()
        libs_dir = cocotb_tools.config.libs_dir

        # PATH: add cocotb libs directory
        current_path = os.environ.get("PATH", "")
        new_path = f"{libs_dir}{os.pathsep}{current_path}"
        envs["PATH"] = new_path

        # PYTHONPATH: add directories containing test modules
        current_pythonpath = os.environ.get("PYTHONPATH", "")
        pythonpath_parts = module_dirs + ([current_pythonpath] if current_pythonpath else [])
        envs["PYTHONPATH"] = os.pathsep.join(pythonpath_parts)

        # PYGPI_PYTHON_BIN: Python executable
        envs["PYGPI_PYTHON_BIN"] = sys.executable

        return envs

    def _parse_cocotb_results(self, results_file: Path):
        """
        Parse the cocotb xUnit XML results file and extract metrics.

        Args:
            results_file: Path to the results.xml file.
        """
        try:
            tree = ET.parse(results_file)
            root = tree.getroot()

            # Count testcases and failures/errors
            testcases = root.findall(".//testcase")
            tests = len(testcases)
            failures = len(root.findall(".//failure"))
            errors = len(root.findall(".//error"))

            passed = tests - failures - errors

            self.logger.info(f"Cocotb results: {passed}/{tests} tests passed")
            if failures > 0:
                self.logger.warning(f"Cocotb: {failures} test(s) failed")
            if errors > 0:
                self.logger.warning(f"Cocotb: {errors} test(s) had errors")

            self.record_metric("errors", errors + failures, source_file=results_file)

        except Exception as e:
            self.logger.warning(f"Failed to parse cocotb results: {e}")

    def post_process(self):
        super().post_process()

        # Parse cocotb results XML and report metrics
        results_file = Path("outputs/results.xml")
        if results_file.exists():
            self._parse_cocotb_results(results_file)
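For context, a minimal cocotb test module of the kind COCOTB_TEST_MODULES points at; the module name is the Python file's stem (for example test_smoke.py gives test_smoke), per _get_test_modules() above. The decorator and trigger come from the standard cocotb API, not from this package.

# test_smoke.py (illustrative)
import cocotb
from cocotb.triggers import Timer

@cocotb.test()
async def smoke_test(dut):
    """Minimal check that the simulator starts and simulation time advances."""
    await Timer(10, "ns")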
siliconcompiler/tools/builtin/wait.py
CHANGED
@@ -1,5 +1,10 @@
+from typing import TYPE_CHECKING
+
 from siliconcompiler.tools.builtin import BuiltinTask
 
+if TYPE_CHECKING:
+    from siliconcompiler import Flowgraph
+
 
 class Wait(BuiltinTask):
     '''
@@ -14,3 +19,134 @@ class Wait(BuiltinTask):
 
     def task(self):
         return "wait"
+
+    @staticmethod
+    def __has_path(flowgraph: "Flowgraph", source: tuple, target: tuple) -> bool:
+        '''
+        Helper method to check if there's any path from source to target node.
+
+        Args:
+            flowgraph: The flowgraph to search.
+            source: Tuple of (step, index) for the source node.
+            target: Tuple of (step, index) for the target node.
+
+        Returns:
+            bool: True if there's a path from source to target, False otherwise.
+        '''
+        if source == target:
+            return True
+
+        visited = set()
+        to_visit = [source]
+
+        while to_visit:
+            current = to_visit.pop(0)
+            if current == target:
+                return True
+            if current in visited:
+                continue
+            visited.add(current)
+
+            # Add all output nodes to the queue
+            outputs = flowgraph.get_node_outputs(current[0], current[1])
+            to_visit.extend(outputs)
+
+        return False
+
+    @staticmethod
+    def serialize_tool_tasks(flowgraph: "Flowgraph", tool_name: str) -> None:
+        '''
+        Adds wait tasks between nodes of the same tool that could execute in parallel.
+
+        This method inserts `Wait` task nodes between nodes that use the same
+        tool, but only when those nodes could execute in parallel (i.e., there's
+        no dependency path between them). This ensures that tool instances don't
+        execute in parallel, while avoiding unnecessary wait tasks for nodes
+        that already have a dependency relationship.
+
+        The method modifies the flowgraph in-place by:
+        1. Finding all nodes that use the specified tool
+        2. For each pair of tool nodes with no dependency path, inserting a wait task
+        3. Using naming convention: {target_step}.wait
+           (named after the node being delayed)
+
+        Args:
+            flowgraph: The flowgraph to modify.
+            tool_name (str): The name of the tool (e.g., 'openroad', 'yosys').
+                All nodes using this tool will be serialized.
+
+        Raises:
+            ValueError: If the flowgraph is invalid or tool_name is empty.
+
+        Example:
+            >>> flow = Flowgraph("myflow")
+            >>> flow.node("place1", openroad.Place, index=0)
+            >>> flow.node("syn", yosys.Syn, index=0)
+            >>> flow.node("place2", openroad.Place, index=1)
+            >>> flow.edge("place1", "syn")
+            >>> flow.edge("syn", "place2")
+            >>> Wait.serialize_tool_tasks(flow, "openroad")
+            >>> # No wait task added - place1 and place2 already have a dependency path
+        '''
+        if not tool_name or not isinstance(tool_name, str):
+            raise ValueError(f"tool_name must be a non-empty string, not {tool_name}")
+
+        if not flowgraph.validate():
+            raise ValueError(f"Flowgraph '{flowgraph.name}' is invalid before serialization")
+
+        # Find all nodes using the specified tool
+        tool_nodes = []
+        for step, index in flowgraph.get_nodes():
+            graph_node = flowgraph.get_graph_node(step, index)
+            if graph_node.get_tool() == tool_name:
+                tool_nodes.append((step, index))
+
+        if not tool_nodes or len(tool_nodes) < 2:
+            # Need at least 2 nodes to create wait tasks
+            return
+
+        # Sort nodes by execution order to establish a consistent serialization
+        execution_order = flowgraph.get_execution_order()
+        node_order_map = {}
+        for level_idx, level in enumerate(execution_order):
+            for node_idx, node in enumerate(level):
+                node_order_map[node] = (level_idx, node_idx)
+
+        sorted_tool_nodes = sorted(
+            tool_nodes, key=lambda n: node_order_map.get(n, (float('inf'), 0)))
+
+        # Check if there are pairs without dependency paths that need serialization
+        needs_serialization = False
+        for i in range(len(sorted_tool_nodes)):
+            for j in range(i + 1, len(sorted_tool_nodes)):
+                node1 = sorted_tool_nodes[i]
+                node2 = sorted_tool_nodes[j]
+                if not Wait.__has_path(flowgraph, node1, node2):
+                    needs_serialization = True
+                    break
+            if needs_serialization:
+                break
+
+        if not needs_serialization:
+            # All tool nodes already have dependency paths, no wait nodes needed
+            return
+
+        # Create wait nodes between each pair of consecutive tool nodes
+        # Chain: tool[0] -> wait_1 -> tool[1] -> wait_2 -> tool[2] -> ...
+        for i in range(len(sorted_tool_nodes) - 1):
+            curr_node = sorted_tool_nodes[i]
+            next_node = sorted_tool_nodes[i + 1]
+
+            # Check if there's already a dependency path between them
+            if Wait.__has_path(flowgraph, curr_node, next_node):
+                # Already serialized, skip
+                continue
+
+            # Create wait node named after the next node (the one being delayed)
+            wait_step = f"{next_node[0]}.wait"
+            wait_index = next_node[1]
+            flowgraph.node(wait_step, Wait(), index=wait_index)
+
+            # Connect: curr_node -> wait_node -> next_node
+            flowgraph.edge(curr_node[0], wait_step, tail_index=curr_node[1], head_index=wait_index)
+            flowgraph.edge(wait_step, next_node[0], tail_index=wait_index, head_index=next_node[1])