siliconcompiler 0.34.0__py3-none-any.whl → 0.34.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +1 -1
- siliconcompiler/apps/sc.py +1 -1
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +3 -3
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +4 -4
- siliconcompiler/checklist.py +203 -2
- siliconcompiler/core.py +28 -246
- siliconcompiler/data/templates/email/general.j2 +3 -3
- siliconcompiler/data/templates/email/summary.j2 +1 -1
- siliconcompiler/data/templates/issue/README.txt +1 -1
- siliconcompiler/data/templates/report/sc_report.j2 +7 -7
- siliconcompiler/design.py +148 -54
- siliconcompiler/flowgraph.py +50 -15
- siliconcompiler/optimizer/vizier.py +2 -2
- siliconcompiler/pdk.py +5 -5
- siliconcompiler/remote/client.py +18 -12
- siliconcompiler/remote/server.py +2 -2
- siliconcompiler/report/dashboard/cli/__init__.py +6 -6
- siliconcompiler/report/dashboard/cli/board.py +3 -3
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -5
- siliconcompiler/report/dashboard/web/components/flowgraph.py +4 -4
- siliconcompiler/report/dashboard/web/components/graph.py +2 -2
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +5 -5
- siliconcompiler/report/html_report.py +1 -1
- siliconcompiler/report/report.py +4 -4
- siliconcompiler/report/summary_table.py +2 -2
- siliconcompiler/report/utils.py +5 -5
- siliconcompiler/scheduler/docker.py +3 -8
- siliconcompiler/scheduler/run_node.py +2 -7
- siliconcompiler/scheduler/scheduler.py +14 -11
- siliconcompiler/scheduler/schedulernode.py +136 -126
- siliconcompiler/scheduler/send_messages.py +3 -3
- siliconcompiler/scheduler/slurm.py +5 -3
- siliconcompiler/scheduler/taskscheduler.py +8 -7
- siliconcompiler/schema/baseschema.py +1 -2
- siliconcompiler/schema/namedschema.py +26 -2
- siliconcompiler/tool.py +398 -175
- siliconcompiler/tools/__init__.py +2 -0
- siliconcompiler/tools/builtin/_common.py +5 -5
- siliconcompiler/tools/builtin/concatenate.py +5 -5
- siliconcompiler/tools/builtin/minimum.py +4 -4
- siliconcompiler/tools/builtin/mux.py +4 -4
- siliconcompiler/tools/builtin/nop.py +4 -4
- siliconcompiler/tools/builtin/verify.py +7 -7
- siliconcompiler/tools/execute/exec_input.py +1 -1
- siliconcompiler/tools/genfasm/genfasm.py +1 -6
- siliconcompiler/tools/openroad/_apr.py +5 -1
- siliconcompiler/tools/openroad/antenna_repair.py +1 -1
- siliconcompiler/tools/openroad/macro_placement.py +1 -1
- siliconcompiler/tools/openroad/power_grid.py +1 -1
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +5 -0
- siliconcompiler/tools/opensta/timing.py +26 -3
- siliconcompiler/tools/slang/__init__.py +2 -2
- siliconcompiler/tools/surfer/__init__.py +0 -0
- siliconcompiler/tools/surfer/show.py +53 -0
- siliconcompiler/tools/surfer/surfer.py +30 -0
- siliconcompiler/tools/vpr/route.py +27 -14
- siliconcompiler/tools/vpr/vpr.py +23 -6
- siliconcompiler/tools/yosys/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/procs.tcl +143 -0
- siliconcompiler/tools/yosys/{sc_synth_asic.tcl → scripts/sc_synth_asic.tcl} +4 -0
- siliconcompiler/tools/yosys/{sc_synth_fpga.tcl → scripts/sc_synth_fpga.tcl} +24 -77
- siliconcompiler/tools/yosys/syn_fpga.py +14 -0
- siliconcompiler/toolscripts/_tools.json +8 -12
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +33 -0
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +33 -0
- siliconcompiler/utils/__init__.py +2 -1
- siliconcompiler/utils/flowgraph.py +24 -23
- siliconcompiler/utils/issue.py +23 -29
- siliconcompiler/utils/logging.py +35 -6
- siliconcompiler/utils/showtools.py +6 -1
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/METADATA +15 -25
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/RECORD +84 -82
- siliconcompiler/tools/yosys/procs.tcl +0 -71
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +0 -68
- /siliconcompiler/tools/yosys/{sc_lec.tcl → scripts/sc_lec.tcl} +0 -0
- /siliconcompiler/tools/yosys/{sc_screenshot.tcl → scripts/sc_screenshot.tcl} +0 -0
- /siliconcompiler/tools/yosys/{syn_strategies.tcl → scripts/syn_strategies.tcl} +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/top_level.txt +0 -0
siliconcompiler/design.py
CHANGED
@@ -1,11 +1,12 @@
 import contextlib
 import re
 
+import os.path
+
 from pathlib import Path
 from typing import List
 
 from siliconcompiler import utils
-from siliconcompiler import SiliconCompilerError
 
 from siliconcompiler.dependencyschema import DependencySchema
 from siliconcompiler.schema import NamedSchema
@@ -16,9 +17,9 @@ from siliconcompiler.schema.utils import trim
 ###########################################################################
 class DesignSchema(NamedSchema, DependencySchema):
 
-    def __init__(self, name: str):
-
-
+    def __init__(self, name: str = None):
+        super().__init__()
+        self.set_name(name)
 
         schema_design(self)
 
@@ -38,9 +39,8 @@ class DesignSchema(NamedSchema, DependencySchema):
             str: Topmodule name
 
         Notes:
-
-
-
+            - first character must be letter or underscore
+            - remaining characters can be letters, digits, or underscores
         """
 
         # topmodule safety check
@@ -58,7 +58,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             str: Topmodule name
-
         """
         return self.__get(fileset, 'topmodule')
 
@@ -78,9 +77,9 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of include directories
-
         """
-        return self.__set_add(fileset, 'idir', value, clobber, typelist=[str, list]
+        return self.__set_add(fileset, 'idir', value, clobber, typelist=[str, list],
+                              package=package)
 
     def get_idir(self, fileset: str = None) -> List[str]:
         """Returns include directories for a fileset.
@@ -90,7 +89,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of include directories
-
         """
         return self.__get(fileset, 'idir')
 
@@ -120,7 +118,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of macro definitions
-
         """
         return self.__get(fileset, 'define')
 
@@ -138,7 +135,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of macro (un)definitions
-
         """
         return self.__set_add(fileset, 'undefine', value, clobber, typelist=[str, list])
 
@@ -170,9 +166,9 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of library directories.
-
         """
-        return self.__set_add(fileset, 'libdir', value, clobber, typelist=[str, list]
+        return self.__set_add(fileset, 'libdir', value, clobber, typelist=[str, list],
+                              package=package)
 
     def get_libdir(self, fileset: str = None) -> List[str]:
         """Returns dynamic library directories for a fileset.
@@ -182,7 +178,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of library directories.
-
         """
         return self.__get(fileset, 'libdir')
 
@@ -200,7 +195,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of libraries.
-
         """
         return self.__set_add(fileset, 'lib', value, clobber, typelist=[str, list])
 
@@ -212,7 +206,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             list[str]: List of libraries.
-
        """
         return self.__get(fileset, 'lib')
 
@@ -230,7 +223,6 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Returns:
             str: Parameter value
-
         """
 
         if fileset is None:
@@ -273,8 +265,6 @@ class DesignSchema(NamedSchema, DependencySchema):
         """
         Adds files to a fileset.
 
-
-
         .v → (source, verilog)
         .vhd → (source, vhdl)
         .sdc → (constraint, sdc)
@@ -300,9 +290,7 @@ class DesignSchema(NamedSchema, DependencySchema):
         - This method normalizes `filename` to a string for consistency.
 
         - If no filetype is specified, filetype is inferred based on
-
-
-
+          the file extension via a mapping table. (eg. .v is verilog).
         """
 
         if fileset is None:
@@ -337,15 +325,24 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         # final error checking
         if not fileset or not filetype:
-            raise
+            raise ValueError(
                 f'Unable to infer fileset and/or filetype for '
                 f'{filename} based on file extension.')
 
         # adding files to dictionary
         if clobber:
-
+            params = self.set('fileset', fileset, 'file', filetype, filename)
         else:
-
+            params = self.add('fileset', fileset, 'file', filetype, filename)
+
+        if package and params:
+            if not isinstance(params, (list, set, tuple)):
+                params = [params]
+
+            for param in params:
+                param.set(package, field="package")
+
+        return params
 
     ###############################################
     def get_file(self,
@@ -383,6 +380,42 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         return filelist
 
+    def __write_flist(self, filename: str, filesets: list):
+        written_cmd = set()
+
+        with open(filename, "w") as f:
+            def write(cmd):
+                if cmd in written_cmd:
+                    f.write(f"// {cmd}\n")
+                else:
+                    written_cmd.add(cmd)
+                    f.write(f"{cmd}\n")
+
+            def write_header(header):
+                f.write(f"// {header}\n")
+
+            for lib in [self, *self.get_dep()]:
+                write_header(f"{lib.name()}")
+                for fileset in filesets:
+                    if not lib.valid('fileset', fileset):
+                        continue
+
+                    if lib.get('fileset', fileset, 'idir'):
+                        write_header(f"{lib.name()} / {fileset} / include directories")
+                        for idir in lib.find_files('fileset', fileset, 'idir'):
+                            write(f"+incdir+{idir}")
+
+                    if lib.get('fileset', fileset, 'define'):
+                        write_header(f"{lib.name()} / {fileset} / defines")
+                        for define in lib.get('fileset', fileset, 'define'):
+                            write(f"+define+{define}")
+
+                    for filetype in lib.getkeys('fileset', fileset, 'file'):
+                        if lib.get('fileset', fileset, 'file', filetype):
+                            write_header(f"{lib.name()} / {fileset} / {filetype} files")
+                            for file in lib.find_files('fileset', fileset, 'file', filetype):
+                                write(file)
+
     ###############################################
     def write_fileset(self,
                       filename: str,
@@ -392,14 +425,12 @@ class DesignSchema(NamedSchema, DependencySchema):
 
         Currently supports Verilog `flist` format only.
         Intended to support other formats in the future.
+        Inferred from file extension if not given.
 
         Args:
             filename (str or Path): Output file name.
             fileset (str or list[str]): Fileset(s) to export.
             fileformat (str, optional): Export format.
-
-                Inferred from file extension if not given.
-
         """
 
         if filename is None:
@@ -411,6 +442,10 @@ class DesignSchema(NamedSchema, DependencySchema):
         if not isinstance(fileset, list):
             fileset = [fileset]
 
+        for fset in fileset:
+            if not isinstance(fset, str):
+                raise ValueError("fileset key must be a string")
+
         # file extension lookup
         if not fileformat:
             formats = {}
@@ -418,28 +453,79 @@ class DesignSchema(NamedSchema, DependencySchema):
             fileformat = formats[Path(filename).suffix.strip('.')]
 
         if fileformat == "flist":
-
-            # TODO: add source info for comments to flist.
-            with open(filename, "w") as f:
-                for i in fileset:
-                    if not isinstance(i, str):
-                        raise ValueError("fileset key must be a string")
-                    for j in ['idir', 'define', 'file']:
-                        if j == 'idir':
-                            vals = self.get('fileset', i, 'idir')
-                            cmd = "+incdir+"
-                        elif j == 'define':
-                            vals = self.get('fileset', i, 'define')
-                            cmd = "+define+"
-                        else:
-                            vals = self.get('fileset', i, 'file', 'verilog')
-                            cmd = ""
-                        if vals:
-                            for item in vals:
-                                f.write(f"{cmd}{item}\n")
+            self.__write_flist(filename, fileset)
         else:
             raise ValueError(f"{fileformat} is not supported")
 
+    def __read_flist(self, filename: str, fileset: str):
+        # Extract information
+        rel_path = os.path.dirname(os.path.abspath(filename))
+
+        def expand_path(path):
+            path = os.path.expandvars(path)
+            path = os.path.expanduser(path)
+            if os.path.isabs(path):
+                return path
+            return os.path.join(rel_path, path)
+
+        include_dirs = []
+        defines = []
+        files = []
+        with utils.sc_open(filename) as f:
+            for line in f:
+                line = line.strip()
+                if not line:
+                    continue
+                if line.startswith("//"):
+                    continue
+                if line.startswith("+incdir+"):
+                    include_dirs.append(expand_path(line[8:]))
+                elif line.startswith("+define+"):
+                    defines.append(os.path.expandvars(line[8:]))
+                else:
+                    files.append(expand_path(line))
+
+        # Create packages
+        all_paths = include_dirs + [os.path.dirname(f) for f in files]
+        all_paths = sorted(set(all_paths))
+
+        package_root_name = f'flist-{self.name()}-{fileset}-{os.path.basename(filename)}'
+        packages = {}
+
+        for path_dir in all_paths:
+            found = False
+            for pdir in packages:
+                if path_dir.startswith(pdir):
+                    found = True
+                    break
+            if not found:
+                package_name = f"{package_root_name}-{len(packages)}"
+                self.register_package(package_name, path_dir)
+                packages[path_dir] = package_name
+
+        def get_package(path):
+            for pdir, name in packages.items():
+                if path.startswith(pdir):
+                    return name, pdir
+            return None, None
+
+        # Assign data
+        with self.active_fileset(fileset):
+            if defines:
+                self.add_define(defines)
+            if include_dirs:
+                for dir in include_dirs:
+                    package_name, pdir = get_package(dir)
+                    if package_name:
+                        dir = os.path.relpath(dir, pdir)
+                    self.add_idir(dir, package=package_name)
+            if files:
+                for f in files:
+                    package_name, pdir = get_package(f)
+                    if package_name:
+                        f = os.path.relpath(f, pdir)
+                    self.add_file(f, package=package_name)
+
     ################################################
     def read_fileset(self,
                      filename: str,
@@ -454,7 +540,6 @@ class DesignSchema(NamedSchema, DependencySchema):
             filename (str or Path): Output file name.
             fileset (str or list[str]): Filesets to import.
             fileformat (str, optional): Export format.
-
         """
 
         if filename is None:
@@ -466,14 +551,14 @@ class DesignSchema(NamedSchema, DependencySchema):
             fileformat = formats[Path(filename).suffix.strip('.')]
 
         if fileformat == "flist":
-
+            self.__read_flist(filename, fileset)
         else:
             raise ValueError(f"{fileformat} is not supported")
 
     ################################################
     # Helper Functions
     ################################################
-    def __set_add(self, fileset, option, value, clobber=False, typelist=None):
+    def __set_add(self, fileset, option, value, clobber=False, typelist=None, package=None):
         '''Sets a parameter value in schema.
         '''
 
@@ -497,9 +582,18 @@ class DesignSchema(NamedSchema, DependencySchema):
             raise ValueError(f"None is an illegal {option} value")
 
         if list in typelist and not clobber:
-
+            params = self.add('fileset', fileset, option, value)
         else:
-
+            params = self.set('fileset', fileset, option, value)
+
+        if package and params:
+            if not isinstance(params, (list, set, tuple)):
+                params = [params]
+
+            for param in params:
+                param.set(package, field="package")
+
+        return params
 
     def __get(self, fileset, option):
         '''Gets a parameter value from schema.
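As a usage note for the new flist support shown above, here is a minimal sketch. It assumes the import path siliconcompiler.design, that the extension lookup maps ".f" to the flist format, and that add_file/add_idir/add_define pick up the fileset from the active_fileset() context (as __read_flist itself does); everything else is taken from the method names and docstrings in the diff.

from siliconcompiler.design import DesignSchema  # import path assumed for this sketch

design = DesignSchema("gcd")

# Populate a fileset; active_fileset() and the add_* helpers appear in the diff above.
with design.active_fileset("rtl"):
    design.add_file("rtl/gcd.v")      # filetype inferred from the .v extension
    design.add_idir("rtl/include")
    design.add_define("WIDTH=32")

# Export the fileset as a Verilog flist (format assumed to be inferred from the .f suffix).
design.write_fileset("gcd.f", fileset="rtl")

# Re-import the flist elsewhere; __read_flist registers one data package per
# directory root it finds and records files and include dirs relative to it.
readback = DesignSchema("gcd_readback")
readback.read_fileset("gcd.f", fileset="rtl")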
siliconcompiler/flowgraph.py
CHANGED
@@ -1,4 +1,5 @@
 import inspect
+import importlib
 
 from siliconcompiler import Schema
 from siliconcompiler.schema import BaseSchema, NamedSchema
@@ -9,8 +10,9 @@ from siliconcompiler import NodeStatus
 
 
 class FlowgraphSchema(NamedSchema):
-    def __init__(self, name):
-        super().__init__(
+    def __init__(self, name=None):
+        super().__init__()
+        self.set_name(name)
 
         schema = EditableSchema(self)
         schema.insert("default", "default", FlowgraphNodeSchema())
@@ -30,6 +32,8 @@ class FlowgraphSchema(NamedSchema):
 
         self.__cache_node_outputs = None
 
+        self.__cache_tasks = None
+
     def node(self, step, task, index=0):
         '''
         Creates a flowgraph node.
@@ -79,6 +83,11 @@ class FlowgraphSchema(NamedSchema):
             raise ValueError(f"{task} is not a valid task, it must be associated with "
                              "a tool '<tool>.<task>'.")
 
+        if '/' in step:
+            raise ValueError(f"{step} is not a valid step, it cannot contain '/'")
+        if '/' in index:
+            raise ValueError(f"{index} is not a valid index, it cannot contain '/'")
+
         tool_name, task_name = task_parts[-2:]
 
         # bind tool to node
@@ -114,7 +123,7 @@ class FlowgraphSchema(NamedSchema):
 
         for step, index in [(head, head_index), (tail, tail_index)]:
             if not self.valid(step, index):
-                raise ValueError(f"{step}{index} is not a defined node in {self.name()}.")
+                raise ValueError(f"{step}/{index} is not a defined node in {self.name()}.")
 
         tail_node = (tail, tail_index)
         if tail_node in self.get(head, head_index, 'input'):
@@ -183,7 +192,7 @@ class FlowgraphSchema(NamedSchema):
         before_index = str(before_index)
 
         if (before_step, before_index) not in self.get_nodes():
-            raise ValueError(f'{before_step}{before_index} is not a valid node in {self.name()}')
+            raise ValueError(f'{before_step}/{before_index} is not a valid node in {self.name()}')
 
         # add the node
         self.node(step, task, index=index)
@@ -388,7 +397,7 @@ class FlowgraphSchema(NamedSchema):
         index = str(index)
 
         if (step, index) not in self.get_nodes():
-            raise ValueError(f"{step}{index} is not a valid node")
+            raise ValueError(f"{step}/{index} is not a valid node")
 
         if self.__cache_node_outputs is not None:
             return self.__cache_node_outputs[(step, index)]
@@ -464,15 +473,15 @@ class FlowgraphSchema(NamedSchema):
             if input_nodes.count(node) > 1:
                 in_step, in_index = node
                 if logger:
-                    logger.error(f'Duplicate edge from {in_step}{in_index} to '
-                                 f'{step}{index} in the {self.name()} flowgraph')
+                    logger.error(f'Duplicate edge from {in_step}/{in_index} to '
+                                 f'{step}/{index} in the {self.name()} flowgraph')
                 error = True
 
         diff_nodes = check_nodes.difference(self.get_nodes())
         if diff_nodes:
             if logger:
                 for step, index in diff_nodes:
-                    logger.error(f'{step}{index} is missing in the {self.name()} flowgraph')
+                    logger.error(f'{step}/{index} is missing in the {self.name()} flowgraph')
             error = True
 
         # Detect missing definitions
@@ -480,7 +489,7 @@ class FlowgraphSchema(NamedSchema):
             for item in ('tool', 'task', 'taskmodule'):
                 if not self.get(step, index, item):
                     if logger:
-                        logger.error(f'{step}{index} is missing a {item} definition in the '
+                        logger.error(f'{step}/{index} is missing a {item} definition in the '
                                      f'{self.name()} flowgraph')
                     error = True
 
@@ -490,11 +499,37 @@ class FlowgraphSchema(NamedSchema):
         if loop_path:
             error = True
             if logger:
-                loop_path = [f"{step}{index}" for step, index in loop_path]
+                loop_path = [f"{step}/{index}" for step, index in loop_path]
                 logger.error(f"{' -> '.join(loop_path)} forms a loop in {self.name()}")
 
         return not error
 
+    def get_task_module(self, step, index):
+        """
+        Returns the module for a given task
+
+        Args:
+            step (str): Step name
+            index (int/str): Step index
+        """
+
+        index = str(index)
+
+        if (step, index) not in self.get_nodes():
+            raise ValueError(f"{step}/{index} is not a valid node in {self.name()}.")
+
+        taskmodule = self.get(step, index, 'taskmodule')
+
+        # Create cache
+        if self.__cache_tasks is None:
+            self.__cache_tasks = {}
+
+        if taskmodule in self.__cache_tasks:
+            return self.__cache_tasks[taskmodule]
+
+        self.__cache_tasks[taskmodule] = importlib.import_module(taskmodule)
+        return self.__cache_tasks[taskmodule]
+
 
 class RuntimeFlowgraph:
     '''
@@ -649,13 +684,13 @@
         index = str(index)
 
         if (step, index) not in self.get_nodes():
-            raise ValueError(f"{step}{index} is not a valid node")
+            raise ValueError(f"{step}/{index} is not a valid node")
 
         return tuple(sorted(self.__walk_graph((step, str(index)), reverse=False)))
 
     def get_node_inputs(self, step, index, record=None):
         if (step, index) not in self.get_nodes():
-            raise ValueError(f"{step}{index} is not a valid node")
+            raise ValueError(f"{step}/{index} is not a valid node")
 
         if record is None:
             inputs = set()
@@ -722,7 +757,7 @@
         # Check for undefined prunes
         for step, index in sorted(prune_nodes.difference(flow.get_nodes())):
            if logger:
-                logger.error(f'{step}{index} is not defined in the {flow.name()} flowgraph')
+                logger.error(f'{step}/{index} is not defined in the {flow.name()} flowgraph')
            error = True
 
         if not error:
@@ -763,9 +798,9 @@
                 if entrynode in runtime.__walk_graph(exitnode):
                     found = True
             if not found:
-                exits = ",".join([f"{step}{index}"
+                exits = ",".join([f"{step}/{index}"
                                   for step, index in runtime.get_exit_nodes()])
-                missing.append(f'no path from {entrynode[0]}{entrynode[1]} to {exits} '
+                missing.append(f'no path from {entrynode[0]}/{entrynode[1]} to {exits} '
                                f'in the {flow.name()} flowgraph')
             if found:
                 found_any = True
siliconcompiler/optimizer/vizier.py
CHANGED
@@ -157,7 +157,7 @@ class VizierOptimizier(Optimizer):
         chip.graph(flow, org_flow, name=graph_name)
 
         # Complete nodes
-        nodes = chip.
+        nodes = chip.get("flowgraph", org_flow, field="schema").get_nodes()
         for step, _ in list(nodes):
             nodes.append((step, None))
         nodes = set(nodes)
@@ -167,7 +167,7 @@ class VizierOptimizier(Optimizer):
            if key[0] == 'history':
                continue
 
-            for value, step, index in chip.
+            for value, step, index in chip.get(*key, field=None).getvalues():
                node = (step, index)
 
                if node in nodes:
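The flowgraph changes above tighten node naming (steps and indexes may no longer contain '/', and nodes are now reported as '<step>/<index>') and add a cached task-module lookup. A minimal sketch of how this surfaces, assuming the task is given as a dotted '<tool>.<task>' module path as the error message in the diff suggests, and using the packaged builtin nop task so the module path resolves:

from siliconcompiler.flowgraph import FlowgraphSchema  # import path assumed for this sketch

flow = FlowgraphSchema("demoflow")

# node(step, task, index=0) binds a '<tool>.<task>' module to the node.
flow.node("import", "siliconcompiler.tools.builtin.nop", index=0)

# '/' is now rejected in step and index names, since nodes are printed as '<step>/<index>'.
try:
    flow.node("place/route", "siliconcompiler.tools.builtin.nop")
except ValueError as err:
    print(err)  # place/route is not a valid step, it cannot contain '/'

# get_task_module() imports the node's taskmodule via importlib and caches the
# result, so repeated lookups do not re-import the same module.
nop_module = flow.get_task_module("import", 0)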
siliconcompiler/pdk.py
CHANGED
@@ -1,12 +1,12 @@
-from siliconcompiler.schema import NamedSchema
+from siliconcompiler.schema import NamedSchema
 from siliconcompiler.schema import EditableSchema, Parameter, Scope
 from siliconcompiler.schema.utils import trim
 
 
-class PDKSchema(NamedSchema
-    def __init__(self, name
-
-
+class PDKSchema(NamedSchema):
+    def __init__(self, name=None):
+        super().__init__()
+        self.set_name(name)
 
         schema_pdk(self)
 
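Note that design.py, flowgraph.py, and pdk.py all move to the same constructor pattern: the name argument is now optional and is applied through NamedSchema's set_name() instead of being passed to the base constructor. A minimal sketch, assuming name() returns the assigned string and set_name() may also be called after construction:

from siliconcompiler.pdk import PDKSchema  # import path assumed for this sketch

# Name supplied at construction time, as before.
pdk = PDKSchema("freepdk45")
assert pdk.name() == "freepdk45"

# With the name=None default, the object can also be created unnamed
# and named later via set_name().
deferred = PDKSchema()
deferred.set_name("asap7")
assert deferred.name() == "asap7"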