siliconcompiler-0.25.0-cp38-cp38-win_amd64.whl → siliconcompiler-0.26.0-cp38-cp38-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc.py +3 -0
- siliconcompiler/apps/sc_remote.py +40 -9
- siliconcompiler/core.py +188 -41
- siliconcompiler/flowgraph.py +73 -12
- siliconcompiler/flows/_common.py +1 -0
- siliconcompiler/issue.py +0 -3
- siliconcompiler/remote/client.py +65 -14
- siliconcompiler/remote/schema.py +4 -4
- siliconcompiler/remote/server.py +4 -3
- siliconcompiler/scheduler/__init__.py +10 -4
- siliconcompiler/scheduler/docker_runner.py +7 -2
- siliconcompiler/schema/schema_cfg.py +16 -19
- siliconcompiler/schema/schema_obj.py +3 -3
- siliconcompiler/schema/utils.py +4 -0
- siliconcompiler/tools/_common/__init__.py +21 -5
- siliconcompiler/tools/surelog/bin/surelog.exe +0 -0
- siliconcompiler/use.py +12 -0
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/METADATA +2 -2
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/RECORD +24 -24
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/LICENSE +0 -0
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/top_level.txt +0 -0
siliconcompiler/_metadata.py
CHANGED
siliconcompiler/apps/sc.py
CHANGED
siliconcompiler/apps/sc_remote.py
CHANGED
@@ -5,9 +5,9 @@ import sys
 
 from siliconcompiler import Chip
 from siliconcompiler import SiliconCompilerError
-from siliconcompiler.remote.client import
-
-
+from siliconcompiler.remote.client import cancel_job, check_progress, delete_job, \
+    remote_ping, remote_run_loop, _remote_ping
+from siliconcompiler.remote.client import configure_server, configure_whitelist, configure_print
 from siliconcompiler.scheduler import _finalize_run
 from siliconcompiler.flowgraph import _get_flowgraph_entry_nodes, _get_flowgraph_node_outputs, \
     nodes_to_execute
@@ -27,6 +27,14 @@ To generate a configuration file, use:
     sc-remote -configure -server https://example.com
     sc-remote -configure -server https://example.com:1234
 
+to add or remove directories from upload whitelist, these
+also support globbing:
+    sc-remote -configure -add ./fine_to_upload
+    sc-remote -configure -remove ./no_longer_okay_to_upload
+
+to display the full configuration of the credentials file
+    sc-remote -configure -list
+
 To check an ongoing job's progress, use:
     sc-remote -cfg <stepdir>/outputs/<design>.pkg.json
 
@@ -49,9 +57,20 @@ To delete a job, use:
     '-configure': {'action': 'store_true',
                    'help': 'create configuration file for the remote',
                    'sc_print': False},
-    '-server': {'help': 'address of server for configure',
+    '-server': {'help': 'address of server for configure (only valid with -configure)',
                 'metavar': '<server>',
                 'sc_print': False},
+    '-add': {'help': 'path to add to the upload whitelist (only valid with -configure)',
+             'metavar': '<path>',
+             'nargs': '+',
+             'sc_print': False},
+    '-remove': {'help': 'path to remove from the upload whitelist (only valid with -configure)',
+                'metavar': '<path>',
+                'nargs': '+',
+                'sc_print': False},
+    '-list': {'help': 'print the current configuration (only valid with -configure)',
+              'action': 'store_true',
+              'sc_print': False},
     '-reconnect': {'action': 'store_true',
                    'help': 'reconnect to a running job on the remote',
                    'sc_print': False},
@@ -88,11 +107,23 @@ To delete a job, use:
                    f'{", ".join(["-"+e for e in cfg_only])}')
 
     if args['configure']:
-
-
-
-
-
+        if args['list']:
+            configure_print(chip)
+            return 0
+
+        if not args['add'] and not args['remove']:
+            try:
+                configure_server(chip, server=args['server'])
+            except ValueError as e:
+                chip.logger.error(e)
+                return 1
+        else:
+            try:
+                configure_whitelist(chip, add=args['add'], remove=args['remove'])
+            except ValueError as e:
+                chip.logger.error(e)
+                return 1
+
         return 0
 
     # Main logic.
siliconcompiler/core.py
CHANGED
@@ -17,6 +17,7 @@ import graphviz
 import codecs
 from siliconcompiler.remote import client
 from siliconcompiler.schema import Schema, SCHEMA_VERSION
+from siliconcompiler.schema import utils as schema_utils
 from siliconcompiler import utils
 from siliconcompiler import _metadata
 from siliconcompiler import NodeStatus, SiliconCompilerError
@@ -212,14 +213,14 @@ class Chip:
 
         self.logger.propagate = False
 
-        loglevel = '
+        loglevel = 'info'
         if hasattr(self, 'schema'):
             loglevel = self.schema.get('option', 'loglevel', step=step, index=index)
         else:
             in_run = False
 
         log_format = ['%(levelname)-7s']
-        if loglevel == '
+        if loglevel == 'debug':
             log_format.append('%(funcName)-10s')
             log_format.append('%(lineno)-4s')
 
@@ -256,7 +257,7 @@ class Chip:
         formatter = logging.Formatter(logformat)
         handler.setFormatter(formatter)
 
-        self.logger.setLevel(loglevel)
+        self.logger.setLevel(schema_utils.translate_loglevel(loglevel))
 
     ###########################################################################
     def _init_codecs(self):
@@ -556,6 +557,11 @@ class Chip:
                 self._loaded_modules['libs'].append(use_module.design)
                 self.__import_library(use_module.design, use_module.schema.cfg)
 
+                is_auto_enable = getattr(use_module, 'is_auto_enable', None)
+                if is_auto_enable:
+                    if is_auto_enable():
+                        self.add('option', 'library', use_module.design)
+
             else:
                 module_name = module.__name__
                 class_name = use_module.__class__.__name__
@@ -875,7 +881,7 @@ class Chip:
         # Special case to ensure loglevel is updated ASAP
         if keypath == ['option', 'loglevel'] and field == 'value' and \
                 step == self.get('arg', 'step') and index == self.get('arg', 'index'):
-            self.logger.setLevel(value)
+            self.logger.setLevel(schema_utils.translate_loglevel(value))
 
         try:
             value_success = self.schema.set(*keypath, value, field=field, clobber=clobber,
@@ -973,7 +979,8 @@ class Chip:
             self.error(str(e))
 
     ###########################################################################
-    def input(self, filename, fileset=None, filetype=None, iomap=None,
+    def input(self, filename, fileset=None, filetype=None, iomap=None,
+              step=None, index=None, package=None):
         '''
         Adds file to a filset. The default behavior is to infer filetypes and
         filesets based on the suffix of the file extensions. The method is
@@ -989,24 +996,30 @@ class Chip:
            fileset (str): File grouping
            filetype (str): File type
            iomap (dict of tuple(set, type)): File set and type mapping based on file extension
-
+           step (str): Node name
+           index (str): Node index
+           package (str): Name of package where this file can be found
        '''
 
-        self.__add_input_output('input', filename, fileset, filetype, iomap,
+        self.__add_input_output('input', filename, fileset, filetype, iomap,
+                                step=step, index=index, package=package)
    # Replace {iotable} in __doc__ with actual table for fileset/filetype and extension mapping
    input.__doc__ = input.__doc__.replace("{iotable}",
                                          utils.format_fileset_type_table())
 
    ###########################################################################
-    def output(self, filename, fileset=None, filetype=None, iomap=None
+    def output(self, filename, fileset=None, filetype=None, iomap=None,
+               step=None, index=None, package=None):
        '''Same as input'''
 
-        self.__add_input_output('output', filename, fileset, filetype, iomap
+        self.__add_input_output('output', filename, fileset, filetype, iomap,
+                                step=step, index=index, package=package)
    # Copy input functions __doc__ and replace 'input' with 'output' to make constant
    output.__doc__ = input.__doc__.replace("input", "output")
 
    ###########################################################################
-    def __add_input_output(self, category, filename, fileset, filetype, iomap,
+    def __add_input_output(self, category, filename, fileset, filetype, iomap,
+                           step=None, index=None, package=None):
        '''
        Adds file to input or output groups.
        Performs a lookup in the io map for the fileset and filetype
@@ -1045,7 +1058,8 @@ class Chip:
        elif not fileset:
            self.logger.info(f'{filename} inferred as fileset {use_fileset}')
 
-        self.add(category, use_fileset, use_filetype, filename,
+        self.add(category, use_fileset, use_filetype, filename,
+                 step=step, index=index, package=package)
 
    ###########################################################################
    def find_files(self, *keypath, missing_ok=False, job=None, step=None, index=None):
@@ -1793,7 +1807,8 @@ class Chip:
 
        if 'library' in libcfg:
            for sublib_name, sublibcfg in libcfg['library'].items():
-                self.__import_library(sublib_name, sublibcfg,
+                self.__import_library(sublib_name, sublibcfg,
+                                      job=job, clobber=clobber)
 
            del libcfg['library']
 
@@ -1861,13 +1876,24 @@ class Chip:
        # controlling graph direction
        if landscape:
            rankdir = 'LR'
+            out_label_suffix = ':e'
+            in_label_suffix = ':w'
        else:
            rankdir = 'TB'
+            out_label_suffix = ':s'
+            in_label_suffix = ':n'
 
        all_graph_inputs, nodes, edges, show_io = _get_flowgraph_information(self, flow, io=show_io)
 
+        if not show_io:
+            out_label_suffix = ''
+            in_label_suffix = ''
+
        dot = graphviz.Digraph(format=fileformat)
        dot.graph_attr['rankdir'] = rankdir
+        if show_io:
+            dot.graph_attr['concentrate'] = 'true'
+            dot.graph_attr['ranksep'] = '0.75'
        dot.attr(bgcolor=background)
 
        with dot.subgraph(name='inputs') as input_graph:
@@ -1875,35 +1901,153 @@ class Chip:
            input_graph.graph_attr['color'] = background
 
            # add inputs
-            for graph_input in all_graph_inputs:
+            for graph_input in sorted(all_graph_inputs):
                input_graph.node(
                    graph_input, label=graph_input, bordercolor=fontcolor, style='filled',
                    fontcolor=fontcolor, fontsize=fontsize, ordering="in",
                    penwidth=penwidth, fillcolor=fillcolor, shape="box")
 
-
-
-
-
-
-
-
-
-        if
-
-
-
-            labelname
-
-
-
+        with dot.subgraph(name='input_nodes') as input_graph_nodes:
+            input_graph_nodes.graph_attr['cluster'] = 'true'
+            input_graph_nodes.graph_attr['color'] = background
+
+            # add nodes
+            shape = "oval" if not show_io else "Mrecord"
+            for node, info in nodes.items():
+                task_label = f"\\n ({info['task']})" if info['task'] is not None else ""
+                if show_io:
+                    input_labels = [f"<{ikey}> {ifile}" for ifile, ikey in info['inputs'].items()]
+                    output_labels = [f"<{okey}> {ofile}" for ofile, okey in info['outputs'].items()]
+                    center_text = f"\\n {node} {task_label} \\n\\n"
+                    labelname = "{"
+                    if input_labels:
+                        labelname += f"{{ {' | '.join(input_labels)} }} |"
+                    labelname += center_text
+                    if output_labels:
+                        labelname += f"| {{ {' | '.join(output_labels)} }}"
+                    labelname += "}"
+                else:
+                    labelname = f"{node}{task_label}"
+
+                dst = dot
+                if info['is_input']:
+                    dst = input_graph_nodes
+                dst.node(node, label=labelname, bordercolor=fontcolor, style='filled',
+                         fontcolor=fontcolor, fontsize=fontsize, ordering="in",
+                         penwidth=penwidth, fillcolor=fillcolor, shape=shape)
+
+        for edge0, edge1, weight in edges:
+            dot.edge(f'{edge0}{out_label_suffix}', f'{edge1}{in_label_suffix}', weight=str(weight))
+
+        try:
+            dot.render(filename=fileroot, cleanup=True)
+        except graphviz.ExecutableNotFound as e:
+            self.logger.error(f'Unable to save flowgraph: {e}')
+
+    ###########################################################################
+    def write_dependencygraph(self, filename, flow=None,
+                              fillcolor='#ffffff', fontcolor='#000000',
+                              background='transparent', fontsize='14',
+                              border=True, landscape=False):
+        r'''
+        Renders and saves the dependenct graph to a file.
+
+        The chip object flowgraph is traversed to create a graphviz (\*.dot)
+        file comprised of node, edges, and labels. The dot file is a
+        graphical representation of the flowgraph useful for validating the
+        correctness of the execution flow graph. The dot file is then
+        converted to the appropriate picture or drawing format based on the
+        filename suffix provided. Supported output render formats include
+        png, svg, gif, pdf and a few others. For more information about the
+        graphviz project, see see https://graphviz.org/
+
+        Args:
+            filename (filepath): Output filepath
+            flow (str): Name of flowgraph to render
+            fillcolor(str): Node fill RGB color hex value
+            fontcolor (str): Node font RGB color hex value
+            background (str): Background color
+            fontsize (str): Node text font size
+            border (bool): Enables node border if True
+            landscape (bool): Renders graph in landscape layout if True
+            show_io (bool): Add file input/outputs to graph
+
+        Examples:
+            >>> chip.write_flowgraph('mydump.png')
+            Renders the object flowgraph and writes the result to a png file.
+        '''
+        filepath = os.path.abspath(filename)
+        self.logger.debug('Writing dependency graph to file %s', filepath)
+        fileroot, ext = os.path.splitext(filepath)
+        fileformat = ext.replace(".", "")
+
+        # controlling border width
+        if border:
+            penwidth = '1'
+        else:
+            penwidth = '0'
+
+        # controlling graph direction
+        if landscape:
+            rankdir = 'LR'
+        else:
+            rankdir = 'TB'
 
-
+        dot = graphviz.Digraph(format=fileformat)
+        dot.graph_attr['rankdir'] = rankdir
+        dot.attr(bgcolor=background)
+
+        def make_node(node_type, node, label):
+            node = f'{node_type}-{node}'
+
+            if node in nodes:
+                return node
+
+            nodes.add(node)
+            dot.node(node, label=node, bordercolor=fontcolor, style='filled',
+                     fontcolor=fontcolor, fontsize=fontsize, ordering="in",
+                     penwidth=penwidth, fillcolor=fillcolor)
+            return node
+
+        nodes = {}
+
+        def collect_library(root_type, lib, name=None):
+            if not name:
+                name = lib.design
+            root_label = f'{root_type}-{name}'
+
+            if root_label in nodes:
+                return
+
+            in_libs = lib.get('option', 'library',
+                              step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY) + \
+                lib.get('asic', 'logiclib',
+                        step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY) + \
+                lib.get('asic', 'macrolib',
+                        step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY)
+
+            in_labels = []
+            for in_lib in in_libs:
+                in_labels.append(f'library-{in_lib}')
+
+            nodes[root_label] = {
+                "text": name,
+                "shape": "oval" if root_type == "library" else "box",
+                "connects_to": set(in_labels)
+            }
+
+            for in_lib in in_libs:
+                collect_library("library", Schema(cfg=self.getdict('library', in_lib)), name=in_lib)
+
+        collect_library("design", self)
+
+        for label, info in nodes.items():
+            dot.node(label, label=info['text'], bordercolor=fontcolor, style='filled',
                     fontcolor=fontcolor, fontsize=fontsize, ordering="in",
-                     penwidth=penwidth, fillcolor=fillcolor, shape=shape)
+                     penwidth=penwidth, fillcolor=fillcolor, shape=info['shape'])
 
-
-
+            for conn in info['connects_to']:
+                dot.edge(label, conn, dir='back')
 
        try:
            dot.render(filename=fileroot, cleanup=True)
@@ -1911,12 +2055,11 @@ class Chip:
            self.logger.error(f'Unable to save flowgraph: {e}')
 
    ########################################################################
-    def collect(self, directory=None, verbose=True):
+    def collect(self, directory=None, verbose=True, whitelist=None):
        '''
        Collects files found in the configuration dictionary and places
        them in inputs/. The function only copies in files that have the 'copy'
-        field set as true.
-        copied in.
+        field set as true.
 
        1. indexing like in run, job1
        2. chdir package
@@ -1927,6 +2070,9 @@ class Chip:
        Args:
            directory (filepath): Output filepath
            verbose (bool): Flag to indicate if logging should be used
+            whitelist (list[path]): List of directories that are allowed to be
+                collected. If a directory is is found that is not on this list
+                a RuntimeError will be raised.
        '''
 
        if not directory:
@@ -1942,7 +2088,6 @@ class Chip:
        dirs = {}
        files = {}
 
-        copyall = self.get('option', 'copyall')
        for key in self.allkeys():
            if key[-2:] == ('option', 'builddir'):
                # skip builddir
@@ -1963,8 +2108,7 @@ class Chip:
            is_dir = re.search('dir', leaftype)
            is_file = re.search('file', leaftype)
            if is_dir or is_file:
-
-                if copyall or copy:
+                if self.get(*key, field='copy'):
                    for value, step, index in self.schema._getvals(*key):
                        if not value:
                            continue
@@ -1998,7 +2142,7 @@ class Chip:
            if os.path.exists(dst_path):
                continue
 
-            directory_file_limit =
+            directory_file_limit = None
            file_count = 0
 
            # Do sanity checks
@@ -2041,13 +2185,16 @@ class Chip:
                nonlocal file_count
                file_count += len(files) - len(hidden_files)
 
-                if file_count > directory_file_limit:
+                if directory_file_limit and file_count > directory_file_limit:
                    self.logger.error(f'File collection from {abspath} exceeds '
                                      f'{directory_file_limit} files')
                    return files
 
                return hidden_files
 
+            if whitelist is not None and abspath not in whitelist:
+                raise RuntimeError(f'{abspath} is not on the approved collection list.')
+
            if verbose:
                self.logger.info(f"Copying directory {abspath} to '{directory}' directory")
            shutil.copytree(abspath, dst_path, ignore=check_path)
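A minimal sketch of the extended core.py APIs shown above. The signatures (step/index/package on input()/output(), whitelist on collect()) come from the diff; the file name, package name, and paths are placeholders and are not assumed to exist.

```python
from siliconcompiler import Chip

chip = Chip('demo')

# input()/output() now accept step/index/package, mirroring chip.set()/chip.add();
# 'demo_data' is a placeholder data-source name.
chip.input('demo.v', step='import', index='0', package='demo_data')

# collect() now takes an optional whitelist of directories; per the diff, a
# collected directory outside the whitelist raises RuntimeError instead of being copied.
try:
    chip.collect(whitelist=['/path/allowed_to_upload'])
except RuntimeError as e:
    chip.logger.error(f'Refusing to collect: {e}')
```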
siliconcompiler/flowgraph.py
CHANGED
@@ -1,4 +1,5 @@
 import os
+import math
 from siliconcompiler import SiliconCompilerError, NodeStatus
 from siliconcompiler.tools._common import input_file_node_name, get_tool_task
 
@@ -425,13 +426,19 @@ def _get_flowgraph_information(chip, flow, io=True):
    org_schema = chip.schema.copy()
 
    # Setup nodes
-
+    node_exec_order = _get_flowgraph_execution_order(chip, flow)
    if io:
-
+        # try:
+        for layer_nodes in node_exec_order:
            for step, index in layer_nodes:
                _setup_node(chip, step, index, flow=flow)
-
-
+        # except:  # noqa E722
+        #     io = False
+
+    node_rank = {}
+    for rank, rank_nodes in enumerate(node_exec_order):
+        for step, index in rank_nodes:
+            node_rank[f'{step}{index}'] = rank
 
    graph_inputs = {}
    all_graph_inputs = set()
@@ -446,6 +453,8 @@ def _get_flowgraph_information(chip, flow, io=True):
    for inputs in graph_inputs.values():
        all_graph_inputs.update(inputs)
 
+    exit_nodes = [f'{step}{index}' for step, index in _get_flowgraph_exit_nodes(chip, flow)]
+
    nodes = {}
    edges = []
 
@@ -455,7 +464,9 @@ def _get_flowgraph_information(chip, flow, io=True):
    def clean_text(label):
        return label.replace("<", r"\<").replace(">", r"\>")
 
-    for step, index in _get_flowgraph_nodes(chip, flow)
+    all_nodes = [(step, index) for step, index in sorted(_get_flowgraph_nodes(chip, flow))
+                 if chip.get('record', 'status', step=step, index=index) != NodeStatus.SKIPPED]
+    for step, index in all_nodes:
        tool, task = get_tool_task(chip, step, index, flow=flow)
 
        if io:
@@ -472,35 +483,85 @@ def _get_flowgraph_information(chip, flow, io=True):
            inputs.extend(graph_inputs[(step, index)])
 
        nodes[node] = {
+            "node": (step, index),
+            "file_inputs": inputs,
            "inputs": {clean_text(f): f'input-{clean_label(f)}' for f in sorted(inputs)},
            "outputs": {clean_text(f): f'output-{clean_label(f)}' for f in sorted(outputs)},
-            "task": f'{tool}/{task}' if tool != 'builtin' else task
+            "task": f'{tool}/{task}' if tool != 'builtin' else task,
+            "is_input": node_rank[node] == 0,
+            "rank": node_rank[node]
        }
+        nodes[node]["width"] = max(len(nodes[node]["inputs"]), len(nodes[node]["outputs"]))
+
+        if tool is None or task is None:
+            nodes[node]["task"] = None
+
+        rank_diff = {}
+        for in_step, in_index in _get_flowgraph_node_inputs(chip, flow, (step, index)):
+            rank_diff[f'{in_step}{in_index}'] = node_rank[node] - node_rank[f'{in_step}{in_index}']
+        nodes[node]["rank_diff"] = rank_diff
 
+    for step, index in all_nodes:
+        node = f'{step}{index}'
        if io:
            # get inputs
+            edge_stats = {}
            for infile, in_nodes in input_provides(chip, step, index, flow=flow).items():
                outfile = infile
                for in_step, in_index in in_nodes:
                    infile = outfile
-                    if infile not in
+                    if infile not in nodes[node]["file_inputs"]:
                        infile = input_file_node_name(infile, in_step, in_index)
-                    if infile not in
+                    if infile not in nodes[node]["file_inputs"]:
                        continue
-
+                    in_node_name = f"{in_step}{in_index}"
+                    outlabel = f"{in_node_name}:output-{clean_label(outfile)}"
                    inlabel = f"{step}{index}:input-{clean_label(infile)}"
-
+
+                    if in_node_name not in edge_stats:
+                        edge_stats[in_node_name] = {
+                            "count": 0,
+                            "pairs": [],
+                            "weight": min(nodes[node]["width"], nodes[in_node_name]["width"])
+                        }
+                    edge_stats[in_node_name]["count"] += 1
+                    edge_stats[in_node_name]["pairs"].append((outlabel, inlabel))
+
+            # assign edge weights
+
+            # scale multiple weights
+            for edge_data in edge_stats.values():
+                edge_data["weight"] = int(
+                    math.floor(max(1, edge_data["weight"] / edge_data["count"])))
+
+            # lower exit nodes weights
+            if node in exit_nodes:
+                for edge_data in edge_stats.values():
+                    edge_data["weight"] = 1
+            else:
+                for edge_data in edge_stats.values():
+                    edge_data["weight"] *= 2
+
+            # adjust for rank differences, lower weight if rankdiff is greater than 1
+            for in_node, edge_data in edge_stats.items():
+                if nodes[node]["rank_diff"][in_node] > 1:
+                    edge_data["weight"] = 1
+
+            # create edges
+            for edge_data in edge_stats.values():
+                for outlabel, inlabel in edge_data["pairs"]:
+                    edges.append([outlabel, inlabel, edge_data["weight"]])
 
            if (step, index) in graph_inputs:
                for key in graph_inputs[(step, index)]:
                    inlabel = f"{step}{index}:input-{clean_label(key)}"
-                    edges.append((key, inlabel))
+                    edges.append((key, inlabel, 1))
        else:
            all_inputs = []
            for in_step, in_index in chip.get('flowgraph', flow, step, index, 'input'):
                all_inputs.append(f'{in_step}{in_index}')
            for item in all_inputs:
-                edges.append((item, node))
+                edges.append((item, node, 1 if node in exit_nodes else 2))
 
    # Restore schema
    chip.schema = org_schema
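The edge-weight logic added above can be summarized with a small worked example. The numbers below are chosen purely for illustration; the arithmetic mirrors the code in the hunk.

```python
import math

# A producer with 4 labeled outputs feeds a consumer with 6 labeled inputs
# over 3 parallel file edges.
width = min(6, 4)                                  # min of the two node widths -> 4
count = 3                                          # number of file edges between the nodes
weight = int(math.floor(max(1, width / count)))    # -> 1
is_exit_node = False
rank_diff = 1                                      # consumer sits one rank after the producer

if is_exit_node:
    weight = 1       # edges into exit nodes are de-emphasized
else:
    weight *= 2      # -> 2
if rank_diff > 1:
    weight = 1       # long edges that skip ranks are also de-emphasized

print(weight)        # 2, passed to graphviz as the edge weight
```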
siliconcompiler/flows/_common.py
CHANGED
@@ -12,6 +12,7 @@ def _make_docs(chip):
     from siliconcompiler.targets import freepdk45_demo
     chip.set('input', 'rtl', 'vhdl', 'test')
     chip.set('input', 'rtl', 'verilog', 'test')
+    chip.set('input', 'rtl', 'systemverilog', 'test')
     chip.set('input', 'hll', 'c', 'test')
     chip.set('input', 'hll', 'bsv', 'test')
     chip.set('input', 'hll', 'scala', 'test')
siliconcompiler/issue.py
CHANGED
@@ -58,9 +58,6 @@ def generate_testcase(chip,
     task_requires = chip.get('tool', tool, 'task', task, 'require',
                              step=step, index=index)
 
-    # Set copy flags for _collect
-    chip.set('option', 'copyall', False)
-
     def determine_copy(*keypath, in_require):
         copy = in_require
 
siliconcompiler/remote/client.py
CHANGED
@@ -14,7 +14,6 @@ from siliconcompiler import utils, SiliconCompilerError
 from siliconcompiler import NodeStatus as SCNodeStatus
 from siliconcompiler._metadata import default_server
 from siliconcompiler.schema import Schema
-from siliconcompiler.utils import default_credentials_file
 from siliconcompiler.scheduler import _setup_node, _runtask, _executenode, clean_node_dir
 from siliconcompiler.flowgraph import _get_flowgraph_entry_nodes, nodes_to_execute
 from siliconcompiler.remote import JobStatus
@@ -31,9 +30,9 @@ __warn_if_no_server = True
 # Multiprocessing interface.
 multiprocessor = multiprocessing.get_context('spawn')
 
-__tos_str = '''Please review the SiliconCompiler cloud
+__tos_str = '''Please review the SiliconCompiler cloud's terms of service:
 
-https://www.siliconcompiler.com/terms
+https://www.siliconcompiler.com/terms
 
 In particular, please ensure that you have the right to distribute any IP
 which is contained in designs that you upload to the service. This public
@@ -217,7 +216,8 @@ def _remote_preprocess(chip, remote_nodelist):
 
    # Collect inputs into a collection directory only for remote runs, since
    # we need to send inputs up to the server.
-    chip
+    cfg = get_remote_config(chip, False)
+    chip.collect(whitelist=cfg.setdefault('directory_whitelist', []))
 
    # Recover step/index
    chip.set('arg', 'step', preset_step)
@@ -299,15 +299,12 @@ def _process_progress_info(chip, progress_info, nodes_to_print=3):
    return completed
 
 
-def
-    '''
-    Returns the remote credentials
-    '''
+def get_remote_config_file(chip, fail=True):
    if chip.get('option', 'credentials'):
        # Use the provided remote credentials file.
        cfg_file = os.path.abspath(chip.get('option', 'credentials'))
 
-        if not os.path.isfile(cfg_file):
+        if fail and not os.path.isfile(cfg_file):
            # Check if it's a file since its been requested by the user
            raise SiliconCompilerError(
                f'Unable to find the credentials file: {cfg_file}',
@@ -316,6 +313,15 @@ def get_remote_config(chip, verbose):
        # Use the default config file path.
        cfg_file = utils.default_credentials_file()
 
+    return cfg_file
+
+
+def get_remote_config(chip, verbose):
+    '''
+    Returns the remote credentials
+    '''
+    cfg_file = get_remote_config_file(chip)
+
    remote_cfg = {}
    cfg_dir = os.path.dirname(cfg_file)
    if os.path.isdir(cfg_dir) and os.path.isfile(cfg_file):
@@ -331,7 +337,8 @@ def get_remote_config(chip, verbose):
                                f'defaulting to {default_server}')
        __warn_if_no_server = False
        remote_cfg = {
-            "address": default_server
+            "address": default_server,
+            "directory_whitelist": []
        }
    if 'address' not in remote_cfg:
        raise SiliconCompilerError(
@@ -842,7 +849,7 @@ def remote_ping(chip):
    return response_info
 
 
-def
+def configure_server(chip, server=None, port=None, username=None, password=None):
 
    def confirm_dialog(message):
        confirmed = False
@@ -857,9 +864,7 @@ def configure(chip, server=None, port=None, username=None, password=None):
    default_server_name = urllib.parse.urlparse(default_server).hostname
 
    # Find the config file/directory path.
-    cfg_file = chip
-    if not cfg_file:
-        cfg_file = default_credentials_file()
+    cfg_file = get_remote_config_file(chip, False)
    cfg_dir = os.path.dirname(cfg_file)
 
    # Create directory if it doesn't exist.
@@ -924,9 +929,55 @@ def configure(chip, server=None, port=None, username=None, password=None):
    if password:
        config['password'] = password
 
+    config['directory_whitelist'] = []
+
    # Save the values to the target config file in JSON format.
    with open(cfg_file, 'w') as f:
        f.write(json.dumps(config, indent=4))
 
    # Let the user know that we finished successfully.
    chip.logger.info(f'Remote configuration saved to: {cfg_file}')
+
+
+def configure_whitelist(chip, add, remove):
+    try:
+        cfg_file = get_remote_config_file(chip)
+    except SiliconCompilerError as e:
+        chip.logger.error(f'{e}')
+
+    chip.logger.info(f'Updating credentials: {cfg_file}')
+    cfg = get_remote_config(chip, True)
+
+    if 'directory_whitelist' not in cfg:
+        cfg['directory_whitelist'] = []
+
+    if add:
+        for path in add:
+            path = os.path.abspath(path)
+            chip.logger.info(f'Adding {path}')
+            cfg['directory_whitelist'].append(path)
+
+    if remove:
+        for path in remove:
+            path = os.path.abspath(path)
+            if path in cfg['directory_whitelist']:
+                chip.logger.info(f'Removing {path}')
+                cfg['directory_whitelist'].remove(path)
+
+    cfg['directory_whitelist'] = list(set(cfg['directory_whitelist']))
+
+    # Save the values to the target config file in JSON format.
+    with open(cfg_file, 'w') as f:
+        f.write(json.dumps(cfg, indent=4))
+
+
+def configure_print(chip):
+    cfg = get_remote_config(chip, True)
+
+    chip.logger.info(f'Server: {get_base_url(chip)}')
+    if 'username' in cfg:
+        chip.logger.info(f'Username: {cfg["username"]}')
+    if 'directory_whitelist' in cfg and cfg['directory_whitelist']:
+        chip.logger.info('Directory whitelist:')
+        for path in sorted(cfg['directory_whitelist']):
+            chip.logger.info(f'  {path}')
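Based on the keys handled above ('address', 'username', 'password', 'directory_whitelist'), the credentials file written by configure_server() and updated by configure_whitelist() is plain JSON along these lines. Every value in this sketch is a placeholder; only the key names come from the diff.

```python
import json

# Placeholder contents for illustration; the client writes the same structure
# with json.dumps(..., indent=4).
example_credentials = {
    "address": "https://example.com",
    "username": "myname",
    "password": "...",
    "directory_whitelist": [
        "/home/myname/project/rtl"
    ],
}
print(json.dumps(example_credentials, indent=4))
```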
siliconcompiler/remote/schema.py
CHANGED
@@ -81,15 +81,15 @@ def schema_cfg():
 
     scparam(cfg, ['option', 'loglevel'],
             sctype='enum',
-            enum=["
+            enum=["info", "warning", "error", "critical", "debug"],
             pernode='optional',
             scope='job',
-            defvalue='
+            defvalue='info',
             shorthelp="Logging level",
             switch="-loglevel <str>",
             example=[
-                "cli: -loglevel
-                "api: server.set('option', 'loglevel', '
+                "cli: -loglevel info",
+                "api: server.set('option', 'loglevel', 'info')"],
             schelp="""
             Provides explicit control over the level of debug logging printed.""")
 
siliconcompiler/remote/server.py
CHANGED
@@ -15,6 +15,7 @@ from fastjsonschema import JsonSchemaException
 import io
 
 from siliconcompiler import Chip, Schema
+from siliconcompiler.schema import utils as schema_utils
 from siliconcompiler._metadata import version as sc_version
 from siliconcompiler.schema import SCHEMA_VERSION as sc_schema_version
 from siliconcompiler.remote.schema import ServerSchema
@@ -64,7 +65,7 @@ class Server:
     __version__ = '0.0.1'
 
     ####################
-    def __init__(self, loglevel="
+    def __init__(self, loglevel="info"):
         '''
         Init method for Server object
         '''
@@ -75,7 +76,7 @@ class Server:
         formatter = log.Formatter('%(asctime)s | %(levelname)-8s | %(message)s')
         handler.setFormatter(formatter)
         self.logger.addHandler(handler)
-        self.logger.setLevel(loglevel)
+        self.logger.setLevel(schema_utils.translate_loglevel(loglevel))
 
         self.schema = ServerSchema(logger=self.logger)
 
@@ -497,7 +498,7 @@ class Server:
         value = args[-1]
 
         if keypath == ['option', 'loglevel'] and field == 'value':
-            self.logger.setLevel(value)
+            self.logger.setLevel(schema_utils.translate_loglevel(value))
 
         self.schema.set(*keypath, value, field=field, clobber=clobber)
 
siliconcompiler/scheduler/__init__.py
CHANGED
@@ -219,7 +219,8 @@ def _local_process(chip, flow):
                 node_status = schema.get('record', 'status', step=step, index=index)
             except:  # noqa E722
                 pass
-
+            if node_status:
+                chip.set('record', 'status', node_status, step=step, index=index)
 
     def mark_pending(step, index):
         chip.set('record', 'status', NodeStatus.PENDING, step=step, index=index)
@@ -944,6 +945,7 @@ def _run_executable_or_builtin(chip, step, index, version, toolpath, workdir, ru
                                            is_stderr_log, stderr_reader)
        retcode = proc.returncode
 
+    chip.set('record', 'toolexitcode', retcode, step=step, index=index)
    if retcode != 0:
        msg = f'Command failed with code {retcode}.'
        if logfile:
@@ -1280,9 +1282,13 @@ def _reset_flow_nodes(chip, flow, nodes_to_execute):
            # in the nodes to execute.
            clear_node(step, index)
        elif os.path.isfile(cfg):
-
-
-
+            try:
+                old_status = Schema(manifest=cfg).get('record', 'status', step=step, index=index)
+                if old_status:
+                    chip.set('record', 'status', old_status, step=step, index=index)
+            except Exception:
+                # unable to load so leave it default
+                pass
        else:
            chip.set('record', 'status', NodeStatus.ERROR, step=step, index=index)
 
siliconcompiler/scheduler/docker_runner.py
CHANGED
@@ -88,8 +88,13 @@ def get_volumes_directories(chip, cache_dir, workdir, step, index):
 def init(chip):
     if sys.platform == 'win32':
         # this avoids the issue of different file system types
-        chip.logger.error('Setting
-        chip.
+        chip.logger.error('Setting copy field to true for docker run on Windows')
+        for key in chip.allkeys():
+            if key[0] == 'history':
+                continue
+            sc_type = chip.get(*key, field='type')
+            if 'dir' in sc_type or 'file' in sc_type:
+                chip.set(*key, True, field='copy')
         chip.collect()
 
 
siliconcompiler/schema/schema_cfg.py
CHANGED
@@ -11,7 +11,7 @@ try:
 except ImportError:
     from siliconcompiler.schema.utils import trim
 
-SCHEMA_VERSION = '0.44.
+SCHEMA_VERSION = '0.44.2'
 
 #############################################################################
 # PARAM DEFINITION
@@ -2401,6 +2401,16 @@ def schema_record(cfg, step='default', index='default'):
                pernode='required',
                schelp=f'Record tracking the {val[0]} per step and index basis. {helpext}')
 
+    scparam(cfg, ['record', 'toolexitcode'],
+            sctype='int',
+            shorthelp="Record: tool exit code",
+            switch="-record_toolexitcode 'step index <int>'",
+            example=[
+                "cli: -record_toolexitcode 'dfm 0 0'",
+                "api: chip.set('record', 'toolexitcode', 0, step='dfm', index=0)"],
+            pernode='required',
+            schelp='Record tracking the tool exit code per step and index basis.')
+
    # Unlike most other 'record' fields, job ID is not set per-node.
    scparam(cfg, ['record', 'remoteid'],
            sctype='str',
@@ -2657,18 +2667,17 @@ def schema_option(cfg):
 
    scparam(cfg, ['option', 'loglevel'],
            sctype='enum',
-            enum=["
+            enum=["info", "warning", "error", "critical", "debug"],
            pernode='optional',
            scope='job',
-            defvalue='
+            defvalue='info',
            shorthelp="Logging level",
            switch="-loglevel <str>",
            example=[
-                "cli: -loglevel
-                "api: chip.set('option', 'loglevel', '
+                "cli: -loglevel info",
+                "api: chip.set('option', 'loglevel', 'info')"],
            schelp="""
-            Provides explicit control over the level of debug logging printed.
-            Valid entries include INFO, DEBUG, WARNING, ERROR.""")
+            Provides explicit control over the level of debug logging printed.""")
 
    scparam(cfg, ['option', 'builddir'],
            sctype='dir',
@@ -2854,18 +2863,6 @@ def schema_option(cfg):
            being recorded in the manifest so only turn on this feature
            if you have control of the final manifest.""")
 
-    scparam(cfg, ['option', 'copyall'],
-            sctype='bool',
-            scope='job',
-            shorthelp="Copy all inputs to build directory",
-            switch="-copyall <bool>",
-            example=["cli: -copyall",
-                     "api: chip.set('option', 'copyall', True)"],
-            schelp="""
-            Specifies that all used files should be copied into the
-            build directory, overriding the per schema entry copy
-            settings.""")
-
    scparam(cfg, ['option', 'entrypoint'],
            sctype='str',
            pernode='optional',
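As a hedged illustration of consuming the new ['record', 'toolexitcode'] entry defined above (the step and index names are placeholders, and a completed run is assumed):

```python
from siliconcompiler import Chip

chip = Chip('demo')
# Assumes a prior chip.run(); 'syn'/'0' are placeholder step/index names.
exitcode = chip.get('record', 'toolexitcode', step='syn', index='0')
if exitcode is not None and exitcode != 0:
    chip.logger.warning(f'syn/0 exited with code {exitcode}')
```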
siliconcompiler/schema/schema_obj.py
CHANGED
@@ -23,7 +23,7 @@ except ImportError:
     _has_yaml = False
 
 from .schema_cfg import schema_cfg
-from .utils import escape_val_tcl, PACKAGE_ROOT
+from .utils import escape_val_tcl, PACKAGE_ROOT, translate_loglevel
 
 
 class Schema:
@@ -1532,7 +1532,7 @@ class Schema:
            if isinstance(log_level, list):
                # if multiple found, pick the first one
                log_level = log_level[0]
-            logger.setLevel(log_level.split()[-1])
+            logger.setLevel(translate_loglevel(log_level).split()[-1])
 
        # Read in all cfg files
        if 'option_cfg' in cmdargs.keys():
@@ -1679,7 +1679,7 @@ class Schema:
        return switchstrs, metavar
 
    ###########################################################################
-    def read_manifest(self, filename, clear=True, clobber=True, allow_missing_keys=
+    def read_manifest(self, filename, clear=True, clobber=True, allow_missing_keys=True):
        """
        Reads a manifest from disk and merges it with the current manifest.
 
siliconcompiler/schema/utils.py
CHANGED
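The four lines added to schema/utils.py are not rendered by the diff viewer. Based only on the call sites shown above (core.py, server.py, schema_obj.py), the new translate_loglevel() helper converts the schema's lowercase level names ('info', 'debug', ...) into something logging accepts. A minimal sketch, offered purely as an assumption about its behavior rather than the actual implementation:

```python
import logging  # noqa: F401  (level names registered by the logging module)

def translate_loglevel(level):
    # Assumed behavior: map the schema's lowercase level names onto the
    # uppercase names understood by logging.Logger.setLevel(). The real
    # function body is not shown in this diff.
    return level.upper() if isinstance(level, str) else level
```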
siliconcompiler/tools/_common/__init__.py
CHANGED
@@ -4,7 +4,7 @@ from siliconcompiler.utils import get_file_ext
 from siliconcompiler import units, SiliconCompilerError, NodeStatus
 
 
-def get_libraries(chip, include_asic=True):
+def get_libraries(chip, include_asic=True, library=None, libraries=None):
     '''
     Returns a list of libraries included in this step/index
 
@@ -17,13 +17,29 @@ def get_libraries(chip, include_asic=True):
 
     libs = []
 
+    if not libraries:
+        libraries = set()
+
+    pref_key = []
+    if library:
+        pref_key = ['library', library]
+
+    def get_libs(*key):
+        if chip.valid(*key) and chip.get(*key, step=step, index=index):
+            return chip.get(*key, step=step, index=index)
+        return []
+
     if include_asic:
-        libs.extend(
-        libs.extend(
+        libs.extend(get_libs(*pref_key, 'asic', 'logiclib'))
+        libs.extend(get_libs(*pref_key, 'asic', 'macrolib'))
 
-
+    for lib in get_libs(*pref_key, 'option', 'library'):
+        if lib in libs or lib in libraries:
+            continue
+        libs.append(lib)
+        libs.extend(get_libraries(chip, include_asic=include_asic, library=lib, libraries=libs))
 
-    return libs
+    return set(libs)
 
 
 def add_require_input(chip, *key, include_library_files=True):
siliconcompiler/tools/surelog/bin/surelog.exe
CHANGED
Binary file
siliconcompiler/use.py
CHANGED
@@ -33,6 +33,7 @@ class PDK(PackageChip):
     Args:
         chip (Chip): A real only copy of the parent chip.
         name (string): Name of the PDK.
+        package (string): Name of the data source
     Examples:
         >>> siliconcompiler.PDK(chip, "asap7")
         Creates a flow object with name "asap7".
@@ -50,6 +51,7 @@ class FPGA(PackageChip):
     Args:
         chip (Chip): A real only copy of the parent chip.
         name (string): Name of the FPGA.
+        package (string): Name of the data source
     Examples:
         >>> siliconcompiler.FPGA(chip, "lattice_ice40")
         Creates a flow object with name "lattice_ice40".
@@ -67,10 +69,20 @@ class Library(PackageChip):
     Args:
         chip (Chip): A real only copy of the parent chip.
         name (string): Name of the library.
+        package (string): Name of the data source
+        auto_enable (boolean): If True, will automatically be added to ['option','library'].
+            This is only valid for non-logiclibs and macrolibs
     Examples:
         >>> siliconcompiler.Library(chip, "asap7sc7p5t")
         Creates a library object with name "asap7sc7p5t".
     """
+    def __init__(self, chip, name, package=None, auto_enable=False):
+        super().__init__(chip, name, package=package)
+
+        self.__auto_enable = auto_enable
+
+    def is_auto_enable(self):
+        return self.__auto_enable
 
 
 class Flow(Chip):
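A hedged usage sketch of the new auto_enable flag, combining the Library change above with the chip.use() hook added in core.py. The library name is a placeholder and the setup is intentionally minimal.

```python
from siliconcompiler import Chip, Library

chip = Chip('demo')

# 'mylib' is a placeholder name; auto_enable is the new keyword added above.
lib = Library(chip, 'mylib', auto_enable=True)

# Per the new hook in core.py, use() calls is_auto_enable() and, when it
# returns True, also appends the library to ['option', 'library'] on the
# parent chip after importing it.
chip.use(lib)
print(chip.get('option', 'library'))   # expected to include 'mylib'
```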
{siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: siliconcompiler
-Version: 0.
+Version: 0.26.0
 Summary: A compiler framework that automates translation from source code to silicon.
 Home-page: https://siliconcompiler.com
 Author: Andreas Olofsson
@@ -24,7 +24,7 @@ Requires-Dist: packaging <24,>=21.3
 Requires-Dist: psutil >=5.8.0
 Requires-Dist: Pillow ==10.4.0
 Requires-Dist: GitPython ==3.1.43
-Requires-Dist: lambdapdk
+Requires-Dist: lambdapdk >=0.1.25
 Requires-Dist: PyGithub ==2.3.0
 Requires-Dist: urllib3 >=1.26.0
 Requires-Dist: fasteners ==0.19
{siliconcompiler-0.25.0.dist-info → siliconcompiler-0.26.0.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
 siliconcompiler/__init__.py,sha256=5T-mWDc05v0FEdwg2btphrAb_W7XaXUmKrRSxuSMNUQ,535
 siliconcompiler/__main__.py,sha256=x5bzv4spw66iQOldUM-iCWw2j5NxXkkkC_Wd2hGAAgo,182
 siliconcompiler/_common.py,sha256=27VU0PqZkD6-qz20brjzj-Z8cpDt0oyE6ZA6wARZvrk,1283
-siliconcompiler/_metadata.py,sha256=
-siliconcompiler/core.py,sha256=
-siliconcompiler/flowgraph.py,sha256=
-siliconcompiler/issue.py,sha256=
+siliconcompiler/_metadata.py,sha256=OgH4k7oWlXuww125I7lBFRv2wW9PXSWEUTlGGhSQmUs,1300
+siliconcompiler/core.py,sha256=W6RLfIsqO3EZ3sp_z-1V-rYEl7nY1m_oWznOh0jkeSY,126482
+siliconcompiler/flowgraph.py,sha256=aQh0VO7LVwGNJbWZcBIgzGRv_pDH72_ilMQtK906Rqk,22502
+siliconcompiler/issue.py,sha256=jrQnDKY9G-k-KF61XkhPyLSDmOSBoWA2GHRxB4auKNA,11223
 siliconcompiler/package.py,sha256=Z2FqMRq8mtvmF6d_hyDOZN8DOZ8gu7zABDMWOfUGu-M,14463
 siliconcompiler/units.py,sha256=dYn185TzusMtBd69RFKhNlCky2td5jC__AJdPjqXELU,6069
-siliconcompiler/use.py,sha256=
+siliconcompiler/use.py,sha256=3U4cbbSWdLCObg5FeRIHpkYhqIMaHRYwu6rI6nWXjWE,4131
 siliconcompiler/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 siliconcompiler/apps/_common.py,sha256=368PjPwz0yw4baJOXPOhkgVq0uP_G77j5C6ei_eIaPs,2565
-siliconcompiler/apps/sc.py,sha256=
+siliconcompiler/apps/sc.py,sha256=2LT0rvky_3SRpsvbmZ68asAvGdagIIEW7fwMoCqVpUE,2876
 siliconcompiler/apps/sc_dashboard.py,sha256=paGJ07MQBOzekEs1mZ2mMsl-snt5qaLwHQ36rNU5FRo,3269
 siliconcompiler/apps/sc_issue.py,sha256=BJSj9kgwvtJQCsudjiuHfPguLpZR1QAuQRTyxfEAKHE,7410
-siliconcompiler/apps/sc_remote.py,sha256=
+siliconcompiler/apps/sc_remote.py,sha256=9m6g79STJG784NqzMHyxoPzexZ7ihZaYxxxLlg7cnQk,7522
 siliconcompiler/apps/sc_server.py,sha256=aeW9wldn_qO6yyv9j5drx2wv0KMk8f6s9XSZgPCerhY,933
 siliconcompiler/apps/sc_show.py,sha256=ykP-yUOViD64DRb2jR-E-v4cI6OOCw57XdWYLze-yBM,4702
 siliconcompiler/checklists/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -23,7 +23,7 @@ siliconcompiler/data/logo.png,sha256=yNR4onh4CBnkqBK_N170SMnKWOykSR-Oer6u2slCsKA
 siliconcompiler/data/RobotoMono/LICENSE.txt,sha256=Pd-b5cKP4n2tFDpdx27qJSIq0d1ok0oEcGTlbtL6QMU,11560
 siliconcompiler/data/RobotoMono/RobotoMono-Regular.ttf,sha256=w8iOaiprWYm5hBNPzFHOaddn_RgCWHLoz0FsBMTaryA,86908
 siliconcompiler/flows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-siliconcompiler/flows/_common.py,sha256=
+siliconcompiler/flows/_common.py,sha256=xu2k8gzE0RznrhXblSWLiuhvAqYRI4wbmd0JPepu_J8,2214
 siliconcompiler/flows/asicflow.py,sha256=pWonmU0iqKyk4KW4unKdAfgadp0l659NKbhXFyn6DRc,6298
 siliconcompiler/flows/asictopflow.py,sha256=EGzqEwTJLC983xA7J9Dky32sZeHQ-Ptn5uKh2DOQ70E,1207
 siliconcompiler/flows/dvflow.py,sha256=iTrmw2dRYEDCwNZysLx4N1YIe5KiVbZOnCUhmSYUsHE,2252
@@ -49,9 +49,9 @@ siliconcompiler/pdks/freepdk45.py,sha256=fl8iQQBf90qGkzBvTwxasiGR55wQwYJjTs5ht9d
 siliconcompiler/pdks/gf180.py,sha256=_CvEiHsd8VNHasH_SHdwJiSsO56o7pB6aLWm92U_WVY,211
 siliconcompiler/pdks/skywater130.py,sha256=ml5bGW6h7-aaYprmBPy4pjj4sfT0Qi2hVTcdwTy-OkY,212
 siliconcompiler/remote/__init__.py,sha256=RFKWKDF1qK928_-8-1nmANyGwkmgUJYh39CLj_HshUc,846
-siliconcompiler/remote/client.py,sha256=
-siliconcompiler/remote/schema.py,sha256=
-siliconcompiler/remote/server.py,sha256=
+siliconcompiler/remote/client.py,sha256=501HqtnMPGzQJW2PuSdFA5D9A8GBnb-6sXMm5XcWF8Y,36597
+siliconcompiler/remote/schema.py,sha256=kK1C6nvbpRrOJu61qnZKGxrar2LXg8G9JNBFTxQmWow,3941
+siliconcompiler/remote/server.py,sha256=2jw0DJISpgnhxKlMaGYBuC6YnCuaJLmGRZJAanTGQ5A,20013
 siliconcompiler/remote/server_schema/requests/cancel_job.json,sha256=D6obnSDKrf8wEmc1unX_SliR6lzLn8rvYJoiAtCJPGo,1521
 siliconcompiler/remote/server_schema/requests/check_progress.json,sha256=X-b6QQabJKUdjxd_pP3Ip2B4i4G_qVBnnvUj3anAB2s,1854
 siliconcompiler/remote/server_schema/requests/check_server.json,sha256=CKFIh-QLURfxQjsD8lwyFfCvIb5c-7_91RVsYmFpp4E,1000
@@ -72,16 +72,16 @@ siliconcompiler/report/streamlit_viewer.py,sha256=WbwcEf2t1gQihUnxnefwlAXKThwcNd
 siliconcompiler/report/summary_image.py,sha256=r8GbFJgD0ZLfYFFl8nmUhsh87wWP7evCljWWHx7_L8U,3687
 siliconcompiler/report/summary_table.py,sha256=NfSB8yHFirSCxvBROXURLzZM4UrYN2M050CScjeuFYk,2997
 siliconcompiler/report/utils.py,sha256=Yr-vcCoylN3HvlqAK0l6LLgdHfauezCmAhCzVqiHHpU,6603
-siliconcompiler/scheduler/__init__.py,sha256=
-siliconcompiler/scheduler/docker_runner.py,sha256=
+siliconcompiler/scheduler/__init__.py,sha256=4ab-Wc4LSy8HTWAkxYn474pTudHyvZq8q2a_s42k7-Y,82167
+siliconcompiler/scheduler/docker_runner.py,sha256=ZboFmi9C_TPkgQlizU3nLmdDUip5EqvN-1JoJZMFFTs,8318
 siliconcompiler/scheduler/run_node.py,sha256=Mmj2epARKCuwN6oW-PyvExwY3OzRxUrG0mPLr3SwQ6M,5201
 siliconcompiler/scheduler/send_messages.py,sha256=ZVO6923-EJWUMlDOOpLEhaSrsKtP-d4J_UcfRp6kJDo,6387
 siliconcompiler/scheduler/slurm.py,sha256=IaglZSvrHOqEDT46ZcJ19gXpJxiMm7AAO7EvVdrauZc,7305
 siliconcompiler/scheduler/validation/email_credentials.json,sha256=rJHUmTS0YyQVCeZpJI1D4WgxsXRHigZTJ6xToITziuo,1800
 siliconcompiler/schema/__init__.py,sha256=5MfwK7me_exH7qjcInSUSesM0kiGIx8FXQDj4Br2QAQ,127
-siliconcompiler/schema/schema_cfg.py,sha256=
-siliconcompiler/schema/schema_obj.py,sha256=
-siliconcompiler/schema/utils.py,sha256=
+siliconcompiler/schema/schema_cfg.py,sha256=7Ct8mKKZlgyM2OgtajgBnLcbAxzwBnQYDM81O0k5BzI,182877
+siliconcompiler/schema/schema_obj.py,sha256=R3rrGNgoyNakMmQ2HDW9XJwfar3WMKVyqWrClr38jR8,74403
+siliconcompiler/schema/utils.py,sha256=KKWtwOkXcDjaxs2f4yIuE6JCFZaapGjdLG4dQLYmH08,4111
 siliconcompiler/sphinx_ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 siliconcompiler/sphinx_ext/dynamicgen.py,sha256=bfGLUzggDEvw0GtWzt73LStlh1m90224KCPFj8PRz4s,35531
 siliconcompiler/sphinx_ext/schemagen.py,sha256=M_Qn6QaL62hem1zEc4IhF9PYoH3BzgvFY1tnPcW9W04,7776
@@ -104,7 +104,7 @@ siliconcompiler/templates/report/sc_report.j2,sha256=o6S1pwkPEYSIcw2fJaOVSflkGsF
 siliconcompiler/templates/slurm/run.sh,sha256=Il7hoBLXXaCU6gSELRUkmFYh98CLUNt0-rw1e26CCOE,223
 siliconcompiler/templates/tcl/manifest.tcl.j2,sha256=EMTCgHgEje8r0G1ZuYrCtq0A7xgAQSwrvMvgcalXx7o,2376
 siliconcompiler/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-siliconcompiler/tools/_common/__init__.py,sha256=
+siliconcompiler/tools/_common/__init__.py,sha256=l2LYD0tSOUY283Z5Ib3RXfKky9IgiP0KIsPrH2xI9aY,13839
 siliconcompiler/tools/_common/asic.py,sha256=2VqtNV-o0lmCNMT7wm-i_IbfbTA6Qxr3wXfopuGTNWQ,3163
 siliconcompiler/tools/_common/sdc/sc_constraints.sdc,sha256=4Pmema9zkVpHHRGkLUZxnz9RIW4flfq2MfwEirBv-CY,2903
 siliconcompiler/tools/_common/tcl/sc_pin_constraints.tcl,sha256=nHzkcos2sjI-ENYmvB03lLsS9YHcfvC_Aute16vTJqg,2134
@@ -199,7 +199,7 @@ siliconcompiler/tools/slang/__init__.py,sha256=BMJjbTWCumTBbshaTc5Wgjcl3kxPiPjwc
 siliconcompiler/tools/slang/lint.py,sha256=eNe82gmZgCMvLEKQJHagyP1yNWRQ23agBE3n709-Lz4,3080
 siliconcompiler/tools/surelog/parse.py,sha256=i7mU6xIWrSfuTb9ov1ZSJKipyyhvlGFFmKf8y--Qrns,6208
 siliconcompiler/tools/surelog/surelog.py,sha256=PlkIjrFGq1t8U2gxFSKPouDmcnS6LE1oTZDrXtVJh7M,5034
-siliconcompiler/tools/surelog/bin/surelog.exe,sha256=
+siliconcompiler/tools/surelog/bin/surelog.exe,sha256=VI94VJEO7otEVa76cIARSe11K-t6nQ6m4RRKld8pAf4,6422016
 siliconcompiler/tools/surelog/templates/output.v,sha256=NE9iQW-IEx0wanJSpbZQjRt-Qq2oIx78JOzlsBcd0Is,213
 siliconcompiler/tools/sv2v/convert.py,sha256=PG1cYSUil2sZDGh8Eb0dCvsTMnW7o2nUewv2LA23DCw,1837
 siliconcompiler/tools/sv2v/sv2v.py,sha256=AuMHqm109GJhz6oqvDyyrO9ICGI8FiDXKzBsdMFvDa0,1078
@@ -243,9 +243,9 @@ siliconcompiler/tools/yosys/templates/abc.const,sha256=2Ea7eZz2eHzar3RLf_l2Nb9dn
 siliconcompiler/utils/__init__.py,sha256=b3mhPeBb8HIqU-8w23h2IMLSxuDrXm53e5iSeqZrkDI,13168
 siliconcompiler/utils/asic.py,sha256=knq-raDWs1FKtfqkUbLOecdSwXezlmqb8gk9QPZWdqY,5144
 siliconcompiler/utils/showtools.py,sha256=kNaw97U6tV_MwLvWb1dme_k9E6dQVqnTT6y2zzMcXJk,1158
-siliconcompiler-0.
-siliconcompiler-0.
-siliconcompiler-0.
-siliconcompiler-0.
-siliconcompiler-0.
-siliconcompiler-0.
+siliconcompiler-0.26.0.dist-info/LICENSE,sha256=UJh7mqgsPZN3gg37jhwYnrtCUs1m19vkIA6Px7jAOPY,10956
+siliconcompiler-0.26.0.dist-info/METADATA,sha256=KAd29bPV4uUiaBagiWyd8UQpjfkWUQ8RBuShJI5ErGg,9663
+siliconcompiler-0.26.0.dist-info/WHEEL,sha256=W2kdImq02l-EUh3at9VjR6o2QIEyWCSYSH9BzhJTfwY,99
+siliconcompiler-0.26.0.dist-info/entry_points.txt,sha256=Q2sa6SsP2rjHz9oFuNyfeksaxl5Duccn_mV0fHN8pfQ,411
+siliconcompiler-0.26.0.dist-info/top_level.txt,sha256=H8TOYhnEUZAV1RJTa8JRtjLIebwHzkQUhA2wkNU2O6M,16
+siliconcompiler-0.26.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|