siliconcompiler 0.32.3__py3-none-any.whl → 0.33.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- siliconcompiler/__init__.py +19 -2
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc.py +2 -2
- siliconcompiler/apps/sc_install.py +3 -3
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +4 -4
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +5 -3
- siliconcompiler/asic.py +120 -0
- siliconcompiler/checklist.py +150 -0
- siliconcompiler/core.py +267 -289
- siliconcompiler/flowgraph.py +803 -515
- siliconcompiler/fpga.py +84 -0
- siliconcompiler/metric.py +420 -0
- siliconcompiler/optimizer/vizier.py +2 -3
- siliconcompiler/package/__init__.py +29 -6
- siliconcompiler/pdk.py +415 -0
- siliconcompiler/record.py +449 -0
- siliconcompiler/remote/client.py +6 -3
- siliconcompiler/remote/schema.py +116 -112
- siliconcompiler/remote/server.py +3 -5
- siliconcompiler/report/dashboard/cli/__init__.py +13 -722
- siliconcompiler/report/dashboard/cli/board.py +895 -0
- siliconcompiler/report/dashboard/web/__init__.py +10 -10
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -4
- siliconcompiler/report/dashboard/web/components/flowgraph.py +3 -3
- siliconcompiler/report/dashboard/web/components/graph.py +6 -3
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +4 -3
- siliconcompiler/report/html_report.py +2 -3
- siliconcompiler/report/report.py +13 -7
- siliconcompiler/report/summary_image.py +1 -1
- siliconcompiler/report/summary_table.py +3 -3
- siliconcompiler/report/utils.py +11 -10
- siliconcompiler/scheduler/__init__.py +145 -280
- siliconcompiler/scheduler/run_node.py +2 -1
- siliconcompiler/scheduler/send_messages.py +4 -4
- siliconcompiler/scheduler/slurm.py +2 -2
- siliconcompiler/schema/__init__.py +19 -2
- siliconcompiler/schema/baseschema.py +493 -0
- siliconcompiler/schema/cmdlineschema.py +250 -0
- siliconcompiler/{sphinx_ext → schema/docs}/__init__.py +3 -1
- siliconcompiler/{sphinx_ext → schema/docs}/dynamicgen.py +63 -81
- siliconcompiler/{sphinx_ext → schema/docs}/schemagen.py +73 -85
- siliconcompiler/{sphinx_ext → schema/docs}/utils.py +12 -13
- siliconcompiler/schema/editableschema.py +136 -0
- siliconcompiler/schema/journalingschema.py +238 -0
- siliconcompiler/schema/namedschema.py +41 -0
- siliconcompiler/schema/packageschema.py +101 -0
- siliconcompiler/schema/parameter.py +791 -0
- siliconcompiler/schema/parametertype.py +323 -0
- siliconcompiler/schema/parametervalue.py +736 -0
- siliconcompiler/schema/safeschema.py +37 -0
- siliconcompiler/schema/schema_cfg.py +109 -1789
- siliconcompiler/schema/utils.py +5 -68
- siliconcompiler/schema_obj.py +119 -0
- siliconcompiler/tool.py +1308 -0
- siliconcompiler/tools/_common/__init__.py +6 -10
- siliconcompiler/tools/_common/sdc/sc_constraints.sdc +1 -1
- siliconcompiler/tools/bluespec/convert.py +7 -7
- siliconcompiler/tools/builtin/_common.py +1 -1
- siliconcompiler/tools/builtin/concatenate.py +2 -2
- siliconcompiler/tools/builtin/minimum.py +1 -1
- siliconcompiler/tools/builtin/mux.py +2 -1
- siliconcompiler/tools/builtin/nop.py +1 -1
- siliconcompiler/tools/builtin/verify.py +6 -4
- siliconcompiler/tools/chisel/convert.py +4 -4
- siliconcompiler/tools/genfasm/bitstream.py +3 -3
- siliconcompiler/tools/ghdl/convert.py +1 -1
- siliconcompiler/tools/icarus/compile.py +4 -4
- siliconcompiler/tools/icepack/bitstream.py +6 -1
- siliconcompiler/tools/klayout/convert_drc_db.py +5 -0
- siliconcompiler/tools/klayout/klayout_export.py +0 -1
- siliconcompiler/tools/klayout/klayout_utils.py +3 -10
- siliconcompiler/tools/nextpnr/apr.py +6 -1
- siliconcompiler/tools/nextpnr/nextpnr.py +4 -4
- siliconcompiler/tools/openroad/_apr.py +13 -0
- siliconcompiler/tools/openroad/rdlroute.py +3 -3
- siliconcompiler/tools/openroad/scripts/apr/postamble.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +5 -5
- siliconcompiler/tools/openroad/scripts/apr/sc_antenna_repair.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_clock_tree_synthesis.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_endcap_tapcell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillercell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillmetal_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_macro_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_metrics.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_pin_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_power_grid.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_timing.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +57 -1
- siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/write_images.tcl +28 -3
- siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/sc_show.tcl +6 -6
- siliconcompiler/tools/slang/__init__.py +10 -10
- siliconcompiler/tools/surelog/parse.py +4 -4
- siliconcompiler/tools/sv2v/convert.py +20 -3
- siliconcompiler/tools/verilator/compile.py +2 -2
- siliconcompiler/tools/verilator/verilator.py +3 -3
- siliconcompiler/tools/vpr/place.py +1 -1
- siliconcompiler/tools/vpr/route.py +4 -4
- siliconcompiler/tools/vpr/screenshot.py +1 -1
- siliconcompiler/tools/vpr/show.py +5 -5
- siliconcompiler/tools/vpr/vpr.py +24 -24
- siliconcompiler/tools/xdm/convert.py +2 -2
- siliconcompiler/tools/xyce/simulate.py +1 -1
- siliconcompiler/tools/yosys/sc_synth_asic.tcl +74 -68
- siliconcompiler/tools/yosys/syn_asic.py +2 -2
- siliconcompiler/toolscripts/_tools.json +7 -7
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +0 -2
- siliconcompiler/utils/__init__.py +8 -112
- siliconcompiler/utils/flowgraph.py +339 -0
- siliconcompiler/{issue.py → utils/issue.py} +4 -3
- siliconcompiler/utils/logging.py +1 -2
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/METADATA +9 -8
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/RECORD +151 -134
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/entry_points.txt +8 -8
- siliconcompiler/schema/schema_obj.py +0 -1936
- siliconcompiler/toolscripts/ubuntu20/install-vpr.sh +0 -29
- siliconcompiler/toolscripts/ubuntu20/install-yosys-parmys.sh +0 -61
- /siliconcompiler/{templates → data/templates}/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/general.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/email/summary.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/issue/README.txt +0 -0
- /siliconcompiler/{templates → data/templates}/issue/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/issue/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.py.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.sh.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/requirements.txt +0 -0
- /siliconcompiler/{templates → data/templates}/replay/setup.sh +0 -0
- /siliconcompiler/{templates → data/templates}/report/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.css +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.js +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap_LICENSE.md +0 -0
- /siliconcompiler/{templates → data/templates}/report/sc_report.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/manifest.tcl.j2 +0 -0
- /siliconcompiler/{units.py → utils/units.py} +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/top_level.txt +0 -0
siliconcompiler/flowgraph.py
CHANGED
```diff
@@ -1,576 +1,864 @@
-import
-import math
-from siliconcompiler import SiliconCompilerError, NodeStatus
-from siliconcompiler.tools._common import input_file_node_name, get_tool_task
+import inspect

+from siliconcompiler import Schema
+from siliconcompiler.schema import BaseSchema, NamedSchema
+from siliconcompiler.schema import EditableSchema, Parameter, Scope
+from siliconcompiler.schema.utils import trim

-
-    '''
-    Assumes a flowgraph with valid edges for the inputs
-    '''
-    nodes_to_execute = []
-    for from_node in from_nodes:
-        for node in _nodes_to_execute_recursive(chip, flow, from_node, to_nodes, prune_nodes):
-            if node not in nodes_to_execute:
-                nodes_to_execute.append(node)
-    return nodes_to_execute
-
-
-def _nodes_to_execute_recursive(chip, flow, from_node, to_nodes, prune_nodes, path=[]):
-    path = path.copy()
-    nodes_to_execute = []
-
-    if from_node in prune_nodes:
-        return []
-    if from_node in path:
-        raise SiliconCompilerError(f'Path {path} would form a circle with {from_node}')
-    path.append(from_node)
-
-    if from_node in to_nodes:
-        for node in path:
-            nodes_to_execute.append(node)
-    for output_node in _get_flowgraph_node_outputs(chip, flow, from_node):
-        for node in _nodes_to_execute_recursive(chip, flow, output_node, to_nodes,
-                                                prune_nodes, path=path):
-            if node not in nodes_to_execute:
-                nodes_to_execute.append(node)
-
-    return nodes_to_execute
-
-
-def _unreachable_steps_to_execute(chip, flow, cond=lambda _: True):
-    from_nodes = set(_get_execution_entry_nodes(chip, flow))
-    to_nodes = set(_get_execution_exit_nodes(chip, flow))
-    prune_nodes = chip.get('option', 'prune')
-    reachable_nodes = set(_reachable_flowgraph_nodes(chip, flow, from_nodes, cond=cond,
-                                                     prune_nodes=prune_nodes))
-    unreachable_nodes = to_nodes.difference(reachable_nodes)
-    unreachable_steps = set()
-    for unreachable_node in unreachable_nodes:
-        if not any(filter(lambda node: node[0] == unreachable_node[0], reachable_nodes)):
-            unreachable_steps.add(unreachable_node[0])
-    return unreachable_steps
-
-
-def _reachable_flowgraph_nodes(chip, flow, from_nodes, cond=lambda _: True, prune_nodes=[]):
-    visited_nodes = set()
-    current_nodes = from_nodes.copy()
-    while current_nodes:
-        current_nodes_copy = current_nodes.copy()
-        for current_node in current_nodes_copy:
-            if current_node in prune_nodes:
-                current_nodes.remove(current_node)
-                continue
-            if cond(current_node):
-                visited_nodes.add(current_node)
-                current_nodes.remove(current_node)
-                outputs = _get_flowgraph_node_outputs(chip, flow, current_node)
-                current_nodes.update(outputs)
-        if current_nodes == current_nodes_copy:
-            break
-    return visited_nodes
-
-
-def _get_flowgraph_node_inputs(chip, flow, node):
-    step, index = node
-    inputs = set()
-    for in_node in chip.get('flowgraph', flow, step, index, 'input'):
-        if chip.get('record', 'status', step=in_node[0], index=in_node[1]) == \
-                NodeStatus.SKIPPED:
-            inputs.update(_get_flowgraph_node_inputs(chip, flow, in_node))
-        else:
-            inputs.add(in_node)
-    return list(inputs)
+from siliconcompiler import NodeStatus


-
-
-
-    return _reachable_flowgraph_nodes(chip, flow, from_nodes, prune_nodes=prune_nodes)
+class FlowgraphSchema(NamedSchema):
+    def __init__(self, name=None):
+        super().__init__(name=name)

+        schema = EditableSchema(self)
+        schema.insert("default", "default", FlowgraphNodeSchema())

-
-    prune_nodes = chip.get('option', 'prune')
-    pruned_flowgraph_nodes = _get_pruned_flowgraph_nodes(chip, flow, prune_nodes)
-    return list(filter(lambda node: node in pruned_flowgraph_nodes,
-                       _get_flowgraph_node_inputs(chip, flow, node)))
+        self.__clear_cache()

+    def __clear_cache(self):
+        '''
+        Clear the cache of node information
+        '''

-
-
+        self.__cache_nodes = None
+        self.__cache_nodes_entry = None
+        self.__cache_nodes_exit = None
+        self.__cache_execution_order_forward = None
+        self.__cache_execution_order_reverse = None

-
-    for iter_node in iter_nodes:
-        iter_node_inputs = chip.get('flowgraph', flow, *iter_node, 'input')
-        if node in iter_node_inputs:
-            node_outputs.append(iter_node)
+        self.__cache_node_outputs = None

-
+    def node(self, step, task, index=0):
+        '''
+        Creates a flowgraph node.

+        Creates a flowgraph node by binding a step to a tool specific task.
+        A tool can be an external executable or one of the built in functions
+        in the SiliconCompiler framework). Built in functions include: minimum,
+        maximum, join, mux, verify.

-
-    nodes = []
-    for step in chip.getkeys('flowgraph', flow):
-        if steps and step not in steps:
-            continue
-        for index in chip.getkeys('flowgraph', flow, step):
-            if indices and index not in indices:
-                continue
-            nodes.append((step, index))
-    return nodes
+        The method modifies the following schema parameters:

+        * [<step>,<index>,tool,<tool>]
+        * [<step>,<index>,task,<task>]
+        * [<step>,<index>,taskmodule,<taskmodule>]

-
-
-
-
-    if chip.get('arg', 'step'):
-        return _get_flowgraph_nodes(chip, flow, steps=[chip.get('arg', 'step')])
-    # If we explicitly get the nodes for a flow other than the current one,
-    # Ignore the 'option' 'from'
-    if chip.get('option', 'flow') == flow and chip.get('option', 'from'):
-        return _get_flowgraph_nodes(chip, flow, steps=chip.get('option', 'from'))
-    return _get_flowgraph_entry_nodes(chip, flow)
+        Args:
+            step (str): Step name
+            task (module/str): Task to associate with this node
+            index (int/str): Step index

+        Examples:
+            >>> import siliconcomiler.tools.openroad.place as place
+            >>> flow.node('apr_place', place, index=0)
+            Creates a 'place' task with step='apr_place' and index=0 and binds it to the
+            'openroad' tool.
+        '''

-
-
-    Collect all step/indices that represent the entry
-    nodes for the flowgraph
-    '''
-    nodes = []
-    for (step, index) in _get_flowgraph_nodes(chip, flow, steps=steps):
-        if not chip.get('flowgraph', flow, step, index, 'input'):
-            nodes.append((step, index))
-    return nodes
-
-
-def _get_execution_exit_nodes(chip, flow):
-    if chip.get('arg', 'step') and chip.get('arg', 'index'):
-        return [(chip.get('arg', 'step'), chip.get('arg', 'index'))]
-    if chip.get('arg', 'step'):
-        return _get_flowgraph_nodes(chip, flow, steps=[chip.get('arg', 'step')])
-    # If we explicitly get the nodes for a flow other than the current one,
-    # Ignore the 'option' 'to'
-    if chip.get('option', 'flow') == flow and chip.get('option', 'to'):
-        return _get_flowgraph_nodes(chip, flow, steps=chip.get('option', 'to'))
-    return _get_flowgraph_exit_nodes(chip, flow)
-
-
-#######################################
-def _get_flowgraph_exit_nodes(chip, flow, steps=None):
-    '''
-    Collect all step/indices that represent the exit
-    nodes for the flowgraph
-    '''
-    inputnodes = []
-    for (step, index) in _get_flowgraph_nodes(chip, flow, steps=steps):
-        inputnodes.extend(chip.get('flowgraph', flow, step, index, 'input'))
-    nodes = []
-    for (step, index) in _get_flowgraph_nodes(chip, flow, steps=steps):
-        if (step, index) not in inputnodes:
-            nodes.append((step, index))
-    return nodes
-
-
-#######################################
-def _get_flowgraph_execution_order(chip, flow, reverse=False):
-    '''
-    Generates a list of nodes in the order they will be executed.
-    '''
+        if step in (Schema.GLOBAL_KEY, 'default', 'sc_collected_files'):
+            raise ValueError(f"{step} is a reserved name")

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # Collect execution order of nodes
-    if reverse:
-        order = [set(_get_flowgraph_exit_nodes(chip, flow))]
-    else:
-        order = [set(_get_flowgraph_entry_nodes(chip, flow))]
-
-    visited = set()
-    while True:
-        next_level = set()
-        next_visited = set()
-        for step, index in sorted(order[-1]):
-            if (step, index) not in rev_ex_map:
-                # No edges so assume inputs are okay
-                inputs_valid = True
-            else:
-                inputs_valid = all([node in visited for node in rev_ex_map[(step, index)]])
+        index = str(index)
+        if index in (Schema.GLOBAL_KEY, 'default'):
+            raise ValueError(f"{index} is a reserved name")
+
+        # Determine task name and module
+        task_module = None
+        if isinstance(task, str):
+            task_module = task
+        elif inspect.ismodule(task):
+            task_module = task.__name__
+        else:
+            raise ValueError(f"{task} is not a string or module and cannot be used to "
+                             "setup a task.")

-
-
-
-
+        task_parts = task_module.split('.')
+        if len(task_parts) < 2:
+            raise ValueError(f"{task} is not a valid task, it must be associated with "
+                             "a tool '<tool>.<task>'.")
+
+        tool_name, task_name = task_parts[-2:]
+
+        # bind tool to node
+        self.set(step, index, 'tool', tool_name)
+        self.set(step, index, 'task', task_name)
+        self.set(step, index, 'taskmodule', task_module)
+
+        self.__clear_cache()
+
+    def edge(self, tail, head, tail_index=0, head_index=0):
+        '''
+        Creates a directed edge from a tail node to a head node.
+
+        Connects the output of a tail node with the input of a head node by
+        setting the 'input' field of the head node in the schema flowgraph.
+
+        The method modifies the following parameters:
+
+        [<head>,<head_index>,input]
+
+        Args:
+            tail (str): Name of tail node
+            head (str): Name of head node
+            tail_index (int/str): Index of tail node to connect
+            head_index (int/str): Index of head node to connect
+
+        Examples:
+            >>> chip.edge('place', 'cts')
+            Creates a directed edge from place to cts.
+        '''
+        head_index = str(head_index)
+        tail_index = str(tail_index)
+
+        for step, index in [(head, head_index), (tail, tail_index)]:
+            if not self.valid(step, index):
+                raise ValueError(f"{step}{index} is not a defined node in {self.name()}.")
+
+        tail_node = (tail, tail_index)
+        if tail_node in self.get(head, head_index, 'input'):
+            return
+
+        self.add(head, head_index, 'input', tail_node)
+
+        self.__clear_cache()
+
+    def remove_node(self, step, index=None):
+        '''
+        Remove a flowgraph node.
+
+        Args:
+            step (str): Step name
+            index (int/str): Step index
+        '''
+
+        if step not in self.getkeys():
+            raise ValueError(f'{step} is not a valid step in {self.name()}')
+
+        if index is None:
+            # Iterate over all indexes
+            for index in self.getkeys(step):
+                self.remove_node(step, index)
+            return
+
+        index = str(index)
+        if index not in self.getkeys(step):
+            raise ValueError(f'{index} is not a valid index for {step} in {self.name()}')
+
+        # Save input edges
+        node = (step, index)
+        node_inputs = self.get(step, index, 'input')
+
+        # remove node
+        self.remove(step, index)
+
+        # remove step if all nodes a gone
+        if len(self.getkeys(step)) == 0:
+            self.remove(step)
+
+        for flow_step in self.getkeys():
+            for flow_index in self.getkeys(flow_step):
+                inputs = self.get(flow_step, flow_index, 'input')
+                if node in inputs:
+                    inputs = [inode for inode in inputs if inode != node]
+                    inputs.extend(node_inputs)
+                self.set(flow_step, flow_index, 'input', sorted(set(inputs)))
+
+        self.__clear_cache()
+
+    ###########################################################################
+    def graph(self, subflow, name=None):
+        '''
+        Instantiates a named flow as a graph in the current flowgraph.
+
+        Args:
+            subflow (str): Name of flow to instantiate
+            name (str): Name of instance
+
+        Examples:
+            >>> chip.graph(asicflow)
+            Instantiates a flow named 'asicflow'.
+        '''
+        if not isinstance(subflow, FlowgraphSchema):
+            raise ValueError(f"subflow must a FlowgraphSchema, not: {type(subflow)}")
+
+        for step in subflow.getkeys():
+            # uniquify each step
+            if name is None:
+                newstep = step
             else:
-
+                newstep = name + "." + step

-
+            if newstep in self.getkeys():
+                raise ValueError(f"{newstep} is already defined")

-
-
+            # forward information
+            for keys in subflow.allkeys(step):
+                self.set(newstep, *keys, subflow.get(step, *keys))

-
+            if name is None:
+                continue

-
-
-
-
-
-
-
+            # rename inputs
+            for index in self.getkeys(newstep):
+                all_inputs = self.get(newstep, index, 'input')
+                self.set(newstep, index, 'input', [])
+                for in_step, in_index in all_inputs:
+                    newin = name + "." + in_step
+                    self.add(newstep, index, 'input', (newin, in_index))

-
+        self.__clear_cache()

-
+    def get_nodes(self):
+        '''
+        Returns all the nodes defined in this flowgraph
+        '''
+        if self.__cache_nodes is not None:
+            return self.__cache_nodes

+        nodes = []
+        for step in self.getkeys():
+            for index in self.getkeys(step):
+                nodes.append((step, index))

-
-    from_nodes = _get_flowgraph_entry_nodes(chip, flow)
-    return get_nodes_from(chip, flow, from_nodes)
+        self.__cache_nodes = tuple(sorted(set(nodes)))

+        return self.__cache_nodes

-def
-
-
-
-
-                              set(to_nodes),
-                              set(chip.get('option', 'prune')))
+    def get_entry_nodes(self):
+        '''
+        Collect all step/indices that represent the entry
+        nodes for the flowgraph
+        '''

+        if self.__cache_nodes_entry is not None:
+            return self.__cache_nodes_entry

-
-
-
-
-    This takes the from/to options into account if flow is the current flow or None.
+        nodes = []
+        for step, index in self.get_nodes():
+            if not self.get(step, index, 'input'):
+                nodes.append((step, index))

-
-    A list of nodes that will get executed during run() (or a specific flow).
+        self.__cache_nodes_entry = tuple(sorted(set(nodes)))

-
-    >>> nodes = nodes_to_execute()
-    '''
-    if flow is None:
-        flow = chip.get('option', 'flow')
+        return self.__cache_nodes_entry

-
-
-
-
-
-    return _nodes_to_execute(chip, flow, from_nodes, to_nodes, prune_nodes)
+    def get_exit_nodes(self):
+        '''
+        Collect all step/indices that represent the exit
+        nodes for the flowgraph
+        '''

+        if self.__cache_nodes_exit is not None:
+            return self.__cache_nodes_exit

-
-
-
-
+        inputnodes = []
+        for step, index in self.get_nodes():
+            inputnodes.extend(self.get(step, index, 'input'))
+        nodes = []
+        for step, index in self.get_nodes():
+            if (step, index) not in inputnodes:
+                nodes.append((step, index))

-
-    * Checks that there are no duplicate edges
-    * Checks if from/to is valid
+        self.__cache_nodes_exit = tuple(sorted(set(nodes)))

-
-    '''
+        return self.__cache_nodes_exit

-
-
+    def get_execution_order(self, reverse=False):
+        '''
+        Generates a list of nodes in the order they will be executed.

-
+        Args:
+            reverse (boolean): if True, the nodes will be ordered from exit nodes
+                to entry nodes.
+        '''

-
-
-
-
-
+        if reverse:
+            if self.__cache_execution_order_reverse is not None:
+                return self.__cache_execution_order_reverse
+        else:
+            if self.__cache_execution_order_forward is not None:
+                return self.__cache_execution_order_forward
+
+        # Generate execution edges lookup map
+        ex_map = {}
+        for step, index in self.get_nodes():
+            for istep, iindex in self.get(step, index, 'input'):
+                if reverse:
+                    ex_map.setdefault((step, index), set()).add((istep, iindex))
+                else:
+                    ex_map.setdefault((istep, iindex), set()).add((step, index))
+
+        rev_ex_map = {}
+        for node, edges in ex_map.items():
+            for step, index in edges:
+                rev_ex_map.setdefault((step, index), set()).add(node)
+
+        # Collect execution order of nodes
+        if reverse:
+            order = [set(self.get_exit_nodes())]
+        else:
+            order = [set(self.get_entry_nodes())]
+
+        visited = set()
+        while True:
+            next_level = set()
+            next_visited = set()
+            for step, index in sorted(order[-1]):
+                if (step, index) not in rev_ex_map:
+                    # No edges so assume inputs are okay
+                    inputs_valid = True
+                else:
+                    inputs_valid = all([node in visited for node in rev_ex_map[(step, index)]])
+
+                if inputs_valid:
+                    next_visited.add((step, index))
+                    if (step, index) in ex_map:
+                        next_level.update(ex_map.pop((step, index)))
+                else:
+                    next_level.add((step, index))
+
+            visited.update(next_visited)
+
+            if not next_level:
+                break
+
+            order.append(next_level)
+
+        # Filter duplicates from flow
+        used_nodes = set()
+        exec_order = []
+        order.reverse()
+        for n, level_nodes in enumerate(order):
+            exec_order.append(list(level_nodes.difference(used_nodes)))
+            used_nodes.update(level_nodes)
+
+        exec_order.reverse()
+
+        ordering = tuple([tuple(sorted(level)) for level in exec_order])
+
+        if reverse:
+            self.__cache_execution_order_reverse = ordering
+            return self.__cache_execution_order_reverse
+        else:
+            self.__cache_execution_order_forward = ordering
+            return self.__cache_execution_order_forward

-
-
-
-            chip.logger.error(f'Duplicate edge from {in_step}{in_index} to '
-                              f'{step}{index} in the {flow} flowgraph')
-            error = True
+    def get_node_outputs(self, step, index):
+        '''
+        Returns the nodes the given nodes provides input to.

-
-
-
+        Args:
+            step (str): step name
+            index (str/int) index name
+        '''

-
-            chip.logger.error(f'{step}{index} is missing a tool definition in the {flow} '
-                              'flowgraph')
-            error = True
+        index = str(index)

-        if not
-
-                              'flowgraph')
-            error = True
+        if (step, index) not in self.get_nodes():
+            raise ValueError(f"{step}{index} is not a valid node")

-
-
-            chip.logger.error(f'{step} is not defined in the {flow} flowgraph')
-            error = True
+        if self.__cache_node_outputs is not None:
+            return self.__cache_node_outputs[(step, index)]

-
-        if step not in chip.getkeys('flowgraph', flow):
-            chip.logger.error(f'{step} is not defined in the {flow} flowgraph')
-            error = True
+        self.__cache_node_outputs = {}

-
-
-
-
-
-
-
+        input_map = {}
+        for istep, iindex in self.get_nodes():
+            input_map[(istep, iindex)] = self.get(istep, iindex, 'input')
+            self.__cache_node_outputs[(istep, iindex)] = set()
+
+        for src_node, dst_nodes in input_map.items():
+            for dst_node in dst_nodes:
+                if dst_node not in self.__cache_node_outputs:
+                    self.__cache_node_outputs[dst_node] = set()
+                self.__cache_node_outputs[dst_node].add(src_node)
+
+        self.__cache_node_outputs = {
+            node: tuple(sorted(outputs)) for node, outputs in self.__cache_node_outputs.items()
+        }

-
-
-
-
-
+        return self.__cache_node_outputs[(step, index)]
+
+    def __find_loops(self, step, index, path=None):
+        '''
+        Search for loops in the graph.
+
+        Args:
+            step (str): step name to start from
+            index (str) index name to start from
+            path (list of nodes): path in graph so far
+        '''
+        if path is None:
+            path = []
+
+        if (step, index) in path:
+            path.append((step, index))
+            return path
+
+        path.append((step, index))
+
+        for ostep, oindex in self.get_node_outputs(step, index):
+            loop_path = self.__find_loops(ostep, oindex, path=path.copy())
+            if loop_path:
+                return loop_path
+
+        return None
+
+    def validate(self, logger=None):
+        '''
+        Check if flowgraph is valid.
+
+        * Checks if all edges have valid nodes
+        * Checks that there are no duplicate edges
+        * Checks if nodes are defined properly
+        * Checks if there are any loops present in the graph
+
+        Returns True if valid, False otherwise.
+
+        Args:
+            logger (logging.Logger): logger to use for reporting
+        '''
+
+        error = False
+
+        check_nodes = set()
+        for step, index in self.get_nodes():
+            check_nodes.add((step, index))
+            input_nodes = self.get(step, index, 'input')
+            check_nodes.update(input_nodes)
+
+            for node in set(input_nodes):
+                if input_nodes.count(node) > 1:
+                    in_step, in_index = node
+                    if logger:
+                        logger.error(f'Duplicate edge from {in_step}{in_index} to '
+                                     f'{step}{index} in the {self.name()} flowgraph')
+                    error = True
+
+        diff_nodes = check_nodes.difference(self.get_nodes())
+        if diff_nodes:
+            if logger:
+                for step, index in diff_nodes:
+                    logger.error(f'{step}{index} is missing in the {self.name()} flowgraph')
+            error = True

-
+        # Detect missing definitions
+        for step, index in self.get_nodes():
+            for item in ('tool', 'task', 'taskmodule'):
+                if not self.get(step, index, item):
+                    if logger:
+                        logger.error(f'{step}{index} is missing a {item} definition in the '
+                                     f'{self.name()} flowgraph')
+                    error = True
+
+        # detect loops
+        for start_step, start_index in self.get_entry_nodes():
+            loop_path = self.__find_loops(start_step, start_index)
+            if loop_path:
+                error = True
+                if logger:
+                    loop_path = [f"{step}{index}" for step, index in loop_path]
+                    logger.error(f"{' -> '.join(loop_path)} forms a loop in {self.name()}")

+        return not error

-###########################################################################
-def _check_flowgraph_io(chip, nodes=None):
-    '''Check if flowgraph is valid in terms of input and output files.

-
+class RuntimeFlowgraph:
     '''
-
-
-
-
-
-
-
-
-
-
-
-
-
-        # Get files we receive from input nodes.
-        in_nodes = _get_flowgraph_node_inputs(chip, flow, (step, index))
-        all_inputs = set()
-        requirements = chip.get('tool', tool, 'task', task, 'input', step=step, index=index)
-        for in_step, in_index in in_nodes:
-            if (in_step, in_index) not in nodes:
-                # If we're not running the input step, the required
-                # inputs need to already be copied into the build
-                # directory.
-                workdir = chip.getworkdir(step=in_step, index=in_index)
-                in_step_out_dir = os.path.join(workdir, 'outputs')
-
-                if not os.path.isdir(in_step_out_dir):
-                    # This means this step hasn't been run, but that
-                    # will be flagged by a different check. No error
-                    # message here since it would be redundant.
-                    inputs = []
-                    continue
+    Runtime representation of a flowgraph
+
+    Args:
+        base (:class:`FlowgraphSchema`): base flowgraph for this runtime
+        args (tuple of step, index): specific node to apply runtime to
+        from_steps (list of steps): steps to start the runtime from
+        to_steps (list of steps): step to end the runtime at
+        prune_nodes (list of nodes): nodes to remove from execution
+    '''
+    def __init__(self, base, args=None, from_steps=None, to_steps=None, prune_nodes=None):
+        if not all([hasattr(base, attr) for attr in dir(FlowgraphSchema)]):
+            raise ValueError(f"base must a FlowgraphSchema, not: {type(base)}")

-
-
-
+        self.__base = base
+
+        if args and args[0] is not None:
+            from_steps = None
+            to_steps = None
+            prune_nodes = None
+
+            step, index = args
+            if index is None:
+                self.__from = [(step, index) for index in self.__base.getkeys(step)]
             else:
-
-
-            for inp in inputs:
-                node_inp = input_file_node_name(inp, in_step, in_index)
-                if node_inp in requirements:
-                    inp = node_inp
-                if inp in all_inputs:
-                    chip.logger.error(f'Invalid flow: {step}{index} '
-                                      f'receives {inp} from multiple input tasks')
-                    return False
-                all_inputs.add(inp)
-
-    for requirement in requirements:
-        if requirement not in all_inputs:
-            chip.logger.error(f'Invalid flow: {step}{index} will '
-                              f'not receive required input {requirement}.')
-            return False
-
-    return True
-
-
-###########################################################################
-def _gather_outputs(chip, step, index):
-    '''Return set of filenames that are guaranteed to be in outputs
-    directory after a successful run of step/index.'''
-
-    flow = chip.get('option', 'flow')
-    task_gather = getattr(chip._get_task_module(step, index, flow=flow, error=False),
-                          '_gather_outputs',
-                          None)
-    if task_gather:
-        return set(task_gather(chip, step, index))
-
-    tool, task = get_tool_task(chip, step, index, flow=flow)
-    return set(chip.get('tool', tool, 'task', task, 'output', step=step, index=index))
-
-
-def _get_flowgraph_information(chip, flow, io=True):
-    from siliconcompiler.scheduler import _setup_node
-    from siliconcompiler.tools._common import input_provides, input_file_node_name
-
-    # Save schema to avoid making permanent changes
-    org_schema = chip.schema
-    chip.schema = chip.schema.copy()
-
-    # Setup nodes
-    node_exec_order = _get_flowgraph_execution_order(chip, flow)
-    if io:
-        # try:
-        for layer_nodes in node_exec_order:
-            for step, index in layer_nodes:
-                _setup_node(chip, step, index, flow=flow)
-        # except: # noqa E722
-        # io = False
-
-    node_rank = {}
-    for rank, rank_nodes in enumerate(node_exec_order):
-        for step, index in rank_nodes:
-            node_rank[f'{step}{index}'] = rank
-
-    graph_inputs = {}
-    all_graph_inputs = set()
-    if io:
-        for step, index in _get_flowgraph_nodes(chip, flow):
-            tool, task = get_tool_task(chip, step, index, flow=flow)
-            for keypath in chip.get('tool', tool, 'task', task, 'require', step=step, index=index):
-                key = tuple(keypath.split(','))
-                if key[0] == 'input':
-                    graph_inputs.setdefault((step, index), set()).add(keypath)
-
-    for inputs in graph_inputs.values():
-        all_graph_inputs.update(inputs)
-
-    exit_nodes = [f'{step}{index}' for step, index in _get_flowgraph_exit_nodes(chip, flow)]
-
-    nodes = {}
-    edges = []
-
-    def clean_label(label):
-        return label.replace("<", "").replace(">", "")
-
-    def clean_text(label):
-        return label.replace("<", r"\<").replace(">", r"\>")
-
-    all_nodes = [(step, index) for step, index in sorted(_get_flowgraph_nodes(chip, flow))
-                 if chip.get('record', 'status', step=step, index=index) != NodeStatus.SKIPPED]
-    for step, index in all_nodes:
-        tool, task = get_tool_task(chip, step, index, flow=flow)
-
-        if io:
-            inputs = chip.get('tool', tool, 'task', task, 'input', step=step, index=index)
-            outputs = chip.get('tool', tool, 'task', task, 'output', step=step, index=index)
-            if chip.get('record', 'status', step=step, index=index) == NodeStatus.SKIPPED:
-                continue
+                self.__from = [(step, index)]
+            self.__to = self.__from
         else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if from_steps:
+                self.__from = []
+                for step in from_steps:
+                    try:
+                        self.__from.extend([(step, index) for index in self.__base.getkeys(step)])
+                    except KeyError:
+                        pass
+            else:
+                self.__from = self.__base.get_entry_nodes()
+
+            if to_steps:
+                self.__to = []
+                for step in to_steps:
+                    try:
+                        self.__to.extend([(step, index) for index in self.__base.getkeys(step)])
+                    except KeyError:
+                        pass
+            else:
+                self.__to = self.__base.get_exit_nodes()
+
+        self.__from = sorted(set(self.__from))
+        self.__to = sorted(set(self.__to))
+
+        if not prune_nodes:
+            prune_nodes = set()
+        self.__prune = sorted(set(prune_nodes))
+
+        # remove pruned from and tos
+        self.__from = [node for node in self.__from if node not in self.__prune]
+        self.__to = [node for node in self.__to if node not in self.__prune]
+
+        self.__compute_graph()
+
+    def __walk_graph(self, node, path=None, reverse=True):
+        if node in self.__prune:
+            return set()
+
+        if path is None:
+            path = []
+
+        if node in path:
+            return set(path)
+
+        path.append(node)
+        if reverse:
+            if node in self.__from:
+                return set(path)
+        else:
+            if node in self.__to:
+                return set(path)
+
+        nodes = set()
+        if reverse:
+            for input_node in self.__base.get(*node, "input"):
+                nodes.update(self.__walk_graph(input_node, path=path, reverse=reverse))
+        else:
+            for output_node in self.__base.get_node_outputs(*node):
+                nodes.update(self.__walk_graph(output_node, path=path, reverse=reverse))
+        return nodes
+
+    def __compute_graph(self):
+        '''
+        Precompute graph information
+        '''
+
+        self.__nodes = set()
+        for entry in self.__to:
+            self.__nodes.update(self.__walk_graph(entry))
+        self.__nodes = tuple(sorted(self.__nodes))
+
+        # Update to and from
+        self.__from = tuple([
+            node for node in self.__from
+            if not self.__base.get(*node, "input") or
+            all([in_node not in self.__nodes for in_node in self.__base.get(*node, "input")])
+        ])
+        self.__to = tuple([
+            node for node in self.__to
+            if not self.__base.get_node_outputs(*node) or
+            all([out_node not in self.__nodes for out_node in self.__base.get_node_outputs(*node)])
+        ])
+
+        ordering = []
+        for level_nodes in self.__base.get_execution_order():
+            level_exec = [node for node in level_nodes if node in self.__nodes]
+            if level_exec:
+                ordering.append(tuple(level_exec))
+        self.__execution_order = tuple(ordering)
+
+    def get_nodes(self):
+        '''
+        Returns the nodes available in this graph
+        '''
+        return self.__nodes
+
+    def get_execution_order(self):
+        '''
+        Returns the execution order of the nodes
+        '''
+        return self.__execution_order
+
+    def get_entry_nodes(self):
+        '''
+        Returns the entry nodes for this graph
+        '''
+        return self.__from
+
+    def get_exit_nodes(self):
+        '''
+        Returns the exit nodes for this graph
+        '''
+        return self.__to
+
+    def get_nodes_starting_at(self, step, index):
+        '''
+        Returns all the nodes that the given step, index connect to
+
+        Args:
+            step (str): step to start from
+            index (str/int): index to start from
+        '''
+        index = str(index)
+
+        if (step, index) not in self.get_nodes():
+            raise ValueError(f"{step}{index} is not a valid node")
+
+        return tuple(sorted(self.__walk_graph((step, str(index)), reverse=False)))
+
+    def get_node_inputs(self, step, index, record=None):
+        if (step, index) not in self.get_nodes():
+            raise ValueError(f"{step}{index} is not a valid node")
+
+        if record is None:
+            inputs = set()
+            for in_step, in_index in self.__base.get(step, index, "input"):
+                if (in_step, in_index) not in self.get_nodes():
+                    continue
+                inputs.add((in_step, in_index))
+            return sorted(inputs)
+
+        inputs = set()
+        for in_step, in_index in self.__base.get(step, index, "input"):
+            if (in_step, in_index) not in self.get_nodes():
+                continue
+
+            if record.get("status", step=in_step, index=in_index) == NodeStatus.SKIPPED:
+                inputs.update(self.get_node_inputs(in_step, in_index, record=record))
            else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                inputs.add((in_step, in_index))
+        return sorted(inputs)
+
+    def get_completed_nodes(self, record=None):
+        if not record:
+            return []
+
+        nodes = set()
+        for step, index in self.get_nodes():
+            if NodeStatus.is_success(record.get("status", step=step, index=index)):
+                nodes.add((step, index))
+
+        return sorted(nodes)
+
+    @staticmethod
+    def validate(flow, from_steps=None, to_steps=None, prune_nodes=None, logger=None):
+        all_steps = set([step for step, _ in flow.get_nodes()])
+
+        if from_steps:
+            from_steps = set(from_steps)
+        else:
+            from_steps = set()
+
+        if to_steps:
+            to_steps = set(to_steps)
+        else:
+            to_steps = set()
+
+        if prune_nodes:
+            prune_nodes = set(prune_nodes)
        else:
-
-        for in_step, in_index in chip.get('flowgraph', flow, step, index, 'input'):
-            all_inputs.append(f'{in_step}{in_index}')
-        for item in all_inputs:
-            edges.append((item, node, 1 if node in exit_nodes else 2))
+            prune_nodes = set()

-
-
+        error = False
+
+        # Check for undefined steps
+        for step in sorted(from_steps.difference(all_steps)):
+            if logger:
+                logger.error(f'From {step} is not defined in the {flow.name()} flowgraph')
+            error = True
+
+        for step in sorted(to_steps.difference(all_steps)):
+            if logger:
+                logger.error(f'To {step} is not defined in the {flow.name()} flowgraph')
+            error = True
+
+        # Check for undefined prunes
+        for step, index in sorted(prune_nodes.difference(flow.get_nodes())):
+            if logger:
+                logger.error(f'{step}{index} is not defined in the {flow.name()} flowgraph')
+            error = True
+
+        if not error:
+            runtime = RuntimeFlowgraph(
+                flow,
+                from_steps=from_steps,
+                to_steps=to_steps,
+                prune_nodes=prune_nodes)
+            unpruned = RuntimeFlowgraph(
+                flow,
+                from_steps=from_steps,
+                to_steps=to_steps)
+
+            # Check for missing entry or exit steps
+            unpruned_exits = set([step for step, _ in unpruned.get_exit_nodes()])
+            runtime_exits = set([step for step, _ in runtime.get_exit_nodes()])
+            for step in unpruned_exits.difference(runtime_exits):
+                if logger:
+                    logger.error(f'pruning removed all exit nodes for {step} in the {flow.name()} '
+                                 'flowgraph')
+                error = True
+
+            unpruned_entry = set([step for step, _ in unpruned.get_entry_nodes()])
+            runtime_entry = set([step for step, _ in runtime.get_entry_nodes()])
+            for step in unpruned_entry.difference(runtime_entry):
+                if logger:
+                    logger.error(f'pruning removed all entry nodes for {step} in the {flow.name()} '
+                                 'flowgraph')
+                error = True

-
+            if not error:
+                # Check for missing paths
+                missing = []
+                found_any = False
+                for entrynode in runtime.get_entry_nodes():
+                    found = False
+                    for exitnode in runtime.get_exit_nodes():
+                        if entrynode in runtime.__walk_graph(exitnode):
+                            found = True
+                    if not found:
+                        exits = ",".join([f"{step}{index}"
+                                          for step, index in runtime.get_exit_nodes()])
+                        missing.append(f'no path from {entrynode[0]}{entrynode[1]} to {exits} '
+                                       f'in the {flow.name()} flowgraph')
+                    if found:
+                        found_any = True
+                if not found_any:
+                    error = True
+                    if logger:
+                        for msg in missing:
+                            logger.error(msg)
+
+        return not error
+
+
+class FlowgraphNodeSchema(BaseSchema):
+    def __init__(self):
+        super().__init__()
+
+        schema_flowgraph(self)
+
+
+###############################################################################
+# Flow Configuration
+###############################################################################
+def schema_flowgraph(schema):
+    schema = EditableSchema(schema)
+
+    # flowgraph input
+    schema.insert(
+        'input',
+        Parameter(
+            '[(str,str)]',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: step input",
+            switch="-flowgraph_input 'flow step index <(str,str)>'",
+            example=[
+                "cli: -flowgraph_input 'asicflow cts 0 (place,0)'",
+                "api: chip.set('flowgraph', 'asicflow', 'cts', '0', 'input', ('place', '0'))"],
+            help=trim("""A list of inputs for the current step and index, specified as a
+            (step, index) tuple.""")))
+
+    # flowgraph metric weights
+    metric = 'default'
+    schema.insert(
+        'weight', metric,
+        Parameter(
+            'float',
+            scope=Scope.GLOBAL,
+            defvalue=0.0,
+            shorthelp="Flowgraph: metric weights",
+            switch="-flowgraph_weight 'flow step index metric <float>'",
+            example=[
+                "cli: -flowgraph_weight 'asicflow cts 0 area_cells 1.0'",
+                "api: chip.set('flowgraph', 'asicflow', 'cts', '0', 'weight', 'area_cells', 1.0)"],
+            help=trim("""Weights specified on a per step and per metric basis used to give
+            effective "goodness" score for a step by calculating the sum all step
+            real metrics results by the corresponding per step weights.""")))
+
+    schema.insert(
+        'goal', metric,
+        Parameter(
+            'float',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: metric goals",
+            switch="-flowgraph_goal 'flow step index metric <float>'",
+            example=[
+                "cli: -flowgraph_goal 'asicflow cts 0 area_cells 1.0'",
+                "api: chip.set('flowgraph', 'asicflow', 'cts', '0', 'goal', 'errors', 0)"],
+            help=trim("""Goals specified on a per step and per metric basis used to
+            determine whether a certain task can be considered when merging
+            multiple tasks at a minimum or maximum node. A task is considered
+            failing if the absolute value of any of its metrics are larger than
+            the goal for that metric, if set.""")))
+
+    # flowgraph tool
+    schema.insert(
+        'tool',
+        Parameter(
+            'str',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: tool selection",
+            switch="-flowgraph_tool 'flow step index <str>'",
+            example=[
+                "cli: -flowgraph_tool 'asicflow place 0 openroad'",
+                "api: chip.set('flowgraph', 'asicflow', 'place', '0', 'tool', 'openroad')"],
+            help=trim("""Name of the tool name used for task execution.""")))
+
+    # task (belonging to tool)
+    schema.insert(
+        'task',
+        Parameter(
+            'str',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: task selection",
+            switch="-flowgraph_task 'flow step index <str>'",
+            example=[
+                "cli: -flowgraph_task 'asicflow myplace 0 place'",
+                "api: chip.set('flowgraph', 'asicflow', 'myplace', '0', 'task', 'place')"],
+            help=trim("""Name of the tool associated task used for step execution.""")))
+
+    schema.insert(
+        'taskmodule',
+        Parameter(
+            'str',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: task module",
+            switch="-flowgraph_taskmodule 'flow step index <str>'",
+            example=[
+                "cli: -flowgraph_taskmodule 'asicflow place 0 "
+                "siliconcompiler.tools.openroad.place'",
+                "api: chip.set('flowgraph', 'asicflow', 'place', '0', 'taskmodule', "
+                "'siliconcompiler.tools.openroad.place')"],
+            help=trim("""
+            Full python module name of the task module used for task setup and execution.
+            """)))
+
+    # flowgraph arguments
+    schema.insert(
+        'args',
+        Parameter(
+            '[str]',
+            scope=Scope.GLOBAL,
+            shorthelp="Flowgraph: setup arguments",
+            switch="-flowgraph_args 'flow step index <str>'",
+            example=[
+                "cli: -flowgraph_args 'asicflow cts 0 0'",
+                "api: chip.add('flowgraph', 'asicflow', 'cts', '0', 'args', '0')"],
+            help=trim("""User specified flowgraph string arguments specified on a per
+            step and per index basis.""")))
```