siliconcompiler 0.34.0__py3-none-any.whl → 0.34.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/__init__.py +14 -2
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +1 -1
- siliconcompiler/apps/sc.py +1 -1
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +3 -3
- siliconcompiler/apps/sc_show.py +3 -3
- siliconcompiler/apps/utils/replay.py +4 -4
- siliconcompiler/checklist.py +203 -2
- siliconcompiler/constraints/__init__.py +17 -0
- siliconcompiler/constraints/asic_component.py +378 -0
- siliconcompiler/constraints/asic_floorplan.py +449 -0
- siliconcompiler/constraints/asic_pins.py +489 -0
- siliconcompiler/constraints/asic_timing.py +517 -0
- siliconcompiler/core.py +31 -249
- siliconcompiler/data/templates/email/general.j2 +3 -3
- siliconcompiler/data/templates/email/summary.j2 +1 -1
- siliconcompiler/data/templates/issue/README.txt +1 -1
- siliconcompiler/data/templates/report/sc_report.j2 +7 -7
- siliconcompiler/dependencyschema.py +10 -174
- siliconcompiler/design.py +325 -114
- siliconcompiler/flowgraph.py +63 -15
- siliconcompiler/library.py +133 -0
- siliconcompiler/metric.py +94 -72
- siliconcompiler/metrics/__init__.py +7 -0
- siliconcompiler/metrics/asic.py +245 -0
- siliconcompiler/metrics/fpga.py +220 -0
- siliconcompiler/optimizer/vizier.py +2 -2
- siliconcompiler/package/__init__.py +138 -35
- siliconcompiler/package/github.py +6 -10
- siliconcompiler/packageschema.py +256 -12
- siliconcompiler/pathschema.py +226 -0
- siliconcompiler/pdk.py +5 -5
- siliconcompiler/project.py +459 -0
- siliconcompiler/remote/client.py +18 -12
- siliconcompiler/remote/server.py +2 -2
- siliconcompiler/report/dashboard/cli/__init__.py +6 -6
- siliconcompiler/report/dashboard/cli/board.py +3 -3
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -5
- siliconcompiler/report/dashboard/web/components/flowgraph.py +4 -4
- siliconcompiler/report/dashboard/web/components/graph.py +2 -2
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +5 -5
- siliconcompiler/report/html_report.py +1 -1
- siliconcompiler/report/report.py +4 -4
- siliconcompiler/report/summary_table.py +2 -2
- siliconcompiler/report/utils.py +5 -5
- siliconcompiler/scheduler/docker.py +4 -10
- siliconcompiler/scheduler/run_node.py +4 -8
- siliconcompiler/scheduler/scheduler.py +18 -24
- siliconcompiler/scheduler/schedulernode.py +161 -143
- siliconcompiler/scheduler/send_messages.py +3 -3
- siliconcompiler/scheduler/slurm.py +5 -3
- siliconcompiler/scheduler/taskscheduler.py +10 -8
- siliconcompiler/schema/__init__.py +0 -2
- siliconcompiler/schema/baseschema.py +148 -26
- siliconcompiler/schema/editableschema.py +14 -6
- siliconcompiler/schema/journal.py +23 -15
- siliconcompiler/schema/namedschema.py +30 -4
- siliconcompiler/schema/parameter.py +34 -19
- siliconcompiler/schema/parametertype.py +2 -0
- siliconcompiler/schema/parametervalue.py +198 -15
- siliconcompiler/schema/schema_cfg.py +18 -14
- siliconcompiler/schema_obj.py +5 -3
- siliconcompiler/tool.py +591 -179
- siliconcompiler/tools/__init__.py +2 -0
- siliconcompiler/tools/builtin/_common.py +5 -5
- siliconcompiler/tools/builtin/concatenate.py +5 -5
- siliconcompiler/tools/builtin/minimum.py +4 -4
- siliconcompiler/tools/builtin/mux.py +4 -4
- siliconcompiler/tools/builtin/nop.py +4 -4
- siliconcompiler/tools/builtin/verify.py +7 -7
- siliconcompiler/tools/execute/exec_input.py +1 -1
- siliconcompiler/tools/genfasm/genfasm.py +1 -6
- siliconcompiler/tools/openroad/_apr.py +5 -1
- siliconcompiler/tools/openroad/antenna_repair.py +1 -1
- siliconcompiler/tools/openroad/macro_placement.py +1 -1
- siliconcompiler/tools/openroad/power_grid.py +1 -1
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +5 -0
- siliconcompiler/tools/opensta/timing.py +26 -3
- siliconcompiler/tools/slang/__init__.py +2 -2
- siliconcompiler/tools/surfer/__init__.py +0 -0
- siliconcompiler/tools/surfer/show.py +53 -0
- siliconcompiler/tools/surfer/surfer.py +30 -0
- siliconcompiler/tools/vpr/route.py +27 -14
- siliconcompiler/tools/vpr/vpr.py +23 -6
- siliconcompiler/tools/yosys/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/procs.tcl +143 -0
- siliconcompiler/tools/yosys/{sc_synth_asic.tcl → scripts/sc_synth_asic.tcl} +4 -0
- siliconcompiler/tools/yosys/{sc_synth_fpga.tcl → scripts/sc_synth_fpga.tcl} +24 -77
- siliconcompiler/tools/yosys/syn_fpga.py +14 -0
- siliconcompiler/toolscripts/_tools.json +9 -13
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +33 -0
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +33 -0
- siliconcompiler/utils/__init__.py +2 -1
- siliconcompiler/utils/flowgraph.py +24 -23
- siliconcompiler/utils/issue.py +23 -29
- siliconcompiler/utils/logging.py +35 -6
- siliconcompiler/utils/showtools.py +6 -1
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/METADATA +15 -25
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/RECORD +109 -97
- siliconcompiler/schema/packageschema.py +0 -101
- siliconcompiler/tools/yosys/procs.tcl +0 -71
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +0 -68
- /siliconcompiler/tools/yosys/{sc_lec.tcl → scripts/sc_lec.tcl} +0 -0
- /siliconcompiler/tools/yosys/{sc_screenshot.tcl → scripts/sc_screenshot.tcl} +0 -0
- /siliconcompiler/tools/yosys/{syn_strategies.tcl → scripts/syn_strategies.tcl} +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/top_level.txt +0 -0
siliconcompiler/tool.py
CHANGED
@@ -1,4 +1,7 @@
 import contextlib
+import copy
+import csv
+import gzip
 import logging
 import os
 import psutil
@@ -8,6 +11,7 @@ import shutil
 import subprocess
 import sys
 import time
+import yaml
 
 try:
     import resource
@@ -25,12 +29,16 @@ import os.path
 from packaging.version import Version, InvalidVersion
 from packaging.specifiers import SpecifierSet, InvalidSpecifier
 
-from
+from typing import List, Union
+
+from siliconcompiler.schema import BaseSchema, NamedSchema, Journal
 from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
+from siliconcompiler.schema.parametertype import NodeType
 from siliconcompiler.schema.utils import trim
 
-from siliconcompiler import utils
+from siliconcompiler import utils, NodeStatus
 from siliconcompiler import sc_open
+from siliconcompiler import Schema
 
 from siliconcompiler.record import RecordTool
 from siliconcompiler.flowgraph import RuntimeFlowgraph
@@ -61,37 +69,6 @@ class TaskExecutableNotFound(TaskError):
 
 
 class TaskSchema(NamedSchema):
-    def __init__(self, name):
-        super().__init__(name)
-
-        schema_task(self)
-
-    def add_parameter(self, name, type, help, defvalue=None):
-        '''
-        Adds a parameter to the task definition.
-
-        Args:
-            name (str): name of parameter
-            type (str): schema type of the parameter
-            help (str): help string for this parameter
-            defvalue (any): default value for the parameter
-        '''
-        help = trim(help)
-        param = Parameter(
-            type,
-            defvalue=defvalue,
-            scope=Scope.JOB,
-            pernode=PerNode.OPTIONAL,
-            shorthelp=help,
-            help=help
-        )
-
-        EditableSchema(self).insert("var", name, param)
-
-        return param
-
-
-class ToolSchema(NamedSchema):
     __parse_version_check_str = r"""
         (?P<operator>(==|!=|<=|>=|<|>|~=))
         \s*
@@ -107,17 +84,28 @@ class ToolSchema(NamedSchema):
         r"^\s*" + __parse_version_check_str + r"\s*$",
         re.VERBOSE | re.IGNORECASE)
 
-    def __init__(self, name):
-        super().__init__(
+    def __init__(self, name=None):
+        super().__init__()
+        self.set_name(name)
 
-
+        schema_task(self)
 
-
-
+        self.__set_runtime(None)
+
+    @contextlib.contextmanager
+    def runtime(self, chip, step=None, index=None, relpath=None):
+        '''
+        Sets the runtime information needed to properly execute a task.
+        Note: unstable API
 
-
+        Args:
+            chip (:class:`Chip`): root schema for the runtime information
+        '''
+        obj_copy = copy.copy(self)
+        obj_copy.__set_runtime(chip, step=step, index=index, relpath=relpath)
+        yield obj_copy
 
-    def
+    def __set_runtime(self, chip, step=None, index=None, relpath=None):
         '''
         Sets the runtime information needed to properly execute a task.
         Note: unstable API
@@ -128,22 +116,34 @@ class ToolSchema(NamedSchema):
         self.__chip = None
         self.__schema_full = None
         self.__logger = None
+        self.__design_name = None
+        self.__design_top = None
+        self.__design_top_global = None
+        self.__cwd = None
+        self.__relpath = relpath
+        self.__collection_path = None
        if chip:
             self.__chip = chip
             self.__schema_full = chip.schema
             self.__logger = chip.logger
+            self.__design_name = chip.design
+            self.__design_top = chip.top(step=step, index=index)
+            self.__design_top_global = chip.top()
+            self.__cwd = chip.cwd
+            self.__collection_path = chip._getcollectdir()
 
         self.__step = step
         self.__index = index
-        self.__tool = None
-        self.__task = None
 
         self.__schema_record = None
         self.__schema_metric = None
         self.__schema_flow = None
+        self.__schema_flow_runtime = None
+        self.__schema_tool = None
         if self.__schema_full:
             self.__schema_record = self.__schema_full.get("record", field="schema")
             self.__schema_metric = self.__schema_full.get("metric", field="schema")
+            self.__schema_tool = self._parent()._parent()
 
             if not self.__step:
                 self.__step = self.__schema_full.get('arg', 'step')
@@ -157,8 +157,25 @@ class ToolSchema(NamedSchema):
             if not flow:
                 raise RuntimeError("flow not specified")
             self.__schema_flow = self.__schema_full.get("flowgraph", flow, field="schema")
-
-            self.
+
+            self.__schema_flow_runtime = RuntimeFlowgraph(
+                self.__schema_flow,
+                from_steps=set([step for step, _ in self.__schema_flow.get_entry_nodes()]),
+                prune_nodes=self.__schema_full.get('option', 'prune'))
+
+    def design_name(self) -> str:
+        '''
+        Returns:
+            name of the design
+        '''
+        return self.__design_name
+
+    def design_topmodule(self) -> str:
+        '''
+        Returns:
+            top module of the design
+        '''
+        return self.__design_top
 
     def node(self):
         '''
@@ -171,10 +188,10 @@ class ToolSchema(NamedSchema):
     def tool(self):
         '''
         Returns:
-
+            tool name
         '''
 
-
+        raise NotImplementedError("tool name must be implemented by the child class")
 
     def task(self):
         '''
@@ -182,7 +199,7 @@ class ToolSchema(NamedSchema):
             task name
         '''
 
-
+        raise NotImplementedError("task name must be implemented by the child class")
 
     def logger(self):
         '''
@@ -209,10 +226,21 @@ class ToolSchema(NamedSchema):
             return self.__schema_metric
         elif type == "flow":
             return self.__schema_flow
+        elif type == "runtimeflow":
+            return self.__schema_flow_runtime
+        elif type == "tool":
+            return self.__schema_tool
         else:
             raise ValueError(f"{type} is not a schema section")
 
-    def
+    def has_breakpoint(self) -> bool:
+        '''
+        Returns:
+            True if this task has a breakpoint associated with it
+        '''
+        return self.schema().get("option", "breakpoint", step=self.__step, index=self.__index)
+
+    def get_exe(self) -> str:
         '''
         Determines the absolute path for the specified executable.
 
@@ -223,7 +251,7 @@ class ToolSchema(NamedSchema):
             path to executable, or None if not specified
         '''
 
-        exe = self.get('exe')
+        exe = self.schema("tool").get('exe')
 
         if exe is None:
             return None
@@ -238,7 +266,7 @@ class ToolSchema(NamedSchema):
 
         return fullexe
 
-    def get_exe_version(self):
+    def get_exe_version(self) -> str:
         '''
         Gets the version of the specified executable.
 
@@ -250,7 +278,7 @@ class ToolSchema(NamedSchema):
             version determined by :meth:`.parse_version`.
         '''
 
-        veropt = self.get('vswitch')
+        veropt = self.schema("tool").get('vswitch')
         if not veropt:
             return None
 
@@ -263,7 +291,8 @@ class ToolSchema(NamedSchema):
         cmdlist = [exe]
         cmdlist.extend(veropt)
 
-        self.__logger.debug(f'Running {self.
+        self.__logger.debug(f'Running {self.tool()}/{self.task()} version check: '
+                            f'{" ".join(cmdlist)}')
 
         proc = subprocess.run(cmdlist,
                               stdin=subprocess.DEVNULL,
@@ -278,9 +307,11 @@ class ToolSchema(NamedSchema):
         try:
             version = self.parse_version(proc.stdout)
         except NotImplementedError:
-            raise NotImplementedError(f'{self.
+            raise NotImplementedError(f'{self.tool()}/{self.task()} does not implement '
+                                      'parse_version()')
         except Exception as e:
-            self.__logger.error(f'{self.
+            self.__logger.error(f'{self.tool()}/{self.task()} failed to parse version string: '
+                                f'{proc.stdout}')
             raise e from None
 
         self.__logger.info(f"Tool '{exe_base}' found with version '{version}' "
@@ -288,7 +319,7 @@ class ToolSchema(NamedSchema):
 
         return version
 
-    def check_exe_version(self, reported_version):
+    def check_exe_version(self, reported_version) -> bool:
         '''
         Check if the reported version matches the versions specified in
         :keypath:`tool,<tool>,version`.
@@ -301,7 +332,7 @@ class ToolSchema(NamedSchema):
 
         '''
 
-        spec_sets = self.get('version', step=self.__step, index=self.__index)
+        spec_sets = self.schema("tool").get('version', step=self.__step, index=self.__index)
        if not spec_sets:
             # No requirement so always true
             return True
@@ -310,7 +341,7 @@ class ToolSchema(NamedSchema):
             split_specs = [s.strip() for s in spec_set.split(",") if s.strip()]
             specs_list = []
             for spec in split_specs:
-                match = re.match(
+                match = re.match(TaskSchema.__parse_version_check, spec)
                 if match is None:
                     self.__logger.warning(f'Invalid version specifier {spec}. '
                                           f'Defaulting to =={spec}.')
@@ -324,15 +355,15 @@ class ToolSchema(NamedSchema):
         try:
             normalized_version = self.normalize_version(reported_version)
         except Exception as e:
-            self.__logger.error(f'Unable to normalize version for {self.
+            self.__logger.error(f'Unable to normalize version for {self.tool()}/{self.task()}: '
                                 f'{reported_version}')
             raise e from None
 
         try:
             version = Version(normalized_version)
         except InvalidVersion:
-            self.__logger.error(f'Version {normalized_version} reported by
-                                'not match standard.')
+            self.__logger.error(f'Version {normalized_version} reported by '
+                                f'{self.tool()}/{self.task()} does not match standard.')
             return False
 
         try:
@@ -340,7 +371,8 @@ class ToolSchema(NamedSchema):
                 f'{op}{self.normalize_version(ver)}' for op, ver in specs_list]
             normalized_specs = ','.join(normalized_spec_list)
         except Exception as e:
-            self.__logger.error(f'Unable to normalize versions for
+            self.__logger.error(f'Unable to normalize versions for '
+                                f'{self.tool()}/{self.task()}: '
                                 f'{",".join([f"{op}{ver}" for op, ver in specs_list])}')
             raise e from None
 
@@ -355,7 +387,8 @@ class ToolSchema(NamedSchema):
                 return True
 
         allowedstr = '; '.join(spec_sets)
-        self.__logger.error(f"Version check failed for {self.
+        self.__logger.error(f"Version check failed for {self.tool()}/{self.task()}. "
+                            "Check installation.")
         self.__logger.error(f"Found version {reported_version}, "
                             f"did not satisfy any version specifier set {allowedstr}.")
         return False
@@ -377,16 +410,17 @@ class ToolSchema(NamedSchema):
             envvars[env] = self.__schema_full.get('option', 'env', env)
 
         # Add tool specific vars
-        for lic_env in self.getkeys('licenseserver'):
-            license_file = self.get('licenseserver', lic_env,
+        for lic_env in self.schema("tool").getkeys('licenseserver'):
+            license_file = self.schema("tool").get('licenseserver', lic_env,
+                                                   step=self.__step, index=self.__index)
             if license_file:
                 envvars[lic_env] = ':'.join(license_file)
 
         if include_path:
-            path = self.find_files(
+            path = self.schema("tool").find_files(
                 "path", step=self.__step, index=self.__index,
-
-
+                cwd=self.__cwd,
+                collection_dir=self.__collection_path,
                 missing_ok=True)
 
             envvars["PATH"] = os.getenv("PATH", os.defpath)
@@ -401,9 +435,8 @@ class ToolSchema(NamedSchema):
                 envvars[var] = val
 
         # Add task specific vars
-        for env in self.getkeys(
-            envvars[env] = self.get(
-                step=self.__step, index=self.__index)
+        for env in self.getkeys("env"):
+            envvars[env] = self.get("env", env)
 
         return envvars
 
@@ -417,9 +450,20 @@ class ToolSchema(NamedSchema):
 
         cmdargs = []
         try:
-
+            if self.__relpath:
+                args = []
+                for arg in self.runtime_options():
+                    arg = str(arg)
+                    if os.path.isabs(arg) and os.path.exists(arg):
+                        args.append(os.path.relpath(arg, self.__relpath))
+                    else:
+                        args.append(arg)
+            else:
+                args = self.runtime_options()
+
+            cmdargs.extend(args)
         except Exception as e:
-            self.__logger.error(f'Failed to get runtime options for {self.
+            self.__logger.error(f'Failed to get runtime options for {self.tool()}/{self.task()}')
             raise e from None
 
         # Cleanup args
@@ -440,13 +484,13 @@ class ToolSchema(NamedSchema):
         replay_opts["work_dir"] = workdir
         replay_opts["exports"] = self.get_runtime_environmental_variables(include_path=include_path)
 
-        replay_opts["executable"] = self.get('exe')
+        replay_opts["executable"] = self.schema("tool").get('exe')
         replay_opts["step"] = self.__step
         replay_opts["index"] = self.__index
-        replay_opts["cfg_file"] = f"inputs/{self.
+        replay_opts["cfg_file"] = f"inputs/{self.__design_name}.pkg.json"
         replay_opts["node_only"] = 0 if replay_opts["executable"] else 1
 
-        vswitch = self.get('vswitch')
+        vswitch = self.schema("tool").get('vswitch')
         if vswitch:
             replay_opts["version_flag"] = shlex.join(vswitch)
 
@@ -503,6 +547,66 @@ class ToolSchema(NamedSchema):
         os.makedirs(os.path.join(workdir, 'outputs'), exist_ok=True)
         os.makedirs(os.path.join(workdir, 'reports'), exist_ok=True)
 
+    def __write_yaml_manifest(self, fout, manifest):
+        class YamlIndentDumper(yaml.Dumper):
+            def increase_indent(self, flow=False, indentless=False):
+                return super().increase_indent(flow=flow, indentless=indentless)
+
+        fout.write(yaml.dump(manifest.getdict(), Dumper=YamlIndentDumper,
+                             default_flow_style=False))
+
+    def __write_tcl_manifest(self, fout, manifest):
+        template = utils.get_file_template('tcl/manifest.tcl.j2')
+        tcl_set_cmds = []
+        for key in sorted(manifest.allkeys()):
+            # print out all non default values
+            if 'default' in key:
+                continue
+
+            param = manifest.get(*key, field=None)
+
+            # create a TCL dict
+            keystr = ' '.join([NodeType.to_tcl(keypart, 'str') for keypart in key])
+
+            valstr = param.gettcl(step=self.__step, index=self.__index)
+            if valstr is None:
+                continue
+
+            # Ensure empty values get something
+            if valstr == '':
+                valstr = '{}'
+
+            tcl_set_cmds.append(f"dict set sc_cfg {keystr} {valstr}")
+
+        if template:
+            fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
+                                       scroot=os.path.abspath(
+                                           os.path.join(os.path.dirname(__file__))),
+                                       record_access="get" in Journal.access(self).get_types(),
+                                       record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
+        else:
+            for cmd in tcl_set_cmds:
+                fout.write(cmd + '\n')
+            fout.write('\n')
+
+    def __write_csv_manifest(self, fout, manifest):
+        csvwriter = csv.writer(fout)
+        csvwriter.writerow(['Keypath', 'Value'])
+
+        for key in sorted(manifest.allkeys()):
+            keypath = ','.join(key)
+            param = manifest.get(*key, field=None)
+            if param.get(field="pernode").is_never():
+                value = param.get()
+            else:
+                value = param.get(step=self.__step, index=self.__index)
+
+            if isinstance(value, (set, list)):
+                for item in value:
+                    csvwriter.writerow([keypath, item])
+            else:
+                csvwriter.writerow([keypath, value])
+
     def write_task_manifest(self, directory, backup=True):
         '''
         Write the manifest needed for the task
@@ -512,7 +616,7 @@ class ToolSchema(NamedSchema):
             backup (bool): if True and an existing manifest is found a backup is kept.
         '''
 
-        suffix = self.get('format')
+        suffix = self.schema("tool").get('format')
         if not suffix:
             return
 
@@ -521,8 +625,65 @@ class ToolSchema(NamedSchema):
         if backup and os.path.exists(manifest_path):
             shutil.copyfile(manifest_path, f'{manifest_path}.bak')
 
-        #
-        self.
+        # Generate abs paths
+        schema = self.__abspath_schema()
+
+        if re.search(r'\.json(\.gz)?$', manifest_path):
+            schema.write_manifest(manifest_path)
+        else:
+            try:
+                # format specific dumping
+                if manifest_path.endswith('.gz'):
+                    fout = gzip.open(manifest_path, 'wt', encoding='UTF-8')
+                elif re.search(r'\.csv$', manifest_path):
+                    # Files written using csv library should be opened with newline=''
+                    # https://docs.python.org/3/library/csv.html#id3
+                    fout = open(manifest_path, 'w', newline='')
+                else:
+                    fout = open(manifest_path, 'w')
+
+                if re.search(r'(\.yaml|\.yml)(\.gz)?$', manifest_path):
+                    self.__write_yaml_manifest(fout, schema)
+                elif re.search(r'\.tcl(\.gz)?$', manifest_path):
+                    self.__write_tcl_manifest(fout, schema)
+                elif re.search(r'\.csv(\.gz)?$', manifest_path):
+                    self.__write_csv_manifest(fout, schema)
+                else:
+                    raise ValueError(f"{manifest_path} is not a recognized path type")
+            finally:
+                fout.close()
+
+    def __abspath_schema(self):
+        root = self.schema()
+        schema = root.copy()
+
+        strict = root.get("option", "strict")
+        root.set("option", "strict", False)
+
+        for keypath in root.allkeys():
+            paramtype = schema.get(*keypath, field='type')
+            if 'file' not in paramtype and 'dir' not in paramtype:
+                # only do something if type is file or dir
+                continue
+
+            for value, step, index in root.get(*keypath, field=None).getvalues():
+                if not value:
+                    continue
+                abspaths = root.find_files(*keypath, missing_ok=True, step=step, index=index)
+                if isinstance(abspaths, (set, list)) and None in abspaths:
+                    # Lists may not contain None
+                    schema.set(*keypath, [], step=step, index=index)
+                else:
+                    if self.__relpath:
+                        if isinstance(abspaths, (set, list)):
+                            abspaths = [os.path.relpath(path, self.__relpath) for path in abspaths]
+                        else:
+                            abspaths = os.path.relpath(abspaths, self.__relpath)
+                    schema.set(*keypath, abspaths, step=step, index=index)
+
+        root.set("option", "strict", strict)
+
+        return schema
 
     def __get_io_file(self, io_type):
         '''
@@ -531,10 +692,8 @@ class ToolSchema(NamedSchema):
         Args:
             io_type (str): name of io type
         '''
-        suffix = self.get(
-
-        destination = self.get('task', self.__task, io_type, 'destination',
-                               step=self.__step, index=self.__index)
+        suffix = self.get(io_type, "suffix")
+        destination = self.get(io_type, "destination")
 
         io_file = None
         io_log = False
@@ -542,7 +701,7 @@ class ToolSchema(NamedSchema):
             io_file = f"{self.__step}.{suffix}"
             io_log = True
         elif destination == 'output':
-            io_file = os.path.join('outputs', f"{self.
+            io_file = os.path.join('outputs', f"{self.__design_top_global}.{suffix}")
         elif destination == 'none':
             io_file = os.devnull
 
@@ -581,13 +740,13 @@ class ToolSchema(NamedSchema):
         TERMINATE_TIMEOUT = 5
 
         terminate_process(proc.pid, timeout=TERMINATE_TIMEOUT)
-        self.__logger.info(f'Waiting for {self.
+        self.__logger.info(f'Waiting for {self.tool()}/{self.task()} to exit...')
         try:
             proc.wait(timeout=TERMINATE_TIMEOUT)
         except subprocess.TimeoutExpired:
             if proc.poll() is None:
-                self.__logger.warning(f'{self.
-                                      'seconds. Terminating...')
+                self.__logger.warning(f'{self.tool()}/{self.task()} did not exit within '
+                                      f'{TERMINATE_TIMEOUT} seconds. Terminating...')
                 terminate_process(proc.pid, timeout=TERMINATE_TIMEOUT)
 
     def run_task(self, workdir, quiet, loglevel, breakpoint, nice, timeout):
@@ -658,7 +817,7 @@ class ToolSchema(NamedSchema):
                         contextlib.redirect_stdout(stdout_writer):
                     retcode = self.run()
             except Exception as e:
-                self.__logger.error(f'Failed in run() for {self.
+                self.__logger.error(f'Failed in run() for {self.tool()}/{self.task()}: {e}')
                 utils.print_traceback(self.__logger, e)
                 raise e
             finally:
@@ -706,9 +865,9 @@ class ToolSchema(NamedSchema):
                     retcode = pty.spawn([exe, *cmdlist], read)
             else:
                 with open(stdout_file, 'w') as stdout_writer, \
-                        open(stdout_file, 'r', errors='
+                        open(stdout_file, 'r', errors='replace') as stdout_reader, \
                         open(stderr_file, 'w') as stderr_writer, \
-                        open(stderr_file, 'r', errors='
+                        open(stderr_file, 'r', errors='replace') as stderr_reader:
                     # if STDOUT and STDERR are to be redirected to the same file,
                     # use a single writer
                     if stderr_file == stdout_file:
@@ -805,7 +964,7 @@ class ToolSchema(NamedSchema):
 
         # Remove runtime information
         for key in list(state.keys()):
-            if key.startswith("
+            if key.startswith("_TaskSchema__"):
                 del state[key]
 
         return state
@@ -814,10 +973,258 @@ class ToolSchema(NamedSchema):
         self.__dict__ = state
 
         # Reinit runtime information
-        self.
+        self.__set_runtime(None)
 
     def get_output_files(self):
-        return set(self.get("
+        return set(self.get("output"))
+
+    def get_files_from_input_nodes(self):
+        """
+        Returns a dictionary of files with the node they originated from
+        """
+
+        nodes = self.schema("runtimeflow").get_nodes()
+
+        inputs = {}
+        for in_step, in_index in self.schema("flow").get(*self.node(), 'input'):
+            if (in_step, in_index) not in nodes:
+                # node has been pruned so will not provide anything
+                continue
+
+            in_tool = self.schema("flow").get(in_step, in_index, "tool")
+            in_task = self.schema("flow").get(in_step, in_index, "task")
+
+            task_obj = self.schema().get("tool", in_tool, "task", in_task, field="schema")
+
+            if self.schema("record").get('status', step=in_step, index=in_index) == \
+                    NodeStatus.SKIPPED:
+                with task_obj.runtime(self.__chip, step=in_step, index=in_index) as task:
+                    for file, nodes in task.get_files_from_input_nodes().items():
+                        inputs.setdefault(file, []).extend(nodes)
+                continue
+
+            for output in NamedSchema.get(task_obj, "output", step=in_step, index=in_index):
+                inputs.setdefault(output, []).append((in_step, in_index))
+
+        return inputs
+
+    def compute_input_file_node_name(self, filename, step, index):
+        """
+        Generate a unique name for in input file based on the originating node.
+
+        Args:
+            filename (str): name of inputfile
+            step (str): Step name
+            index (str): Index name
+        """
+
+        _, file_type = os.path.splitext(filename)
+
+        if file_type:
+            base = filename
+            total_ext = []
+            while file_type:
+                base, file_type = os.path.splitext(base)
+                total_ext.append(file_type)
+
+            total_ext.reverse()
+
+            return f'{base}.{step}{index}{"".join(total_ext)}'
+        else:
+            return f'{filename}.{step}{index}'
+
+    def add_parameter(self, name, type, help, defvalue=None, **kwargs):
+        '''
+        Adds a parameter to the task definition.
+
+        Args:
+            name (str): name of parameter
+            type (str): schema type of the parameter
+            help (str): help string for this parameter
+            defvalue (any): default value for the parameter
+        '''
+        help = trim(help)
+        param = Parameter(
+            type,
+            **kwargs,
+            defvalue=defvalue,
+            scope=Scope.JOB,
+            pernode=PerNode.OPTIONAL,
+            shorthelp=help,
+            help=help
+        )
+
+        EditableSchema(self).insert("var", name, param)
+
+        return param
+
+    def add_required_tool_key(self, *key: str):
+        '''
+        Adds a required tool keypath to the task driver.
+
+        Args:
+            key (list of str): required key path
+        '''
+        return self.add_required_key(self, *key)
+
+    def add_required_key(self, obj: Union[BaseSchema, str], *key: str):
+        '''
+        Adds a required keypath to the task driver.
+
+        Args:
+            obj (:class:`BaseSchema` or str): if this is a string it will be considered
+                part of the key, otherwise the keypath to the obj will be prepended to
+                the key
+            key (list of str): required key path
+        '''
+
+        if isinstance(obj, BaseSchema):
+            key = (*obj._keypath, *key)
+        else:
+            key = (obj, *key)
+
+        if any([not isinstance(k, str) for k in key]):
+            raise ValueError("key can only contain strings")
+
+        return self.add("require", ",".join(key))
+
+    def set_threads(self, max_threads: int = None, clobber: bool = False):
+        """
+        Sets the requested thread count for the task
+
+        Args:
+            max_threads (int): if provided the requested thread count
+                will be set this value, otherwise the current machines
+                core count will be used.
+            clobber (bool): overwrite existing value
+        """
+        if max_threads is None or max_threads <= 0:
+            max_threads = utils.get_cores(None)
+
+        return self.set("threads", max_threads, clobber=clobber)
+
+    def get_threads(self) -> int:
+        """
+        Returns the number of threads requested.
+        """
+        return self.get("threads")
+
+    def add_commandline_option(self, option: Union[List[str], str], clobber: bool = False):
+        """
+        Add to the command line options for the task
+
+        Args:
+            option (list of str or str): options to add to the commandline
+            clobber (bool): overwrite existing value
+        """
+
+        if clobber:
+            return self.set("option", option)
+        else:
+            return self.add("option", option)
+
+    def get_commandline_options(self) -> List[str]:
+        """
+        Returns the command line options specified
+        """
+        return self.get("option")
+
+    def add_input_file(self, file: str = None, ext: str = None, clobber: bool = False):
+        """
+        Add a required input file from the previous step in the flow.
+        file and ext are mutually exclusive.
+
+        Args:
+            file (str): full filename
+            ext (str): file extension, if specified, the filename will be <top>.<ext>
+            clobber (bool): overwrite existing value
+        """
+        if file and ext:
+            raise ValueError("only file or ext can be specified")
+
+        if ext:
+            file = f"{self.design_topmodule()}.{ext}"
+
+        if clobber:
+            return self.set("input", file)
+        else:
+            return self.add("input", file)
+
+    def add_output_file(self, file: str = None, ext: str = None, clobber: bool = False):
+        """
+        Add an output file that this task will produce
+        file and ext are mutually exclusive.
+
+        Args:
+            file (str): full filename
+            ext (str): file extension, if specified, the filename will be <top>.<ext>
+            clobber (bool): overwrite existing value
+        """
+        if file and ext:
+            raise ValueError("only file or ext can be specified")
+
+        if ext:
+            file = f"{self.design_topmodule()}.{ext}"
+
+        if clobber:
+            return self.set("output", file)
+        else:
+            return self.add("output", file)
+
+    def record_metric(self, metric, value, source_file=None, source_unit=None):
+        '''
+        Records a metric and associates the source file with it.
+
+        Args:
+            metric (str): metric to record
+            value (float/int): value of the metric that is being recorded
+            source (str): file the value came from
+            source_unit (str): unit of the value, if not provided it is assumed to have no units
+
+        Examples:
+            >>> self.record_metric('cellarea', 500.0, 'reports/metrics.json', \\
+                                   source_units='um^2')
+            Records the metric cell area and notes the source as 'reports/metrics.json'
+        '''
+
+        if metric not in self.schema("metric").getkeys():
+            self.logger().warning(f"{metric} is not a valid metric")
+            return
+
+        self.schema("metric").record(self.__step, self.__index, metric, value, unit=source_unit)
+        if source_file:
+            self.add("report", metric, source_file)
+
+    ###############################################################
+    def get(self, *keypath, field='value'):
+        return super().get(*keypath, field=field,
+                           step=self.__step, index=self.__index)
+
+    def set(self, *args, field='value', clobber=True):
+        return super().set(*args, field=field, clobber=clobber,
+                           step=self.__step, index=self.__index)
+
+    def add(self, *args, field='value'):
+        return super().add(*args, field=field, step=self.__step, index=self.__index)
+
+    def _find_files_search_paths(self, keypath, step, index):
+        paths = super()._find_files_search_paths(keypath, step, index)
+        if keypath == "script":
+            paths.extend(self.find_files(
+                "refdir",
+                step=step, index=index,
+                cwd=self.__cwd,
+                collection_dir=self.__collection_path))
+        elif keypath == "input":
+            paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
+                                      "inputs"))
+        elif keypath == "report":
+            paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
+                                      "report"))
+        elif keypath == "output":
+            paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
+                                      "outputs"))
+        return paths
 
     ###############################################################
     def parse_version(self, stdout):
@@ -830,25 +1237,23 @@ class ToolSchema(NamedSchema):
         pass
 
     def select_input_nodes(self):
-
-
-            flow,
-            from_steps=set([step for step, _ in flow.get_entry_nodes()]),
-            prune_nodes=self.__chip.get('option', 'prune'))
-
-        return runtime.get_node_inputs(self.__step, self.__index, record=self.schema("record"))
+        return self.schema("runtimeflow").get_node_inputs(
+            self.__step, self.__index, record=self.schema("record"))
 
     def pre_process(self):
         pass
 
     def runtime_options(self):
         cmdargs = []
-        cmdargs.extend(self.get(
-            step=self.__step, index=self.__index))
+        cmdargs.extend(self.get("option"))
 
         # Add scripts files / TODO:
-        scripts = self.
-
+        scripts = self.find_files(
+            'script',
+            step=self.__step, index=self.__index,
+            cwd=self.__cwd,
+            collection_dir=self.__collection_path,
+            missing_ok=True)
 
         cmdargs.extend(scripts)
 
@@ -861,12 +1266,33 @@ class ToolSchema(NamedSchema):
         pass
 
 
+class ToolSchema(NamedSchema):
+    def __init__(self, name=None):
+        super().__init__()
+        self.set_name(name)
+
+        schema_tool(self)
+
+        schema = EditableSchema(self)
+        schema.insert("task", "default", TaskSchema(None))
+
+
 ###########################################################################
 # Migration helper
 ###########################################################################
-class ToolSchemaTmp(
+class ToolSchemaTmp(NamedSchema):
     def __init__(self):
-        super().__init__(
+        super().__init__()
+
+        schema_tool(self)
+
+        schema = EditableSchema(self)
+        schema.insert("task", "default", TaskSchemaTmp())
+
+
+class TaskSchemaTmp(TaskSchema):
+    def __init__(self):
+        super().__init__()
 
     def __module_func(self, name, modules):
         for module in modules:
@@ -877,103 +1303,101 @@ class ToolSchemaTmp(ToolSchema):
 
     def __tool_task_modules(self):
         step, index = self.node()
-        flow = self.
+        flow = self._TaskSchema__chip.get('option', 'flow')
         return \
-            self.
-            self.
+            self._TaskSchema__chip._get_tool_module(step, index, flow=flow), \
+            self._TaskSchema__chip._get_task_module(step, index, flow=flow)
+
+    @contextlib.contextmanager
+    def __in_step_index(self):
+        prev_step, prev_index = self._TaskSchema__chip.get('arg', 'step'), \
+            self._TaskSchema__chip.get('arg', 'index')
+        step, index = self.node()
+        self._TaskSchema__chip.set('arg', 'step', step)
+        self._TaskSchema__chip.set('arg', 'index', index)
+        yield
+        self._TaskSchema__chip.set('arg', 'step', prev_step)
+        self._TaskSchema__chip.set('arg', 'index', prev_index)
+
+    def tool(self):
+        return self.schema("flow").get(*self.node(), 'tool')
+
+    def task(self):
+        return self.schema("flow").get(*self.node(), 'task')
+
+    def get_exe(self):
+        if self.tool() == "execute" and self.task() == "exec_input":
+            return self.schema("tool").get("exe")
+        return super().get_exe()
+
+    def schema(self, type=None):
+        if type is None:
+            return self._TaskSchema__chip
+        return super().schema(type)
 
     def get_output_files(self):
         _, task = self.__tool_task_modules()
         method = self.__module_func("_gather_outputs", [task])
         if method:
-            return method(self.
-        return
+            return method(self._TaskSchema__chip, *self.node())
+        return TaskSchema.get_output_files(self)
 
     def parse_version(self, stdout):
         tool, _ = self.__tool_task_modules()
         method = self.__module_func("parse_version", [tool])
         if method:
             return method(stdout)
-        return
+        return TaskSchema.parse_version(self, stdout)
 
     def normalize_version(self, version):
         tool, _ = self.__tool_task_modules()
         method = self.__module_func("normalize_version", [tool])
         if method:
             return method(version)
-        return
+        return TaskSchema.normalize_version(self, version)
 
     def generate_replay_script(self, filepath, workdir, include_path=True):
-
-
-
-        self._ToolSchema__chip.set('arg', 'step', step)
-        self._ToolSchema__chip.set('arg', 'index', index)
-        ret = ToolSchema.generate_replay_script(self, filepath, workdir, include_path=include_path)
-        self._ToolSchema__chip.set('arg', 'step', prev_step)
-        self._ToolSchema__chip.set('arg', 'index', prev_index)
+        with self.__in_step_index():
+            ret = TaskSchema.generate_replay_script(self, filepath, workdir,
+                                                    include_path=include_path)
         return ret

     def setup(self):
         _, task = self.__tool_task_modules()
         method = self.__module_func("setup", [task])
         if method:
-
-            self.
-            step, index = self.node()
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            ret = method(self._ToolSchema__chip)
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                ret = method(self._TaskSchema__chip)
             return ret
-        return
+        return TaskSchema.setup(self)
 
     def select_input_nodes(self):
         _, task = self.__tool_task_modules()
         method = self.__module_func("_select_inputs", [task])
         if method:
-
-            self.
-            step, index = self.node()
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            ret = method(self._ToolSchema__chip, *self.node())
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                ret = method(self._TaskSchema__chip, *self.node())
             return ret
-        return
+        return TaskSchema.select_input_nodes(self)
 
     def pre_process(self):
         _, task = self.__tool_task_modules()
         method = self.__module_func("pre_process", [task])
         if method:
-
-            self.
-            step, index = self.node()
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            ret = method(self._ToolSchema__chip)
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                ret = method(self._TaskSchema__chip)
             return ret
-        return
+        return TaskSchema.pre_process(self)
 
     def runtime_options(self):
         tool, task = self.__tool_task_modules()
         method = self.__module_func("runtime_options", [task, tool])
         if method:
-
-
-
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            ret = ToolSchema.runtime_options(self)
-            ret.extend(method(self._ToolSchema__chip))
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                ret = TaskSchema.runtime_options(self)
+                ret.extend(method(self._TaskSchema__chip))
            return ret
-        return
+        return TaskSchema.runtime_options(self)
 
     def run(self):
         _, task = self.__tool_task_modules()
@@ -981,38 +1405,26 @@ class ToolSchemaTmp(ToolSchema):
         if method:
             # Handle logger stdout suppression if quiet
             step, index = self.node()
-            stdout_handler_level = self.
-            if self.
-                self.
+            stdout_handler_level = self._TaskSchema__chip._logger_console.level
+            if self._TaskSchema__chip.get('option', 'quiet', step=step, index=index):
+                self._TaskSchema__chip._logger_console.setLevel(logging.CRITICAL)
 
-
-            self.
-            step, index = self.node()
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            retcode = method(self._ToolSchema__chip)
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                retcode = method(self._TaskSchema__chip)
 
-            self.
+            self._TaskSchema__chip._logger_console.setLevel(stdout_handler_level)
 
             return retcode
-        return
+        return TaskSchema.run(self)
 
     def post_process(self):
         _, task = self.__tool_task_modules()
         method = self.__module_func("post_process", [task])
         if method:
-
-            self.
-            step, index = self.node()
-            self._ToolSchema__chip.set('arg', 'step', step)
-            self._ToolSchema__chip.set('arg', 'index', index)
-            ret = method(self._ToolSchema__chip)
-            self._ToolSchema__chip.set('arg', 'step', prev_step)
-            self._ToolSchema__chip.set('arg', 'index', prev_index)
+            with self.__in_step_index():
+                ret = method(self._TaskSchema__chip)
             return ret
-        return
+        return TaskSchema.post_process(self)
 
 
 ###########################################################################