siliconcompiler 0.32.3__py3-none-any.whl → 0.33.1__py3-none-any.whl
This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- siliconcompiler/__init__.py +19 -2
- siliconcompiler/_common.py +5 -0
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc.py +2 -2
- siliconcompiler/apps/sc_install.py +10 -3
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +10 -5
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +5 -3
- siliconcompiler/asic.py +120 -0
- siliconcompiler/checklist.py +150 -0
- siliconcompiler/core.py +299 -299
- siliconcompiler/flowgraph.py +803 -515
- siliconcompiler/fpga.py +84 -0
- siliconcompiler/metric.py +479 -0
- siliconcompiler/optimizer/vizier.py +2 -3
- siliconcompiler/package/__init__.py +29 -6
- siliconcompiler/pdk.py +415 -0
- siliconcompiler/record.py +453 -0
- siliconcompiler/remote/client.py +15 -5
- siliconcompiler/remote/schema.py +116 -112
- siliconcompiler/remote/server.py +9 -6
- siliconcompiler/report/dashboard/cli/__init__.py +14 -721
- siliconcompiler/report/dashboard/cli/board.py +899 -0
- siliconcompiler/report/dashboard/web/__init__.py +10 -10
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -4
- siliconcompiler/report/dashboard/web/components/flowgraph.py +3 -3
- siliconcompiler/report/dashboard/web/components/graph.py +6 -3
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +4 -3
- siliconcompiler/report/html_report.py +2 -3
- siliconcompiler/report/report.py +22 -11
- siliconcompiler/report/summary_image.py +1 -1
- siliconcompiler/report/summary_table.py +3 -3
- siliconcompiler/report/utils.py +21 -14
- siliconcompiler/scheduler/__init__.py +234 -1206
- siliconcompiler/scheduler/run_node.py +2 -1
- siliconcompiler/scheduler/send_messages.py +11 -5
- siliconcompiler/scheduler/slurm.py +11 -44
- siliconcompiler/scheduler/taskscheduler.py +320 -0
- siliconcompiler/schema/__init__.py +19 -2
- siliconcompiler/schema/baseschema.py +493 -0
- siliconcompiler/schema/cmdlineschema.py +250 -0
- siliconcompiler/{sphinx_ext → schema/docs}/__init__.py +3 -1
- siliconcompiler/{sphinx_ext → schema/docs}/dynamicgen.py +63 -81
- siliconcompiler/{sphinx_ext → schema/docs}/schemagen.py +73 -85
- siliconcompiler/{sphinx_ext → schema/docs}/utils.py +12 -13
- siliconcompiler/schema/editableschema.py +136 -0
- siliconcompiler/schema/journalingschema.py +238 -0
- siliconcompiler/schema/namedschema.py +41 -0
- siliconcompiler/schema/packageschema.py +101 -0
- siliconcompiler/schema/parameter.py +791 -0
- siliconcompiler/schema/parametertype.py +323 -0
- siliconcompiler/schema/parametervalue.py +736 -0
- siliconcompiler/schema/safeschema.py +37 -0
- siliconcompiler/schema/schema_cfg.py +109 -1789
- siliconcompiler/schema/utils.py +5 -68
- siliconcompiler/schema_obj.py +119 -0
- siliconcompiler/tool.py +1416 -0
- siliconcompiler/tools/_common/__init__.py +6 -10
- siliconcompiler/tools/_common/asic.py +5 -5
- siliconcompiler/tools/_common/sdc/sc_constraints.sdc +1 -1
- siliconcompiler/tools/bluespec/convert.py +9 -8
- siliconcompiler/tools/builtin/_common.py +9 -2
- siliconcompiler/tools/builtin/concatenate.py +7 -3
- siliconcompiler/tools/builtin/minimum.py +7 -2
- siliconcompiler/tools/builtin/mux.py +8 -2
- siliconcompiler/tools/builtin/nop.py +7 -2
- siliconcompiler/tools/builtin/verify.py +11 -5
- siliconcompiler/tools/chisel/convert.py +10 -10
- siliconcompiler/tools/genfasm/bitstream.py +3 -3
- siliconcompiler/tools/ghdl/convert.py +1 -1
- siliconcompiler/tools/icarus/compile.py +4 -4
- siliconcompiler/tools/icepack/bitstream.py +6 -1
- siliconcompiler/tools/klayout/convert_drc_db.py +5 -0
- siliconcompiler/tools/klayout/drc.py +2 -2
- siliconcompiler/tools/klayout/klayout_export.py +0 -1
- siliconcompiler/tools/klayout/klayout_show.py +6 -6
- siliconcompiler/tools/klayout/klayout_utils.py +15 -22
- siliconcompiler/tools/netgen/count_lvs.py +2 -2
- siliconcompiler/tools/netgen/lvs.py +1 -1
- siliconcompiler/tools/nextpnr/apr.py +6 -1
- siliconcompiler/tools/nextpnr/nextpnr.py +4 -4
- siliconcompiler/tools/openroad/_apr.py +15 -2
- siliconcompiler/tools/openroad/rdlroute.py +3 -3
- siliconcompiler/tools/openroad/scripts/apr/postamble.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +5 -5
- siliconcompiler/tools/openroad/scripts/apr/sc_antenna_repair.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_clock_tree_synthesis.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_endcap_tapcell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillercell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillmetal_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +3 -9
- siliconcompiler/tools/openroad/scripts/apr/sc_macro_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_metrics.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_pin_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_power_grid.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_timing.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +75 -1
- siliconcompiler/tools/openroad/scripts/common/read_input_files.tcl +1 -7
- siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/write_images.tcl +28 -3
- siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/sc_show.tcl +6 -6
- siliconcompiler/tools/opensta/scripts/sc_timing.tcl +10 -0
- siliconcompiler/tools/opensta/timing.py +11 -0
- siliconcompiler/tools/slang/__init__.py +13 -13
- siliconcompiler/tools/slang/elaborate.py +6 -6
- siliconcompiler/tools/slang/lint.py +1 -3
- siliconcompiler/tools/surelog/parse.py +4 -4
- siliconcompiler/tools/sv2v/convert.py +20 -3
- siliconcompiler/tools/verilator/compile.py +2 -2
- siliconcompiler/tools/verilator/verilator.py +3 -3
- siliconcompiler/tools/vpr/_xml_constraint.py +8 -8
- siliconcompiler/tools/vpr/place.py +1 -1
- siliconcompiler/tools/vpr/route.py +4 -4
- siliconcompiler/tools/vpr/screenshot.py +1 -1
- siliconcompiler/tools/vpr/show.py +5 -5
- siliconcompiler/tools/vpr/vpr.py +24 -24
- siliconcompiler/tools/xdm/convert.py +2 -2
- siliconcompiler/tools/xyce/simulate.py +1 -1
- siliconcompiler/tools/yosys/prepareLib.py +2 -2
- siliconcompiler/tools/yosys/sc_synth_asic.tcl +111 -63
- siliconcompiler/tools/yosys/screenshot.py +1 -1
- siliconcompiler/tools/yosys/syn_asic.py +7 -7
- siliconcompiler/toolscripts/_tools.json +12 -10
- siliconcompiler/toolscripts/rhel8/install-chisel.sh +9 -2
- siliconcompiler/toolscripts/rhel8/install-icarus.sh +10 -3
- siliconcompiler/toolscripts/rhel8/install-klayout.sh +8 -1
- siliconcompiler/toolscripts/rhel8/install-magic.sh +9 -2
- siliconcompiler/toolscripts/rhel8/install-montage.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-netgen.sh +9 -2
- siliconcompiler/toolscripts/rhel8/install-slang.sh +11 -4
- siliconcompiler/toolscripts/rhel8/install-surelog.sh +9 -2
- siliconcompiler/toolscripts/rhel8/install-sv2v.sh +11 -4
- siliconcompiler/toolscripts/rhel8/install-verible.sh +11 -3
- siliconcompiler/toolscripts/rhel8/install-verilator.sh +10 -3
- siliconcompiler/toolscripts/rhel8/install-xyce.sh +15 -10
- siliconcompiler/toolscripts/rhel9/install-chisel.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-ghdl.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-gtkwave.sh +10 -3
- siliconcompiler/toolscripts/rhel9/install-icarus.sh +10 -3
- siliconcompiler/toolscripts/rhel9/install-klayout.sh +8 -1
- siliconcompiler/toolscripts/rhel9/install-magic.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-montage.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-netgen.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-openroad.sh +16 -3
- siliconcompiler/toolscripts/rhel9/install-opensta.sh +17 -5
- siliconcompiler/toolscripts/rhel9/install-slang.sh +11 -4
- siliconcompiler/toolscripts/rhel9/install-surelog.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-sv2v.sh +11 -4
- siliconcompiler/toolscripts/rhel9/install-verible.sh +11 -3
- siliconcompiler/toolscripts/rhel9/install-verilator.sh +10 -3
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-xdm.sh +10 -2
- siliconcompiler/toolscripts/rhel9/install-xyce.sh +15 -10
- siliconcompiler/toolscripts/rhel9/install-yosys-moosic.sh +9 -2
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +10 -3
- siliconcompiler/toolscripts/rhel9/install-yosys-slang.sh +10 -2
- siliconcompiler/toolscripts/rhel9/install-yosys.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-bambu.sh +10 -2
- siliconcompiler/toolscripts/ubuntu20/install-bluespec.sh +10 -3
- siliconcompiler/toolscripts/ubuntu20/install-chisel.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-ghdl.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-gtkwave.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-icarus.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-icepack.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-klayout.sh +8 -1
- siliconcompiler/toolscripts/ubuntu20/install-magic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-montage.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-netgen.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-nextpnr.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-openroad.sh +16 -3
- siliconcompiler/toolscripts/ubuntu20/install-opensta.sh +16 -5
- siliconcompiler/toolscripts/ubuntu20/install-slang.sh +11 -4
- siliconcompiler/toolscripts/ubuntu20/install-slurm.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-surelog.sh +10 -2
- siliconcompiler/toolscripts/ubuntu20/install-sv2v.sh +11 -4
- siliconcompiler/toolscripts/ubuntu20/install-verible.sh +11 -3
- siliconcompiler/toolscripts/ubuntu20/install-verilator.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-xdm.sh +10 -2
- siliconcompiler/toolscripts/ubuntu20/install-xyce.sh +13 -8
- siliconcompiler/toolscripts/ubuntu20/install-yosys-moosic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu20/install-yosys.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-bambu.sh +10 -2
- siliconcompiler/toolscripts/ubuntu22/install-bluespec.sh +10 -3
- siliconcompiler/toolscripts/ubuntu22/install-chisel.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-ghdl.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-gtkwave.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-icarus.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-icepack.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-klayout.sh +8 -1
- siliconcompiler/toolscripts/ubuntu22/install-magic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-montage.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-netgen.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-nextpnr.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-openroad.sh +16 -3
- siliconcompiler/toolscripts/ubuntu22/install-opensta.sh +17 -5
- siliconcompiler/toolscripts/ubuntu22/install-slang.sh +11 -4
- siliconcompiler/toolscripts/ubuntu22/install-slurm.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-surelog.sh +10 -2
- siliconcompiler/toolscripts/ubuntu22/install-sv2v.sh +11 -4
- siliconcompiler/toolscripts/ubuntu22/install-verible.sh +11 -3
- siliconcompiler/toolscripts/ubuntu22/install-verilator.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +9 -4
- siliconcompiler/toolscripts/ubuntu22/install-xdm.sh +10 -2
- siliconcompiler/toolscripts/ubuntu22/install-xyce.sh +13 -8
- siliconcompiler/toolscripts/ubuntu22/install-yosys-moosic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +10 -3
- siliconcompiler/toolscripts/ubuntu22/install-yosys-slang.sh +10 -2
- siliconcompiler/toolscripts/ubuntu22/install-yosys.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-bambu.sh +12 -4
- siliconcompiler/toolscripts/ubuntu24/install-bluespec.sh +10 -3
- siliconcompiler/toolscripts/ubuntu24/install-chisel.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-ghdl.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-gtkwave.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-icarus.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-icepack.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-klayout.sh +8 -1
- siliconcompiler/toolscripts/ubuntu24/install-magic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-montage.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-netgen.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-nextpnr.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-openroad.sh +16 -3
- siliconcompiler/toolscripts/ubuntu24/install-opensta.sh +17 -5
- siliconcompiler/toolscripts/ubuntu24/install-slang.sh +11 -4
- siliconcompiler/toolscripts/ubuntu24/install-slurm.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-surelog.sh +10 -2
- siliconcompiler/toolscripts/ubuntu24/install-sv2v.sh +11 -4
- siliconcompiler/toolscripts/ubuntu24/install-verible.sh +11 -3
- siliconcompiler/toolscripts/ubuntu24/install-verilator.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +9 -4
- siliconcompiler/toolscripts/ubuntu24/install-xdm.sh +10 -2
- siliconcompiler/toolscripts/ubuntu24/install-xyce.sh +13 -8
- siliconcompiler/toolscripts/ubuntu24/install-yosys-moosic.sh +9 -2
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +10 -3
- siliconcompiler/toolscripts/ubuntu24/install-yosys-slang.sh +10 -2
- siliconcompiler/toolscripts/ubuntu24/install-yosys.sh +9 -2
- siliconcompiler/utils/__init__.py +19 -112
- siliconcompiler/utils/flowgraph.py +244 -0
- siliconcompiler/{issue.py → utils/issue.py} +18 -25
- siliconcompiler/utils/logging.py +3 -4
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.1.dist-info}/METADATA +9 -8
- siliconcompiler-0.33.1.dist-info/RECORD +488 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.1.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.1.dist-info}/entry_points.txt +8 -8
- siliconcompiler/schema/schema_obj.py +0 -1936
- siliconcompiler/toolscripts/ubuntu20/install-vpr.sh +0 -29
- siliconcompiler/toolscripts/ubuntu20/install-yosys-parmys.sh +0 -61
- siliconcompiler-0.32.3.dist-info/RECORD +0 -470
- /siliconcompiler/{templates → data/templates}/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/general.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/email/summary.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/issue/README.txt +0 -0
- /siliconcompiler/{templates → data/templates}/issue/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/issue/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.py.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.sh.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/requirements.txt +0 -0
- /siliconcompiler/{templates → data/templates}/replay/setup.sh +0 -0
- /siliconcompiler/{templates → data/templates}/report/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.css +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.js +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap_LICENSE.md +0 -0
- /siliconcompiler/{templates → data/templates}/report/sc_report.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/manifest.tcl.j2 +0 -0
- /siliconcompiler/{units.py → utils/units.py} +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.1.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.1.dist-info}/top_level.txt +0 -0
siliconcompiler/core.py
CHANGED
@@ -11,13 +11,16 @@ import logging
import hashlib
import shutil
import importlib
- import inspect
import textwrap
import graphviz
import codecs
- import
+ import csv
+ import yaml
from inspect import getfullargspec
- from siliconcompiler
+ from siliconcompiler import Schema
+ from siliconcompiler.schema import SCHEMA_VERSION, PerNode, JournalingSchema, EditableSchema
+ from siliconcompiler.schema.parametertype import NodeType
+ from siliconcompiler.schema.parametervalue import FileNodeValue, PathNodeValue
from siliconcompiler.schema import utils as schema_utils
from siliconcompiler import utils
from siliconcompiler.utils.logging import SCColorLoggerFormatter, \
@@ -34,12 +37,10 @@ from siliconcompiler.report.dashboard import DashboardType
from siliconcompiler import package as sc_package
import glob
from siliconcompiler.scheduler import run as sc_runner
- from siliconcompiler.flowgraph import
- _get_pruned_node_inputs, _get_flowgraph_exit_nodes, get_executed_nodes, \
- _get_flowgraph_execution_order, _check_flowgraph_io, \
- _get_flowgraph_information
+ from siliconcompiler.utils.flowgraph import _check_flowgraph_io, _get_flowgraph_information
from siliconcompiler.tools._common import get_tool_task
from types import FunctionType, ModuleType
+ from siliconcompiler.flowgraph import RuntimeFlowgraph


class Chip:
@@ -388,7 +389,7 @@ class Chip:

is_list = '[' in paramtype

- for vals, step, index in self.schema.
+ for vals, step, index in self.schema.get(*key, field=None).getvalues():
if not vals:
continue
if not self.get(*key, field='pernode').is_never():
@@ -397,9 +398,10 @@ class Chip:
if index is None:
index = Schema.GLOBAL_KEY

+ packages = self.get(*key, field='package', step=step, index=index)
if not is_list:
vals = [vals]
-
+ packages = [packages]
if len(packages) == len(vals):
continue

@@ -476,7 +478,6 @@ class Chip:
progname=progname,
description=description,
switchlist=switchlist,
- input_map=input_map,
additional_args=additional_args,
version=_metadata.version,
print_banner=print_banner,
@@ -659,11 +660,10 @@ class Chip:

elif isinstance(use_module, (Library, Chip)):
self._loaded_modules['libs'].append(use_module.design)
- cfg = use_module.schema.cfg
keep_inputs = True
if not isinstance(use_module, Library):
keep_inputs = False
- self.__import_library(use_module.design,
+ self.__import_library(use_module.design, use_module,
keep_input=keep_inputs)

is_auto_enable = getattr(use_module, 'is_auto_enable', None)
@@ -677,26 +677,13 @@ class Chip:
raise ValueError(f"{module_name} returned an object with an "
f"unsupported type: {class_name}")

- def __import_data_sources(self,
- if 'package'
+ def __import_data_sources(self, schema):
+ if not schema.valid('package', 'source'):
return

- for source
-
-
-
- if 'path' not in config or \
- Schema.GLOBAL_KEY not in config['path']['node'] or \
- Schema.GLOBAL_KEY not in config['path']['node'][Schema.GLOBAL_KEY]:
- continue
-
- path = config['path']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
-
- ref = None
- if 'ref' in config and \
- Schema.GLOBAL_KEY in config['ref']['node'] and \
- Schema.GLOBAL_KEY in config['ref']['node'][Schema.GLOBAL_KEY]:
- ref = config['ref']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
+ for source in schema.getkeys('package', 'source'):
+ path = schema.get('package', 'source', source, 'path')
+ ref = schema.get('package', 'source', source, 'ref')

self.register_source(
name=source,
@@ -714,15 +701,24 @@ class Chip:

importname = module.design

-
+ if hasattr(module, 'schema'):
+ module = module.schema

- if importname
+ if self.valid(group, importname):
self.logger.warning(f'Overwriting existing {group} {importname}')
- del src_cfg[importname]

-
-
-
+ try:
+ insert_schema = EditableSchema(module).search(group, importname)
+ except KeyError:
+ self.logger.warning(f'{group} {importname} is not valid')
+ return
+
+ EditableSchema(self.schema).insert(
+ group,
+ importname,
+ insert_schema,
+ clobber=True)
+ self.__import_data_sources(module)

###########################################################################
def help(self, *keypath):
@@ -745,13 +741,15 @@ class Chip:

# Fetch Values

-
-
-
-
-
-
-
+ param = self.get(*keypath, field=None)
+
+ description = param.get(field='shorthelp')
+ typestr = param.get(field='type')
+ switchstr = str(param.get(field='switch'))
+ defstr = str(param.default.get())
+ requirement = str(param.get(field='require'))
+ helpstr = param.get(field='help')
+ example = param.get(field='example')

examplestr = ("\nExamples: " + example[0] + ''.join(
["\n " + ex for ex in example[1:]]))
@@ -809,9 +807,12 @@ class Chip:
>>> check = chip.valid('metric', 'foo', '0', 'tasktime', default_valid=True)
Returns True, even if "foo" and "0" aren't in current configuration.
"""
+ if job:
+ return self.schema.history(job).valid(*keypath,
+ default_valid=default_valid,
+ check_complete=check_complete)
return self.schema.valid(*keypath,
default_valid=default_valid,
- job=job,
check_complete=check_complete)

###########################################################################
@@ -851,7 +852,7 @@ class Chip:
strict = self.schema.get('option', 'strict')
if field == 'value' and strict:
pernode = self.schema.get(*keypath, field='pernode')
- if pernode ==
+ if pernode == PerNode.OPTIONAL and \
(step is None or index is None) and \
(Schema.GLOBAL_KEY not in (step, index)):  # allow explicit access to global
self.error(
@@ -861,7 +862,10 @@ class Chip:
)
return None

-
+ if job:
+ return self.schema.history(job).get(*keypath, field=field, step=step, index=index)
+
+ return self.schema.get(*keypath, field=field, step=step, index=index)
except (ValueError, TypeError) as e:
self.error(str(e))
return None
@@ -894,7 +898,10 @@ class Chip:
self.logger.debug('Getting all schema parameter keys.')

try:
-
+ if job:
+ return self.schema.history(job).getkeys(*keypath)
+
+ return self.schema.getkeys(*keypath)
except (ValueError, TypeError) as e:
self.error(str(e))
return None
@@ -937,23 +944,17 @@ class Chip:
return None

###########################################################################
- def __add_set_package(self,
-
-
-
-
-
-
-
-
-
-
- if add:
- self.schema.add(*keypath, package, field='package',
- step=step, index=index)
- else:
- self.schema.set(*keypath, package, field='package',
- step=step, index=index, clobber=clobber)
+ def __add_set_package(self, value_success, package):
+ if not isinstance(value_success, (list, tuple)):
+ value_success = [value_success]
+ if not isinstance(package, (list, tuple)):
+ package = [package]
+ if len(value_success) != len(package):
+ package = len(value_success) * package
+
+ for val, package in zip(value_success, package):
+ if val.type in ('file', 'dir'):
+ val.set(package, field='package')

###########################################################################
def set(self, *args, field='value', clobber=True, step=None, index=None, package=None):
@@ -999,8 +1000,8 @@ class Chip:
try:
value_success = self.schema.set(*keypath, value, field=field, clobber=clobber,
step=step, index=index)
- if field == 'value' and value_success:
- self.__add_set_package(
+ if field == 'value' and value_success and package:
+ self.__add_set_package(value_success, package)

except (ValueError, TypeError) as e:
self.error(e)
@@ -1086,8 +1087,8 @@ class Chip:
try:
value_success = self.schema.add(*args, field=field, step=step, index=index)

- if field == 'value' and value_success:
- self.__add_set_package(
+ if field == 'value' and value_success and package:
+ self.__add_set_package(value_success, package)
except (ValueError, TypeError) as e:
self.error(str(e))

@@ -1109,8 +1110,10 @@ class Chip:
package_name = f'flist-{os.path.basename(filename)}'
package_dir = os.path.dirname(os.path.abspath(filename))

+ env_vars = utils.get_env_vars(self, None, None)
+
def __make_path(rel, path):
- path =
+ path = PathNodeValue.resolve_env_vars(path, envvars=env_vars)
if os.path.isabs(path):
if path.startswith(rel):
return os.path.relpath(path, rel), package_name
@@ -1281,7 +1284,7 @@ class Chip:
"""
strict = self.get('option', 'strict')
pernode = self.get(*keypath, field='pernode')
- if strict and pernode ==
+ if strict and pernode == PerNode.OPTIONAL and (step is None or index is None):
self.error(
f"Invalid args to find_files() of keypath {keypath}: step and "
"index are required for reading from this parameter while "
@@ -1313,7 +1316,9 @@ class Chip:
"""Internal find_files() that allows you to skip step/index for optional
params, regardless of [option, strict]."""

-
+ param = self.get(*keypath, field=None, job=job)
+
+ paramtype = param.get(field='type')

if 'file' not in paramtype and 'dir' not in paramtype:
self.error('Can only call find_files on file or dir types')
@@ -1321,15 +1326,15 @@ class Chip:

is_list = bool(re.match(r'\[', paramtype))

- paths =
- dependencies =
-
+ paths = param.get(step=step, index=index)
+ dependencies = param.get(field='package', step=step, index=index)
+
# Convert to list if we have scalar
if not is_list:
# Dependencies are always specified as list with default []
# If paths is a scalar we convert the default [] to [None]
# to have a matching list with one element
- if dependencies
+ if not dependencies:
dependencies = [None]
paths = [paths]

@@ -1339,7 +1344,6 @@ class Chip:
dependencies = [dependencies[list_index]]

paths = self.__convert_paths_to_posix(paths)
- dependencies = self.__convert_paths_to_posix(dependencies)

result = []

@@ -1373,29 +1377,32 @@ class Chip:

if search_paths:
search_paths = self.__convert_paths_to_posix(search_paths)
+ else:
+ search_paths = [self.cwd]

+ env_vars = utils.get_env_vars(self, step, index)
for (dependency, path) in zip(dependencies, paths):
-
-
-
-
-
-
- depdendency_path = os.path.abspath(
- os.path.join(sc_package.path(self, dependency), path))
- if os.path.exists(depdendency_path):
- result.append(depdendency_path)
+ faux_param = FileNodeValue()
+ faux_param.set(path)
+ try:
+ if dependency:
+ faux_param.set(dependency, field='package')
+ faux_search = [os.path.abspath(os.path.join(sc_package.path(self, dependency)))]
else:
-
-
-
-
-
-
-
-
-
-
+ faux_search = search_paths
+ resolved = faux_param.resolve_path(
+ envvars=env_vars,
+ search=faux_search,
+ collection_dir=collection_dir)
+ except FileNotFoundError:
+ resolved = None
+ if not missing_ok:
+ if dependency:
+ self.error(f'Could not find {path} in {dependency}. [{",".join(keypath)}]')
+ else:
+ self.error(f'Could not find {path}. [{",".join(keypath)}]')
+
+ result.append(resolved)

if self._relative_path and not abs_path_only:
rel_result = []
@@ -1408,6 +1415,8 @@ class Chip:

# Convert back to scalar if that was original type
if not is_list:
+ if not result:
+ return None
return result[0]

return result
@@ -1420,33 +1429,20 @@ class Chip:

Returns none if not found
"""
- if not
- return None
-
- collected_files = os.listdir(collected_dir)
- if not collected_files:
+ if not collected_dir:
return None

-
-
-
- # have been imported
-
- n += 1
- basename = str(pathlib.PurePosixPath(*path_paths[0:n]))
- endname = str(pathlib.PurePosixPath(*path_paths[n:]))
-
- import_name = utils.get_hashed_filename(basename, package=package)
- if import_name not in collected_files:
- continue
+ faux_param = FileNodeValue()
+ faux_param.set(path)
+ faux_param.set(package, field='package')

-
-
-
-
- if os.path.exists(abspath):
- return abspath
+ try:
+ resolved = faux_param.resolve_path(collection_dir=collected_dir)
+ except FileNotFoundError:
+ return None

+ if resolved.startswith(collected_dir):
+ return resolved
return None

def find_node_file(self, path, step, jobname=None, index='0'):
@@ -1530,7 +1526,7 @@ class Chip:
# only do something if type is file or dir
continue

- values = self.schema.
+ values = self.schema.get(*keypath, field=None).getvalues()
for value, step, index in values:
if not value:
continue
@@ -1565,7 +1561,7 @@ class Chip:
# exist
continue

- for check_files, step, index in self.schema.
+ for check_files, step, index in self.schema.get(*keypath, field=None).getvalues():
if not check_files:
continue

@@ -1623,11 +1619,26 @@ class Chip:
error = True
self.logger.error(f"flowgraph {flow} not defined.")

-
+ runtime = RuntimeFlowgraph(
+ self.schema.get("flowgraph", flow, field='schema'),
+ args=(self.get('arg', 'step'), self.get('arg', 'index')),
+ from_steps=self.get('option', 'from'),
+ to_steps=self.get('option', 'to'),
+ prune_nodes=self.get('option', 'prune'))
+
+ nodes = [node for node in runtime.get_nodes()
if self.get('record', 'status', step=node[0], index=node[1])
!= NodeStatus.SKIPPED]
+ flow_schema = self.schema.get("flowgraph", flow, field="schema")
+ runtime_io = RuntimeFlowgraph(
+ flow_schema,
+ args=(self.get('arg', 'step'), self.get('arg', 'index')),
+ from_steps=set([step for step, _ in flow_schema.get_entry_nodes()]),
+ prune_nodes=self.get('option', 'prune'))
+
for (step, index) in nodes:
- for in_step, in_index in
+ for in_step, in_index in runtime_io.get_node_inputs(
+ step, index, record=self.schema.get("record", field="schema")):
if (in_step, in_index) in nodes:
# we're gonna run this step, OK
continue
@@ -1652,7 +1663,7 @@ class Chip:
lib_node_check.append((step, None))
lib_node_check.extend(nodes)
for lib_key in libs_to_check:
- for val, step, index in self.schema.
+ for val, step, index in self.schema.get(*lib_key, field=None).getvalues():
if (step, index) in lib_node_check:
libraries.update(val)

@@ -1666,9 +1677,8 @@ class Chip:
for key in allkeys:
keypath = ",".join(key)
if 'default' not in key and 'history' not in key and 'library' not in key:
-
-
- if key_empty and requirement:
+ param = self.get(*key, field=None)
+ if param.is_empty() and param.get(field='require'):
error = True
self.logger.error(f"Global requirement missing for [{keypath}].")

@@ -1710,16 +1720,16 @@ class Chip:
step=step, index=index)
for item in all_required:
keypath = item.split(',')
- if self.schema.
+ if self.schema.get(*keypath, field=None).is_empty():
error = True
self.logger.error(f"Value empty for {keypath} for {tool}.")

task_run = getattr(task_module, 'run', None)
- if self.schema.
+ if self.schema.get('tool', tool, 'exe', field=None).is_empty() and not task_run:
error = True
self.logger.error(f'No executable or run() function specified for {tool}/{task}')

- if not _check_flowgraph_io(self, nodes=nodes):
+ if not error and not _check_flowgraph_io(self, nodes=nodes):
error = True

return not error
@@ -1743,26 +1753,8 @@ class Chip:
Loads the file mychip.json into the current Chip object.
"""

- # Read from file into new schema object
- schema = Schema(manifest=filename, logger=self.logger)
-
# Merge data in schema with Chip configuration
- self.schema.
-
- # Read history, if we're not already reading into a job
- if 'history' in schema.cfg and not job:
- for historic_job in schema.cfg['history'].keys():
- self.schema.merge_manifest(schema.history(historic_job),
- job=historic_job,
- clear=clear,
- clobber=clobber)
-
- # TODO: better way to handle this?
- if 'library' in schema.cfg:
- for libname in schema.cfg['library'].keys():
- self.__import_library(libname, schema.cfg['library'][libname],
- job=job,
- clobber=clobber)
+ self.schema.read_manifest(filename)

###########################################################################
def write_manifest(self, filename, prune=False, abspath=False):
@@ -1796,12 +1788,14 @@ class Chip:
if abspath:
schema = self.__abspath()

- if
-
-
+ if re.search(r'(\.json|\.sup)(\.gz)*$', filepath):
+ schema.write_manifest(filepath)
+ return

-
-
+ tcl_record = False
+ if isinstance(schema, JournalingSchema):
+ tcl_record = "get" in schema.get_journaling_types()
+ schema = schema.get_base_schema()

is_csv = re.search(r'(\.csv)(\.gz)*$', filepath)

@@ -1817,26 +1811,85 @@ class Chip:

# format specific printing
try:
- if re.search(r'(\.
-
-
-
+ if re.search(r'(\.yaml|\.yml)(\.gz)*$', filepath):
+ class YamlIndentDumper(yaml.Dumper):
+ def increase_indent(self, flow=False, indentless=False):
+ return super().increase_indent(flow=flow, indentless=False)
+
+ fout.write(yaml.dump(schema.getdict(), Dumper=YamlIndentDumper,
+ default_flow_style=False))
+
elif re.search(r'(\.tcl)(\.gz)*$', filepath):
# TCL only gets values associated with the current node.
step = self.get('arg', 'step')
index = self.get('arg', 'index')
-
+ self.__write_tcl(fout,
+ schema,
prefix="dict set sc_cfg",
step=step,
index=index,
- template=utils.get_file_template('tcl/manifest.tcl.j2')
+ template=utils.get_file_template('tcl/manifest.tcl.j2'),
+ record=tcl_record)
elif is_csv:
-
+ csvwriter = csv.writer(fout)
+ csvwriter.writerow(['Keypath', 'Value'])
+
+ allkeys = schema.allkeys()
+ for key in allkeys:
+ keypath = ','.join(key)
+ param = schema.get(*key, field=None)
+ for value, step, index in param.getvalues():
+ if step is None and index is None:
+ keypath = ','.join(key)
+ elif index is None:
+ keypath = ','.join([*key, step, 'default'])
+ else:
+ keypath = ','.join([*key, step, index])
+
+ if isinstance(value, list):
+ for item in value:
+ csvwriter.writerow([keypath, item])
+ else:
+ csvwriter.writerow([keypath, value])
else:
self.error(f'File format not recognized {filepath}')
finally:
fout.close()

+ def __write_tcl(self, fout, schema,
+ prefix="", step=None, index=None, template=None, record=False):
+ tcl_set_cmds = []
+ for key in sorted(schema.allkeys()):
+ # print out all non default values
+ if 'default' in key:
+ continue
+
+ param = schema.get(*key, field=None)
+
+ # create a TCL dict
+ keystr = ' '.join([NodeType.to_tcl(keypart, 'str') for keypart in key])
+
+ valstr = param.gettcl(step=step, index=index)
+ if valstr is None:
+ continue
+
+ # Ensure empty values get something
+ if valstr == '':
+ valstr = '{}'
+
+ tcl_set_cmds.append(f"{prefix} {keystr} {valstr}")
+
+ if template:
+ fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
+ scroot=os.path.abspath(
+ os.path.join(os.path.dirname(__file__))),
+ record_access=record,
+ record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
+ else:
+ for cmd in tcl_set_cmds:
+ fout.write(cmd + '\n')
+ fout.write('\n')
+
###########################################################################
def check_checklist(self, standard, items=None,
check_ok=False, verbose=False, require_reports=True):
@@ -1978,15 +2031,17 @@ class Chip:
self.get('tool', tool, 'task', task, 'report', metric, job=job,
step=step, index=index)

- if
+ if allow_missing_reports and not has_reports:
# No reports available and it is allowed
continue

+ reports = []
try:
-
-
-
-
+ if has_reports:
+ reports = self.find_files('tool', tool, 'task', task, 'report', metric,
+ job=job,
+ step=step, index=index,
+ missing_ok=not require_reports)
except SiliconCompilerError:
reports = []
continue
@@ -2022,33 +2077,38 @@ class Chip:
return not error

###########################################################################
- def __import_library(self, libname,
+ def __import_library(self, libname, library, job=None, clobber=True, keep_input=True):
'''Helper to import library with config 'libconfig' as a library
'libname' in current Chip object.'''
- if job:
- cfg = self.schema.cfg['history'][job]['library']
- else:
- cfg = self.schema.cfg['library']

- if 'library'
- for sublib_name, sublibcfg in libcfg['library'].items():
- self.__import_library(sublib_name, sublibcfg,
- job=job, clobber=clobber, keep_input=keep_input)
-
- if libname in cfg:
+ if libname in self.schema.getkeys('library'):
if not clobber:
return
+ if hasattr(library, 'schema'):
+ library = library.schema
+
+ try:
+ for sublib in library.getkeys('library'):
+ self.__import_library(sublib,
+ EditableSchema(library).search('library', sublib),
+ job=job, clobber=clobber, keep_input=keep_input)
+ except KeyError:
+ pass

- self.__import_data_sources(
- cfg[libname] = {}
+ self.__import_data_sources(library)

# Only keep some sections to avoid recursive bloat
keeps = ['asic', 'design', 'fpga', 'option', 'output', 'package']
if keep_input:
keeps.append('input')
-
-
-
+
+ importlibrary = library.copy()
+ edit_lib = EditableSchema(importlibrary)
+ for section in list(importlibrary.getkeys()):
+ if section not in keeps:
+ edit_lib.remove(section)
+
+ EditableSchema(self.schema).insert("library", libname, importlibrary, clobber=True)

###########################################################################
def write_flowgraph(self, filename, flow=None,
@@ -2333,6 +2393,8 @@ class Chip:

nodes = {}

+ search_schema = EditableSchema(self.schema)
+
def collect_library(root_type, lib, name=None):
if not name:
name = lib.design
@@ -2371,15 +2433,15 @@ class Chip:

for in_lib in lib.get('option', 'library',
step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
- collect_library("library",
+ collect_library("library", search_schema.search('library', in_lib),
name=in_lib)
for in_lib in lib.get('asic', 'logiclib',
step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
- collect_library("logiclib",
+ collect_library("logiclib", search_schema.search('library', in_lib),
name=in_lib)
for in_lib in lib.get('asic', 'macrolib',
step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
- collect_library("macrolib",
+ collect_library("macrolib", search_schema.search('library', in_lib),
name=in_lib)

collect_library("design", self)
@@ -2417,6 +2479,8 @@ class Chip:
all_libraries = self.getkeys('library')

def swap(*key):
+ if not self.schema.valid(*key):
+ return
if step is not None:
r_step = step
r_index = index
@@ -2432,7 +2496,7 @@ class Chip:
list(map(lambda x: new_library if x == org_library else x, val)),
step=r_step, index=r_index)
else:
- for val, r_step, r_index in self.schema.
+ for val, r_step, r_index in self.schema.get(*key, field=None).getvalues():
if r_step is None:
r_step = Schema.GLOBAL_KEY
if r_index is None:
@@ -2510,7 +2574,7 @@ class Chip:
is_file = re.search('file', leaftype)
if is_dir or is_file:
if self.get(*key, field='copy'):
- for value, step, index in self.schema.
+ for value, step, index in self.schema.get(*key, field=None).getvalues():
if not value:
continue
packages = self.get(*key, field='package', step=step, index=index)
@@ -2694,9 +2758,15 @@ class Chip:
flowgraph_nodes = [(step, index)]
elif step:
flow = self.get('option', 'flow')
- flowgraph_nodes =
+ flowgraph_nodes = [(step, index) for index in self.getkeys("flowgraph", flow, step)]
else:
-
+ flow = self.get('option', 'flow')
+ runtime = RuntimeFlowgraph(
+ self.schema.get("flowgraph", flow, field='schema'),
+ from_steps=self.get('option', 'from'),
+ to_steps=self.get('option', 'to'),
+ prune_nodes=self.get('option', 'prune'))
+ flowgraph_nodes = runtime.get_nodes()

if not archive_name:
if step and index:
@@ -2815,8 +2885,12 @@ class Chip:
if check:
# compare previous hash to new hash
oldhash = self.schema.get(*keypath, step=step, index=index, field='filehash')
+ if not isinstance(oldhash, list):
+ oldhash = [oldhash]
check_failed = False
for i, item in enumerate(oldhash):
+ if item is None:
+ continue
if item != hashlist[i]:
self.logger.error(f"Hash mismatch for [{keypath}]")
check_failed = True
@@ -2829,11 +2903,11 @@ class Chip:
set_step = None
set_index = None
pernode = self.get(*keypath, field='pernode')
- if pernode ==
+ if pernode == PerNode.REQUIRED:
set_step = step
set_index = index
- elif pernode ==
- for vals, key_step, key_index in self.schema.
+ elif pernode == PerNode.OPTIONAL:
+ for vals, key_step, key_index in self.schema.get(*keypath, field=None).getvalues():
if key_step == step and key_index == index and vals:
set_step = step
set_index = index
@@ -2918,9 +2992,12 @@ class Chip:

# display whole flowgraph if no from/to specified
flow = self.get('option', 'flow')
-
-
-
+ runtime = RuntimeFlowgraph(
+ self.schema.get("flowgraph", flow, field='schema'),
+ to_steps=self.get('option', 'to'),
+ prune_nodes=self.get('option', 'prune'))
+ _show_summary_table(self, flow, list(runtime.get_nodes()),
+ show_all_indices=show_all_indices)

# dashboard does not generate any data
self.logger.info('Dashboard at "sc-dashboard '
@@ -3015,40 +3092,15 @@ class Chip:
Creates a 'place' task with step='apr_place' and index=0 and binds it to the
'openroad' tool.
'''
+ from siliconcompiler import FlowgraphSchema
+ from siliconcompiler.schema import EditableSchema

- if
-
-
-
- index = str(index)
-
- # Determine task name and module
- task_module = None
- if (isinstance(task, str)):
- task_module = task
- elif inspect.ismodule(task):
- task_module = task.__name__
- self.modules[task_module] = task
+ if not self.schema.valid("flowgraph", flow):
+ graph = FlowgraphSchema(flow)
+ EditableSchema(self.schema).insert("flowgraph", flow, graph)
else:
-
-
- chip=self)
-
- task_parts = task_module.split('.')
- if len(task_parts) < 2:
- raise SiliconCompilerError(
- f"{task} is not a valid task, it must be associated with a tool '<tool>.<task>'.",
- chip=self)
- tool_name, task_name = task_parts[-2:]
-
- # bind tool to node
- self.set('flowgraph', flow, step, index, 'tool', tool_name)
- self.set('flowgraph', flow, step, index, 'task', task_name)
- self.set('flowgraph', flow, step, index, 'taskmodule', task_module)
-
- # set default weights
- for metric in self.getkeys('metric'):
- self.set('flowgraph', flow, step, index, 'weight', metric, 0)
+ graph = self.schema.get("flowgraph", flow, field="schema")
+ graph.node(step, task, index=index)

###########################################################################
def edge(self, flow, tail, head, tail_index=0, head_index=0):
@@ -3073,21 +3125,9 @@ class Chip:
>>> chip.edge('place', 'cts')
Creates a directed edge from place to cts.
'''
- head_index = str(head_index)
- tail_index = str(tail_index)
-
- for step in (head, tail):
- if step in (Schema.GLOBAL_KEY, 'default'):
- self.error(f'Illegal step name: {step} is reserved')
- return
-
- tail_node = (tail, tail_index)
- if tail_node in self.get('flowgraph', flow, head, head_index, 'input'):
- self.logger.warning(f'Edge from {tail}{tail_index} to {head}{head_index} already '
- 'exists, skipping')
- return

- self.
+ graph = self.schema.get("flowgraph", flow, field="schema")
+ graph.edge(tail, head, tail_index=tail_index, head_index=head_index)

###########################################################################
def remove_node(self, flow, step, index=None):
@@ -3103,34 +3143,8 @@ class Chip:
if flow not in self.getkeys('flowgraph'):
raise ValueError(f'{flow} is not in the manifest')

-
-
-
- if index is None:
- # Iterate over all indexes
- for index in self.getkeys('flowgraph', flow, step):
- self.remove_node(flow, step, index)
- return
-
- index = str(index)
- if index not in self.getkeys('flowgraph', flow, step):
- raise ValueError(f'{index} is not a valid index for {step} in {flow}')
-
- # Save input edges
- node = (step, index)
- node_inputs = self.get('flowgraph', flow, step, index, 'input')
- self.remove('flowgraph', flow, step, index)
-
- if len(self.getkeys('flowgraph', flow, step)) == 0:
- self.remove('flowgraph', flow, step)
-
- for flow_step in self.getkeys('flowgraph', flow):
- for flow_index in self.getkeys('flowgraph', flow, flow_step):
- inputs = self.get('flowgraph', flow, flow_step, flow_index, 'input')
- if node in inputs:
- inputs = [inode for inode in inputs if inode != node]
- inputs.extend(node_inputs)
- self.set('flowgraph', flow, flow_step, flow_index, 'input', set(inputs))
+ graph = self.schema.get("flowgraph", flow, field="schema")
+ graph.remove_node(step, index=index)

###########################################################################
def graph(self, flow, subflow, name=None):
@@ -3146,27 +3160,9 @@ class Chip:
>>> chip.graph('asicflow')
Instantiates a flow named 'asicflow'.
'''
-
-
-
- newstep = step
- else:
- newstep = name + "." + step
-
- for keys in self.allkeys('flowgraph', subflow, step):
- val = self.get('flowgraph', subflow, step, *keys)
- self.set('flowgraph', flow, newstep, *keys, val)
-
- if name is None:
- continue
-
- for index in self.getkeys('flowgraph', flow, newstep):
- # rename inputs
- all_inputs = self.get('flowgraph', flow, newstep, index, 'input')
- self.set('flowgraph', flow, newstep, index, 'input', [])
- for in_step, in_index in all_inputs:
- newin = name + "." + in_step
- self.add('flowgraph', flow, newstep, index, 'input', (newin, in_index))
+ graph = self.schema.get("flowgraph", flow, field="schema")
+ subgraph = self.schema.get("flowgraph", subflow, field="schema")
+ graph.graph(subgraph, name=name)

###########################################################################
def run(self, raise_exception=False):
@@ -3253,14 +3249,18 @@ class Chip:
search_nodes.append((sc_step, sc_index))
elif sc_step:
if flow is not None:
-
+ runtime = RuntimeFlowgraph(
+ self.schema.get("flowgraph", flow, field='schema'),
+ from_steps=self.get('option', 'from'),
+ to_steps=self.get('option', 'to'),
+ prune_nodes=self.get('option', 'prune'))
+ for check_step, check_index in runtime.get_nodes():
if sc_step == check_step:
search_nodes.append((check_step, check_index))
else:
if flow is not None:
- for nodes in
-
- reverse=True):
+ for nodes in self.schema.get(
+ "flowgraph", flow, field="schema").get_execution_order(reverse=True):
search_nodes.extend(nodes)

for ext in self._showtools.keys():
@@ -3334,7 +3334,7 @@ class Chip:
self.set('option', 'jobname', f'_{taskname}_{sc_job}_{sc_step}{sc_index}', clobber=True)

# Setup in step/index variables
- for
+ for step, index in self.get("flowgraph", "showflow", field="schema").get_nodes():
if step != taskname:
continue
show_tool, _ = get_tool_task(self, step, index, flow='showflow')
@@ -3356,7 +3356,8 @@ class Chip:
try:
self.run(raise_exception=True)
if screenshot:
- step, index =
+ step, index = self.schema.get("flowgraph", 'showflow',
+ field="schema").get_exit_nodes()[0]
success = self.find_result('png', step=step, index=index)
else:
success = True
@@ -3434,8 +3435,8 @@ class Chip:
if hasattr(self, 'logger'):
self.logger.error(msg)

- step = self.get('arg', 'step')
- index = self.get('arg', 'index')
+ step = self.schema.get('arg', 'step')
+ index = self.schema.get('arg', 'index')
if self.schema.get('option', 'continue', step=step, index=index):
self._error = True
return
@@ -3467,4 +3468,3 @@ class Chip:

# Reinitialize logger on restore
self._init_logger()
- self.schema._init_logger(self.logger)