siliconcompiler 0.32.2__py3-none-any.whl → 0.33.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/__init__.py +19 -2
- siliconcompiler/_metadata.py +3 -2
- siliconcompiler/apps/sc.py +2 -2
- siliconcompiler/apps/sc_install.py +3 -3
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +4 -4
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +5 -3
- siliconcompiler/asic.py +120 -0
- siliconcompiler/checklist.py +150 -0
- siliconcompiler/core.py +319 -345
- siliconcompiler/{templates → data/templates}/replay/replay.sh.j2 +2 -2
- siliconcompiler/flowgraph.py +803 -515
- siliconcompiler/fpga.py +84 -0
- siliconcompiler/metric.py +420 -0
- siliconcompiler/optimizer/vizier.py +2 -3
- siliconcompiler/package/__init__.py +29 -6
- siliconcompiler/pdk.py +415 -0
- siliconcompiler/record.py +449 -0
- siliconcompiler/remote/client.py +61 -19
- siliconcompiler/remote/schema.py +116 -112
- siliconcompiler/remote/server.py +3 -5
- siliconcompiler/report/__init__.py +3 -2
- siliconcompiler/report/dashboard/__init__.py +61 -170
- siliconcompiler/report/dashboard/cli/__init__.py +79 -0
- siliconcompiler/report/dashboard/cli/board.py +895 -0
- siliconcompiler/report/dashboard/web/__init__.py +196 -0
- siliconcompiler/report/dashboard/{components → web/components}/__init__.py +9 -8
- siliconcompiler/report/dashboard/{components → web/components}/flowgraph.py +3 -3
- siliconcompiler/report/dashboard/{components → web/components}/graph.py +7 -4
- siliconcompiler/report/dashboard/{layouts → web/layouts}/__init__.py +3 -3
- siliconcompiler/report/dashboard/{layouts → web/layouts}/_common.py +1 -1
- siliconcompiler/report/dashboard/{layouts → web/layouts}/vertical_flowgraph.py +5 -5
- siliconcompiler/report/dashboard/{layouts → web/layouts}/vertical_flowgraph_node_tab.py +6 -6
- siliconcompiler/report/dashboard/{layouts → web/layouts}/vertical_flowgraph_sac_tabs.py +6 -6
- siliconcompiler/report/dashboard/{state.py → web/state.py} +1 -1
- siliconcompiler/report/dashboard/{utils → web/utils}/__init__.py +4 -3
- siliconcompiler/report/dashboard/{viewer.py → web/viewer.py} +4 -4
- siliconcompiler/report/html_report.py +2 -3
- siliconcompiler/report/report.py +13 -7
- siliconcompiler/report/summary_image.py +1 -1
- siliconcompiler/report/summary_table.py +3 -3
- siliconcompiler/report/utils.py +11 -10
- siliconcompiler/scheduler/__init__.py +153 -286
- siliconcompiler/scheduler/run_node.py +2 -1
- siliconcompiler/scheduler/send_messages.py +4 -4
- siliconcompiler/scheduler/slurm.py +2 -2
- siliconcompiler/schema/__init__.py +19 -2
- siliconcompiler/schema/baseschema.py +493 -0
- siliconcompiler/schema/cmdlineschema.py +250 -0
- siliconcompiler/{sphinx_ext → schema/docs}/__init__.py +3 -1
- siliconcompiler/{sphinx_ext → schema/docs}/dynamicgen.py +63 -81
- siliconcompiler/{sphinx_ext → schema/docs}/schemagen.py +73 -85
- siliconcompiler/{sphinx_ext → schema/docs}/utils.py +12 -13
- siliconcompiler/schema/editableschema.py +136 -0
- siliconcompiler/schema/journalingschema.py +238 -0
- siliconcompiler/schema/namedschema.py +41 -0
- siliconcompiler/schema/packageschema.py +101 -0
- siliconcompiler/schema/parameter.py +791 -0
- siliconcompiler/schema/parametertype.py +323 -0
- siliconcompiler/schema/parametervalue.py +736 -0
- siliconcompiler/schema/safeschema.py +37 -0
- siliconcompiler/schema/schema_cfg.py +109 -1789
- siliconcompiler/schema/utils.py +5 -68
- siliconcompiler/schema_obj.py +119 -0
- siliconcompiler/tool.py +1308 -0
- siliconcompiler/tools/_common/__init__.py +8 -10
- siliconcompiler/tools/_common/sdc/sc_constraints.sdc +1 -1
- siliconcompiler/tools/bluespec/convert.py +7 -7
- siliconcompiler/tools/builtin/_common.py +1 -1
- siliconcompiler/tools/builtin/concatenate.py +2 -2
- siliconcompiler/tools/builtin/minimum.py +1 -1
- siliconcompiler/tools/builtin/mux.py +2 -1
- siliconcompiler/tools/builtin/nop.py +1 -1
- siliconcompiler/tools/builtin/verify.py +6 -4
- siliconcompiler/tools/chisel/convert.py +4 -4
- siliconcompiler/tools/genfasm/bitstream.py +3 -3
- siliconcompiler/tools/ghdl/convert.py +1 -1
- siliconcompiler/tools/icarus/compile.py +4 -4
- siliconcompiler/tools/icepack/bitstream.py +6 -1
- siliconcompiler/tools/klayout/convert_drc_db.py +5 -0
- siliconcompiler/tools/klayout/klayout_export.py +0 -1
- siliconcompiler/tools/klayout/klayout_utils.py +3 -10
- siliconcompiler/tools/nextpnr/apr.py +6 -1
- siliconcompiler/tools/nextpnr/nextpnr.py +4 -4
- siliconcompiler/tools/openroad/_apr.py +17 -0
- siliconcompiler/tools/openroad/fillmetal_insertion.py +14 -14
- siliconcompiler/tools/openroad/rdlroute.py +3 -3
- siliconcompiler/tools/openroad/scripts/apr/postamble.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +5 -5
- siliconcompiler/tools/openroad/scripts/apr/sc_antenna_repair.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_clock_tree_synthesis.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_endcap_tapcell_insertion.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_fillercell_insertion.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_fillmetal_insertion.tcl +4 -4
- siliconcompiler/tools/openroad/scripts/apr/sc_global_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_macro_placement.tcl +4 -4
- siliconcompiler/tools/openroad/scripts/apr/sc_metrics.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_pin_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_power_grid.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_timing.tcl +4 -4
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +58 -2
- siliconcompiler/tools/openroad/scripts/common/reports.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/write_images.tcl +28 -3
- siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +4 -4
- siliconcompiler/tools/openroad/scripts/sc_show.tcl +7 -7
- siliconcompiler/tools/opensta/__init__.py +1 -1
- siliconcompiler/tools/opensta/scripts/sc_check_library.tcl +1 -1
- siliconcompiler/tools/opensta/scripts/sc_procs.tcl +16 -0
- siliconcompiler/tools/opensta/scripts/sc_report_libraries.tcl +1 -1
- siliconcompiler/tools/opensta/scripts/sc_timing.tcl +35 -7
- siliconcompiler/tools/opensta/timing.py +6 -2
- siliconcompiler/tools/slang/__init__.py +10 -10
- siliconcompiler/tools/surelog/parse.py +4 -4
- siliconcompiler/tools/sv2v/convert.py +20 -3
- siliconcompiler/tools/verilator/compile.py +2 -2
- siliconcompiler/tools/verilator/verilator.py +3 -3
- siliconcompiler/tools/vpr/place.py +1 -1
- siliconcompiler/tools/vpr/route.py +4 -4
- siliconcompiler/tools/vpr/screenshot.py +1 -1
- siliconcompiler/tools/vpr/show.py +5 -5
- siliconcompiler/tools/vpr/vpr.py +24 -24
- siliconcompiler/tools/xdm/convert.py +2 -2
- siliconcompiler/tools/xyce/simulate.py +1 -1
- siliconcompiler/tools/yosys/sc_synth_asic.tcl +104 -90
- siliconcompiler/tools/yosys/syn_asic.py +13 -4
- siliconcompiler/toolscripts/_tools.json +12 -7
- siliconcompiler/toolscripts/rhel8/install-chisel.sh +2 -0
- siliconcompiler/toolscripts/rhel8/install-icarus.sh +1 -0
- siliconcompiler/toolscripts/rhel8/install-klayout.sh +2 -0
- siliconcompiler/toolscripts/rhel8/install-magic.sh +1 -2
- siliconcompiler/toolscripts/rhel8/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel8/install-slang.sh +2 -0
- siliconcompiler/toolscripts/rhel8/install-surelog.sh +3 -1
- siliconcompiler/toolscripts/rhel8/install-sv2v.sh +1 -0
- siliconcompiler/toolscripts/rhel8/install-verible.sh +2 -0
- siliconcompiler/toolscripts/rhel8/install-verilator.sh +1 -0
- siliconcompiler/toolscripts/rhel8/install-xyce.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-chisel.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-ghdl.sh +1 -0
- siliconcompiler/toolscripts/rhel9/install-gtkwave.sh +1 -0
- siliconcompiler/toolscripts/rhel9/install-icarus.sh +1 -0
- siliconcompiler/toolscripts/rhel9/install-klayout.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-magic.sh +1 -2
- siliconcompiler/toolscripts/rhel9/install-netgen.sh +1 -1
- siliconcompiler/toolscripts/rhel9/install-openroad.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-opensta.sh +76 -0
- siliconcompiler/toolscripts/rhel9/install-slang.sh +3 -1
- siliconcompiler/toolscripts/rhel9/install-surelog.sh +2 -1
- siliconcompiler/toolscripts/rhel9/install-sv2v.sh +1 -0
- siliconcompiler/toolscripts/rhel9/install-verible.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-verilator.sh +1 -0
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-xdm.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-xyce.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-yosys-moosic.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +2 -0
- siliconcompiler/toolscripts/rhel9/install-yosys-slang.sh +3 -1
- siliconcompiler/toolscripts/rhel9/install-yosys.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-bambu.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-bluespec.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-chisel.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-ghdl.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-gtkwave.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-icarus.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-icepack.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-klayout.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-magic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-netgen.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-nextpnr.sh +1 -3
- siliconcompiler/toolscripts/ubuntu20/install-openroad.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-opensta.sh +72 -0
- siliconcompiler/toolscripts/ubuntu20/install-slang.sh +3 -1
- siliconcompiler/toolscripts/ubuntu20/install-slurm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-surelog.sh +3 -1
- siliconcompiler/toolscripts/ubuntu20/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu20/install-verible.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-verilator.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-xdm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-xyce.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-yosys-moosic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu20/install-yosys.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-bambu.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-bluespec.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-chisel.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-ghdl.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-gtkwave.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-icarus.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-icepack.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-klayout.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-magic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-netgen.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-nextpnr.sh +1 -2
- siliconcompiler/toolscripts/ubuntu22/install-openroad.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-opensta.sh +72 -0
- siliconcompiler/toolscripts/ubuntu22/install-slang.sh +3 -1
- siliconcompiler/toolscripts/ubuntu22/install-slurm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-surelog.sh +3 -1
- siliconcompiler/toolscripts/ubuntu22/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu22/install-verible.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-verilator.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +2 -2
- siliconcompiler/toolscripts/ubuntu22/install-xdm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-xyce.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-yosys-moosic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +2 -0
- siliconcompiler/toolscripts/ubuntu22/install-yosys-slang.sh +3 -1
- siliconcompiler/toolscripts/ubuntu22/install-yosys.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-bambu.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-bluespec.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-chisel.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-ghdl.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-gtkwave.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-icarus.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-icepack.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-klayout.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-magic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-netgen.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-nextpnr.sh +1 -3
- siliconcompiler/toolscripts/ubuntu24/install-openroad.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-opensta.sh +72 -0
- siliconcompiler/toolscripts/ubuntu24/install-slang.sh +3 -1
- siliconcompiler/toolscripts/ubuntu24/install-slurm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-surelog.sh +3 -1
- siliconcompiler/toolscripts/ubuntu24/install-sv2v.sh +1 -1
- siliconcompiler/toolscripts/ubuntu24/install-verible.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-verilator.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +2 -2
- siliconcompiler/toolscripts/ubuntu24/install-xdm.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-xyce.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-yosys-moosic.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +2 -0
- siliconcompiler/toolscripts/ubuntu24/install-yosys-slang.sh +3 -1
- siliconcompiler/toolscripts/ubuntu24/install-yosys.sh +2 -0
- siliconcompiler/utils/__init__.py +8 -112
- siliconcompiler/utils/flowgraph.py +339 -0
- siliconcompiler/{issue.py → utils/issue.py} +7 -4
- siliconcompiler/utils/logging.py +86 -33
- {siliconcompiler-0.32.2.dist-info → siliconcompiler-0.33.0.dist-info}/METADATA +10 -8
- siliconcompiler-0.33.0.dist-info/RECORD +487 -0
- {siliconcompiler-0.32.2.dist-info → siliconcompiler-0.33.0.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.32.2.dist-info → siliconcompiler-0.33.0.dist-info}/entry_points.txt +8 -8
- siliconcompiler/schema/schema_obj.py +0 -1936
- siliconcompiler/toolscripts/ubuntu20/install-vpr.sh +0 -27
- siliconcompiler/toolscripts/ubuntu20/install-yosys-parmys.sh +0 -59
- siliconcompiler-0.32.2.dist-info/RECORD +0 -464
- /siliconcompiler/{templates → data/templates}/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/general.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/email/summary.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/issue/README.txt +0 -0
- /siliconcompiler/{templates → data/templates}/issue/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/issue/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.py.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/requirements.txt +0 -0
- /siliconcompiler/{templates → data/templates}/replay/setup.sh +0 -0
- /siliconcompiler/{templates → data/templates}/report/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.css +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.js +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap_LICENSE.md +0 -0
- /siliconcompiler/{templates → data/templates}/report/sc_report.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/manifest.tcl.j2 +0 -0
- /siliconcompiler/report/dashboard/{utils → web/utils}/file_utils.py +0 -0
- /siliconcompiler/{units.py → utils/units.py} +0 -0
- {siliconcompiler-0.32.2.dist-info → siliconcompiler-0.33.0.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.32.2.dist-info → siliconcompiler-0.33.0.dist-info}/top_level.txt +0 -0
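Among the core.py changes below is a new `type` argument on `Chip.dashboard()`, backed by the `DashboardType` enum and the `web`/`cli` dashboard packages added in this release. A minimal sketch of how that call might look in 0.33.0; the design name is an illustrative placeholder, not something taken from this diff:

    # Hedged sketch of the 0.33.0 dashboard selection; 'heartbeat' is a placeholder design name.
    import siliconcompiler

    chip = siliconcompiler.Chip('heartbeat')

    # dashboard() now accepts type='web' or type='cli' (coerced through DashboardType).
    chip.dashboard(type='web', port=8000)   # browser dashboard (WebDashboard)
    # chip.dashboard(type='cli')            # terminal dashboard (CliDashboard); wait is forced off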
siliconcompiler/core.py
CHANGED
@@ -11,31 +11,39 @@ import logging
 import hashlib
 import shutil
 import importlib
-import inspect
 import textwrap
 import graphviz
 import codecs
-import
+import csv
+import yaml
 from inspect import getfullargspec
-from siliconcompiler
-from siliconcompiler.schema import
+from siliconcompiler import Schema
+from siliconcompiler.schema import SCHEMA_VERSION, PerNode, JournalingSchema, EditableSchema
+from siliconcompiler.schema.parametertype import NodeType
+from siliconcompiler.schema.parametervalue import FileNodeValue, PathNodeValue
 from siliconcompiler.schema import utils as schema_utils
 from siliconcompiler import utils
-from siliconcompiler.utils.logging import
+from siliconcompiler.utils.logging import SCColorLoggerFormatter, \
+    SCLoggerFormatter, SCInRunLoggerFormatter, \
+    SCDebugLoggerFormatter, SCDebugInRunLoggerFormatter, \
+    SCBlankLoggerFormatter
 from siliconcompiler import _metadata
 from siliconcompiler import NodeStatus, SiliconCompilerError
 from siliconcompiler.report import _show_summary_table
 from siliconcompiler.report import _generate_summary_image, _open_summary_image
-from siliconcompiler.report import
+from siliconcompiler.report.dashboard.web import WebDashboard
+from siliconcompiler.report.dashboard.cli import CliDashboard
+from siliconcompiler.report.dashboard import DashboardType
 from siliconcompiler import package as sc_package
 import glob
 from siliconcompiler.scheduler import run as sc_runner
-from siliconcompiler.flowgraph import
-    _get_pruned_node_inputs,
+from siliconcompiler.utils.flowgraph import nodes_to_execute, \
+    _get_pruned_node_inputs, \
     _get_flowgraph_execution_order, _check_flowgraph_io, \
     _get_flowgraph_information
 from siliconcompiler.tools._common import get_tool_task
 from types import FunctionType, ModuleType
+from siliconcompiler.flowgraph import RuntimeFlowgraph


 class Chip:
@@ -238,62 +246,39 @@ class Chip:
         self.logger._console = stream_handler
         self.logger.addHandler(stream_handler)

-        self.logger._support_color =
+        self.logger._support_color = SCColorLoggerFormatter.supports_color(stream_handler)

         self._init_logger_formats(loglevel=loglevel)

     def _init_logger_formats(self, loglevel=None):
         if not loglevel:
-            self.schema.get('option', 'loglevel',
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            max_index_len = min(max_index_len, max_column_width)
-
-            jobname = self.get('option', 'jobname')
-
-            step = self.logger._in_step
-            index = self.logger._in_index
-
-            if step is None:
-                step = '-' * max(max_step_len // 4, 1)
-            if index is None:
-                index = '-' * max(max_index_len // 4, 1)
-
-            log_format.append(utils.truncate_text(jobname, max_column_width))
-            log_format.append(f'{utils.truncate_text(step, max_step_len): <{max_step_len}}')
-            log_format.append(f'{utils.truncate_text(index, max_step_len): >{max_index_len}}')
-
-        log_formatprefix = "| "
-        if loglevel == "quiet":
-            log_format = []
-            log_formatprefix = ""
-
-        log_format.append('%(message)s')
-        stream_logformat = log_formatprefix + ' | '.join(log_format[1:])
+            loglevel = self.schema.get('option', 'loglevel',
+                                       step=self.logger._in_step, index=self.logger._in_index)
+
+        if loglevel == 'quiet':
+            base_format = SCBlankLoggerFormatter()
+        elif self.logger._in_run:
+            if loglevel == 'debug':
+                base_format = SCDebugInRunLoggerFormatter(
+                    self,
+                    self.get('option', 'jobname'),
+                    self.logger._in_step, self.logger._in_index)
+            else:
+                base_format = SCInRunLoggerFormatter(
+                    self,
+                    self.get('option', 'jobname'),
+                    self.logger._in_step, self.logger._in_index)
+        else:
+            if loglevel == 'debug':
+                base_format = SCDebugLoggerFormatter()
+            else:
+                base_format = SCLoggerFormatter()

         for handler in self.logger.handlers.copy():
             if handler == self.logger._console and self.logger._support_color:
-                formatter =
+                formatter = SCColorLoggerFormatter(base_format)
             else:
-                formatter =
+                formatter = base_format
             handler.setFormatter(formatter)

     ###########################################################################
@@ -407,7 +392,7 @@ class Chip:

             is_list = '[' in paramtype

-            for vals, step, index in self.schema.
+            for vals, step, index in self.schema.get(*key, field=None).getvalues():
                 if not vals:
                     continue
                 if not self.get(*key, field='pernode').is_never():
@@ -416,9 +401,10 @@ class Chip:
                 if index is None:
                     index = Schema.GLOBAL_KEY

+                packages = self.get(*key, field='package', step=step, index=index)
                 if not is_list:
                     vals = [vals]
-
+                    packages = [packages]
                 if len(packages) == len(vals):
                     continue

@@ -495,7 +481,6 @@ class Chip:
             progname=progname,
             description=description,
             switchlist=switchlist,
-            input_map=input_map,
             additional_args=additional_args,
             version=_metadata.version,
             print_banner=print_banner,
@@ -678,11 +663,10 @@ class Chip:

         elif isinstance(use_module, (Library, Chip)):
             self._loaded_modules['libs'].append(use_module.design)
-            cfg = use_module.schema.cfg
             keep_inputs = True
             if not isinstance(use_module, Library):
                 keep_inputs = False
-            self.__import_library(use_module.design,
+            self.__import_library(use_module.design, use_module,
                                   keep_input=keep_inputs)

             is_auto_enable = getattr(use_module, 'is_auto_enable', None)
@@ -696,26 +680,13 @@ class Chip:
             raise ValueError(f"{module_name} returned an object with an "
                              f"unsupported type: {class_name}")

-    def __import_data_sources(self,
-        if 'package'
+    def __import_data_sources(self, schema):
+        if not schema.valid('package', 'source'):
             return

-        for source
-
-
-
-            if 'path' not in config or \
-                    Schema.GLOBAL_KEY not in config['path']['node'] or \
-                    Schema.GLOBAL_KEY not in config['path']['node'][Schema.GLOBAL_KEY]:
-                continue
-
-            path = config['path']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
-
-            ref = None
-            if 'ref' in config and \
-                    Schema.GLOBAL_KEY in config['ref']['node'] and \
-                    Schema.GLOBAL_KEY in config['ref']['node'][Schema.GLOBAL_KEY]:
-                ref = config['ref']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
+        for source in schema.getkeys('package', 'source'):
+            path = schema.get('package', 'source', source, 'path')
+            ref = schema.get('package', 'source', source, 'ref')

             self.register_source(
                 name=source,
@@ -733,15 +704,24 @@ class Chip:

         importname = module.design

-
+        if hasattr(module, 'schema'):
+            module = module.schema

-        if importname
+        if self.valid(group, importname):
             self.logger.warning(f'Overwriting existing {group} {importname}')
-            del src_cfg[importname]

-
-
-
+        try:
+            insert_schema = EditableSchema(module).search(group, importname)
+        except KeyError:
+            self.logger.warning(f'{group} {importname} is not valid')
+            return
+
+        EditableSchema(self.schema).insert(
+            group,
+            importname,
+            insert_schema,
+            clobber=True)
+        self.__import_data_sources(module)

     ###########################################################################
     def help(self, *keypath):
@@ -764,13 +744,15 @@ class Chip:

         # Fetch Values

-
-
-
-
-
-
-
+        param = self.get(*keypath, field=None)
+
+        description = param.get(field='shorthelp')
+        typestr = param.get(field='type')
+        switchstr = str(param.get(field='switch'))
+        defstr = str(param.default.get())
+        requirement = str(param.get(field='require'))
+        helpstr = param.get(field='help')
+        example = param.get(field='example')

         examplestr = ("\nExamples: " + example[0] + ''.join(
                       ["\n " + ex for ex in example[1:]]))
@@ -828,9 +810,12 @@ class Chip:
        >>> check = chip.valid('metric', 'foo', '0', 'tasktime', default_valid=True)
        Returns True, even if "foo" and "0" aren't in current configuration.
        """
+        if job:
+            return self.schema.history(job).valid(*keypath,
+                                                  default_valid=default_valid,
+                                                  check_complete=check_complete)
         return self.schema.valid(*keypath,
                                  default_valid=default_valid,
-                                 job=job,
                                  check_complete=check_complete)

     ###########################################################################
@@ -870,7 +855,7 @@ class Chip:
             strict = self.schema.get('option', 'strict')
             if field == 'value' and strict:
                 pernode = self.schema.get(*keypath, field='pernode')
-                if pernode ==
+                if pernode == PerNode.OPTIONAL and \
                         (step is None or index is None) and \
                         (Schema.GLOBAL_KEY not in (step, index)):  # allow explicit access to global
                     self.error(
@@ -880,7 +865,10 @@ class Chip:
                     )
                     return None

-
+            if job:
+                return self.schema.history(job).get(*keypath, field=field, step=step, index=index)
+
+            return self.schema.get(*keypath, field=field, step=step, index=index)
         except (ValueError, TypeError) as e:
             self.error(str(e))
             return None
@@ -913,7 +901,10 @@ class Chip:
         self.logger.debug('Getting all schema parameter keys.')

         try:
-
+            if job:
+                return self.schema.history(job).getkeys(*keypath)
+
+            return self.schema.getkeys(*keypath)
         except (ValueError, TypeError) as e:
             self.error(str(e))
             return None
@@ -956,23 +947,17 @@ class Chip:
             return None

     ###########################################################################
-    def __add_set_package(self,
-
-
-
-
-
-
-
-
-
-
-        if add:
-            self.schema.add(*keypath, package, field='package',
-                            step=step, index=index)
-        else:
-            self.schema.set(*keypath, package, field='package',
-                            step=step, index=index, clobber=clobber)
+    def __add_set_package(self, value_success, package):
+        if not isinstance(value_success, (list, tuple)):
+            value_success = [value_success]
+        if not isinstance(package, (list, tuple)):
+            package = [package]
+        if len(value_success) != len(package):
+            package = len(value_success) * package
+
+        for val, package in zip(value_success, package):
+            if val.type in ('file', 'dir'):
+                val.set(package, field='package')

     ###########################################################################
     def set(self, *args, field='value', clobber=True, step=None, index=None, package=None):
@@ -1018,8 +1003,8 @@ class Chip:
         try:
             value_success = self.schema.set(*keypath, value, field=field, clobber=clobber,
                                             step=step, index=index)
-            if field == 'value' and value_success:
-                self.__add_set_package(
+            if field == 'value' and value_success and package:
+                self.__add_set_package(value_success, package)

         except (ValueError, TypeError) as e:
             self.error(e)
@@ -1105,8 +1090,8 @@ class Chip:
         try:
             value_success = self.schema.add(*args, field=field, step=step, index=index)

-            if field == 'value' and value_success:
-                self.__add_set_package(
+            if field == 'value' and value_success and package:
+                self.__add_set_package(value_success, package)
         except (ValueError, TypeError) as e:
             self.error(str(e))

@@ -1128,8 +1113,10 @@ class Chip:
         package_name = f'flist-{os.path.basename(filename)}'
         package_dir = os.path.dirname(os.path.abspath(filename))

+        env_vars = utils.get_env_vars(self, None, None)
+
         def __make_path(rel, path):
-            path =
+            path = PathNodeValue.resolve_env_vars(path, envvars=env_vars)
             if os.path.isabs(path):
                 if path.startswith(rel):
                     return os.path.relpath(path, rel), package_name
@@ -1300,7 +1287,7 @@ class Chip:
         """
         strict = self.get('option', 'strict')
         pernode = self.get(*keypath, field='pernode')
-        if strict and pernode ==
+        if strict and pernode == PerNode.OPTIONAL and (step is None or index is None):
             self.error(
                 f"Invalid args to find_files() of keypath {keypath}: step and "
                 "index are required for reading from this parameter while "
@@ -1332,7 +1319,9 @@ class Chip:
         """Internal find_files() that allows you to skip step/index for optional
         params, regardless of [option, strict]."""

-
+        param = self.get(*keypath, field=None, job=job)
+
+        paramtype = param.get(field='type')

         if 'file' not in paramtype and 'dir' not in paramtype:
             self.error('Can only call find_files on file or dir types')
@@ -1340,15 +1329,15 @@ class Chip:

         is_list = bool(re.match(r'\[', paramtype))

-        paths =
-        dependencies =
-
+        paths = param.get(step=step, index=index)
+        dependencies = param.get(field='package', step=step, index=index)
+
         # Convert to list if we have scalar
         if not is_list:
             # Dependencies are always specified as list with default []
             # If paths is a scalar we convert the default [] to [None]
             # to have a matching list with one element
-            if dependencies
+            if not dependencies:
                 dependencies = [None]
             paths = [paths]

@@ -1358,7 +1347,6 @@ class Chip:
                 dependencies = [dependencies[list_index]]

         paths = self.__convert_paths_to_posix(paths)
-        dependencies = self.__convert_paths_to_posix(dependencies)

         result = []

@@ -1392,29 +1380,32 @@ class Chip:

         if search_paths:
             search_paths = self.__convert_paths_to_posix(search_paths)
+        else:
+            search_paths = [self.cwd]

+        env_vars = utils.get_env_vars(self, step, index)
         for (dependency, path) in zip(dependencies, paths):
-
-
-
-
-
-
-                depdendency_path = os.path.abspath(
-                    os.path.join(sc_package.path(self, dependency), path))
-                if os.path.exists(depdendency_path):
-                    result.append(depdendency_path)
+            faux_param = FileNodeValue()
+            faux_param.set(path)
+            try:
+                if dependency:
+                    faux_param.set(dependency, field='package')
+                    faux_search = [os.path.abspath(os.path.join(sc_package.path(self, dependency)))]
                 else:
-
-
-
-
-
-
-
-
-
-
+                    faux_search = search_paths
+                resolved = faux_param.resolve_path(
+                    envvars=env_vars,
+                    search=faux_search,
+                    collection_dir=collection_dir)
+            except FileNotFoundError:
+                resolved = None
+                if not missing_ok:
+                    if dependency:
+                        self.error(f'Could not find {path} in {dependency}. [{",".join(keypath)}]')
+                    else:
+                        self.error(f'Could not find {path}. [{",".join(keypath)}]')
+
+            result.append(resolved)

         if self._relative_path and not abs_path_only:
             rel_result = []
@@ -1427,6 +1418,8 @@ class Chip:

         # Convert back to scalar if that was original type
         if not is_list:
+            if not result:
+                return None
             return result[0]

         return result
@@ -1439,33 +1432,20 @@ class Chip:

         Returns none if not found
         """
-        if not
+        if not collected_dir:
             return None

-
-
-
-
-        path_paths = pathlib.PurePosixPath(path).parts
-        for n in range(len(path_paths)):
-            # Search through the path elements to see if any of the previous path parts
-            # have been imported
+        faux_param = FileNodeValue()
+        faux_param.set(path)
+        faux_param.set(package, field='package')

-
-
-
-
-            import_name = utils.get_hashed_filename(basename, package=package)
-            if import_name not in collected_files:
-                continue
-
-            abspath = os.path.join(collected_dir, import_name)
-            if endname:
-                abspath = os.path.join(abspath, endname)
-            abspath = os.path.abspath(abspath)
-            if os.path.exists(abspath):
-                return abspath
+        try:
+            resolved = faux_param.resolve_path(collection_dir=collected_dir)
+        except FileNotFoundError:
+            return None

+        if resolved.startswith(collected_dir):
+            return resolved
         return None

     def find_node_file(self, path, step, jobname=None, index='0'):
@@ -1549,7 +1529,7 @@ class Chip:
                 # only do something if type is file or dir
                 continue

-            values = self.schema.
+            values = self.schema.get(*keypath, field=None).getvalues()
             for value, step, index in values:
                 if not value:
                     continue
@@ -1584,7 +1564,7 @@ class Chip:
                 # exist
                 continue

-            for check_files, step, index in self.schema.
+            for check_files, step, index in self.schema.get(*keypath, field=None).getvalues():
                 if not check_files:
                     continue

@@ -1671,7 +1651,7 @@ class Chip:
             lib_node_check.append((step, None))
             lib_node_check.extend(nodes)
             for lib_key in libs_to_check:
-                for val, step, index in self.schema.
+                for val, step, index in self.schema.get(*lib_key, field=None).getvalues():
                     if (step, index) in lib_node_check:
                         libraries.update(val)

@@ -1685,9 +1665,8 @@ class Chip:
         for key in allkeys:
             keypath = ",".join(key)
             if 'default' not in key and 'history' not in key and 'library' not in key:
-
-
-                if key_empty and requirement:
+                param = self.get(*key, field=None)
+                if param.is_empty() and param.get(field='require'):
                     error = True
                     self.logger.error(f"Global requirement missing for [{keypath}].")

@@ -1729,12 +1708,12 @@ class Chip:
                                               step=step, index=index)
             for item in all_required:
                 keypath = item.split(',')
-                if self.schema.
+                if self.schema.get(*keypath, field=None).is_empty():
                     error = True
                     self.logger.error(f"Value empty for {keypath} for {tool}.")

             task_run = getattr(task_module, 'run', None)
-            if self.schema.
+            if self.schema.get('tool', tool, 'exe', field=None).is_empty() and not task_run:
                 error = True
                 self.logger.error(f'No executable or run() function specified for {tool}/{task}')

@@ -1762,26 +1741,8 @@ class Chip:
            Loads the file mychip.json into the current Chip object.
        """

-        # Read from file into new schema object
-        schema = Schema(manifest=filename, logger=self.logger)
-
         # Merge data in schema with Chip configuration
-        self.schema.
-
-        # Read history, if we're not already reading into a job
-        if 'history' in schema.cfg and not job:
-            for historic_job in schema.cfg['history'].keys():
-                self.schema.merge_manifest(schema.history(historic_job),
-                                           job=historic_job,
-                                           clear=clear,
-                                           clobber=clobber)
-
-        # TODO: better way to handle this?
-        if 'library' in schema.cfg:
-            for libname in schema.cfg['library'].keys():
-                self.__import_library(libname, schema.cfg['library'][libname],
-                                      job=job,
-                                      clobber=clobber)
+        self.schema.read_manifest(filename)

     ###########################################################################
     def write_manifest(self, filename, prune=False, abspath=False):
@@ -1815,12 +1776,14 @@ class Chip:
         if abspath:
             schema = self.__abspath()

-        if
-
-
+        if re.search(r'(\.json|\.sup)(\.gz)*$', filepath):
+            schema.write_manifest(filepath)
+            return

-
-
+        tcl_record = False
+        if isinstance(schema, JournalingSchema):
+            tcl_record = "get" in schema.get_journaling_types()
+            schema = schema.get_base_schema()

         is_csv = re.search(r'(\.csv)(\.gz)*$', filepath)

@@ -1836,26 +1799,85 @@ class Chip:

         # format specific printing
         try:
-            if re.search(r'(\.
-
-
-
+            if re.search(r'(\.yaml|\.yml)(\.gz)*$', filepath):
+                class YamlIndentDumper(yaml.Dumper):
+                    def increase_indent(self, flow=False, indentless=False):
+                        return super().increase_indent(flow=flow, indentless=False)
+
+                fout.write(yaml.dump(schema.getdict(), Dumper=YamlIndentDumper,
+                                     default_flow_style=False))
+
             elif re.search(r'(\.tcl)(\.gz)*$', filepath):
                 # TCL only gets values associated with the current node.
                 step = self.get('arg', 'step')
                 index = self.get('arg', 'index')
-
+                self.__write_tcl(fout,
+                                 schema,
                                  prefix="dict set sc_cfg",
                                  step=step,
                                  index=index,
-                                 template=utils.get_file_template('tcl/manifest.tcl.j2')
+                                 template=utils.get_file_template('tcl/manifest.tcl.j2'),
+                                 record=tcl_record)
             elif is_csv:
-
+                csvwriter = csv.writer(fout)
+                csvwriter.writerow(['Keypath', 'Value'])
+
+                allkeys = schema.allkeys()
+                for key in allkeys:
+                    keypath = ','.join(key)
+                    param = schema.get(*key, field=None)
+                    for value, step, index in param.getvalues():
+                        if step is None and index is None:
+                            keypath = ','.join(key)
+                        elif index is None:
+                            keypath = ','.join([*key, step, 'default'])
+                        else:
+                            keypath = ','.join([*key, step, index])
+
+                        if isinstance(value, list):
+                            for item in value:
+                                csvwriter.writerow([keypath, item])
+                        else:
+                            csvwriter.writerow([keypath, value])
             else:
                 self.error(f'File format not recognized {filepath}')
         finally:
             fout.close()

+    def __write_tcl(self, fout, schema,
+                    prefix="", step=None, index=None, template=None, record=False):
+        tcl_set_cmds = []
+        for key in sorted(schema.allkeys()):
+            # print out all non default values
+            if 'default' in key:
+                continue
+
+            param = schema.get(*key, field=None)
+
+            # create a TCL dict
+            keystr = ' '.join([NodeType.to_tcl(keypart, 'str') for keypart in key])
+
+            valstr = param.gettcl(step=step, index=index)
+            if valstr is None:
+                continue
+
+            # Ensure empty values get something
+            if valstr == '':
+                valstr = '{}'
+
+            tcl_set_cmds.append(f"{prefix} {keystr} {valstr}")
+
+        if template:
+            fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
+                                       scroot=os.path.abspath(
+                                           os.path.join(os.path.dirname(__file__))),
+                                       record_access=record,
+                                       record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
+        else:
+            for cmd in tcl_set_cmds:
+                fout.write(cmd + '\n')
+            fout.write('\n')
+
     ###########################################################################
     def check_checklist(self, standard, items=None,
                         check_ok=False, verbose=False, require_reports=True):
@@ -1997,15 +2019,17 @@ class Chip:
                     self.get('tool', tool, 'task', task, 'report', metric, job=job,
                              step=step, index=index)

-                if
+                if allow_missing_reports and not has_reports:
                     # No reports available and it is allowed
                     continue

+                reports = []
                 try:
-
-
-
-
+                    if has_reports:
+                        reports = self.find_files('tool', tool, 'task', task, 'report', metric,
+                                                  job=job,
+                                                  step=step, index=index,
+                                                  missing_ok=not require_reports)
                 except SiliconCompilerError:
                     reports = []
                     continue
@@ -2041,33 +2065,38 @@ class Chip:
         return not error

     ###########################################################################
-    def __import_library(self, libname,
+    def __import_library(self, libname, library, job=None, clobber=True, keep_input=True):
        '''Helper to import library with config 'libconfig' as a library
        'libname' in current Chip object.'''
-        if job:
-            cfg = self.schema.cfg['history'][job]['library']
-        else:
-            cfg = self.schema.cfg['library']
-
-        if 'library' in libcfg:
-            for sublib_name, sublibcfg in libcfg['library'].items():
-                self.__import_library(sublib_name, sublibcfg,
-                                      job=job, clobber=clobber, keep_input=keep_input)

-        if libname in
+        if libname in self.schema.getkeys('library'):
             if not clobber:
                 return
+        if hasattr(library, 'schema'):
+            library = library.schema

-
-
+        try:
+            for sublib in library.getkeys('library'):
+                self.__import_library(sublib,
+                                      EditableSchema(library).search('library', sublib),
+                                      job=job, clobber=clobber, keep_input=keep_input)
+        except KeyError:
+            pass
+
+        self.__import_data_sources(library)

         # Only keep some sections to avoid recursive bloat
         keeps = ['asic', 'design', 'fpga', 'option', 'output', 'package']
         if keep_input:
             keeps.append('input')
-
-
-
+
+        importlibrary = library.copy()
+        edit_lib = EditableSchema(importlibrary)
+        for section in list(importlibrary.getkeys()):
+            if section not in keeps:
+                edit_lib.remove(section)
+
+        EditableSchema(self.schema).insert("library", libname, importlibrary, clobber=True)

     ###########################################################################
     def write_flowgraph(self, filename, flow=None,
@@ -2352,6 +2381,8 @@ class Chip:

         nodes = {}

+        search_schema = EditableSchema(self.schema)
+
         def collect_library(root_type, lib, name=None):
             if not name:
                 name = lib.design
@@ -2390,15 +2421,15 @@ class Chip:

             for in_lib in lib.get('option', 'library',
                                   step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
-                collect_library("library",
+                collect_library("library", search_schema.search('library', in_lib),
                                 name=in_lib)
             for in_lib in lib.get('asic', 'logiclib',
                                   step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
-                collect_library("logiclib",
+                collect_library("logiclib", search_schema.search('library', in_lib),
                                 name=in_lib)
             for in_lib in lib.get('asic', 'macrolib',
                                   step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
-                collect_library("macrolib",
+                collect_library("macrolib", search_schema.search('library', in_lib),
                                 name=in_lib)

         collect_library("design", self)
@@ -2436,6 +2467,8 @@ class Chip:
         all_libraries = self.getkeys('library')

         def swap(*key):
+            if not self.schema.valid(*key):
+                return
             if step is not None:
                 r_step = step
                 r_index = index
@@ -2451,7 +2484,7 @@ class Chip:
                         list(map(lambda x: new_library if x == org_library else x, val)),
                         step=r_step, index=r_index)
             else:
-                for val, r_step, r_index in self.schema.
+                for val, r_step, r_index in self.schema.get(*key, field=None).getvalues():
                     if r_step is None:
                         r_step = Schema.GLOBAL_KEY
                     if r_index is None:
@@ -2529,7 +2562,7 @@ class Chip:
             is_file = re.search('file', leaftype)
             if is_dir or is_file:
                 if self.get(*key, field='copy'):
-                    for value, step, index in self.schema.
+                    for value, step, index in self.schema.get(*key, field=None).getvalues():
                         if not value:
                             continue
                         packages = self.get(*key, field='package', step=step, index=index)
@@ -2713,7 +2746,7 @@ class Chip:
             flowgraph_nodes = [(step, index)]
         elif step:
             flow = self.get('option', 'flow')
-            flowgraph_nodes =
+            flowgraph_nodes = [(step, index) for index in self.getkeys("flowgraph", flow, step)]
         else:
             flowgraph_nodes = nodes_to_execute(self)

@@ -2834,8 +2867,12 @@ class Chip:
         if check:
             # compare previous hash to new hash
             oldhash = self.schema.get(*keypath, step=step, index=index, field='filehash')
+            if not isinstance(oldhash, list):
+                oldhash = [oldhash]
             check_failed = False
             for i, item in enumerate(oldhash):
+                if item is None:
+                    continue
                 if item != hashlist[i]:
                     self.logger.error(f"Hash mismatch for [{keypath}]")
                     check_failed = True
@@ -2848,11 +2885,11 @@ class Chip:
         set_step = None
         set_index = None
         pernode = self.get(*keypath, field='pernode')
-        if pernode ==
+        if pernode == PerNode.REQUIRED:
             set_step = step
             set_index = index
-        elif pernode ==
-            for vals, key_step, key_index in self.schema.
+        elif pernode == PerNode.OPTIONAL:
+            for vals, key_step, key_index in self.schema.get(*keypath, field=None).getvalues():
                 if key_step == step and key_index == index and vals:
                     set_step = step
                     set_index = index
@@ -2867,7 +2904,7 @@ class Chip:
         return hashlist

     ###########################################################################
-    def dashboard(self, wait=True, port=None, graph_chips=None):
+    def dashboard(self, wait=True, port=None, graph_chips=None, type=DashboardType.WEB):
         '''
         Open a session of the dashboard.

@@ -2881,6 +2918,8 @@ class Chip:
                 dashboard to.
             graph_chips (list): A list of dictionaries of the format
                 {'chip': chip object, 'name': chip name}
+            type (enum): A string specifying what kind of dashboard to
+                launch. Available options: 'cli', 'web'.

         Examples:
             >>> chip.dashboard()
@@ -2891,7 +2930,14 @@ class Chip:
             self._dash.stop()
             self._dash = None

-
+        # Select dashboard type
+        type = DashboardType(type)
+        if type == DashboardType.WEB:
+            self._dash = WebDashboard(self, port=port, graph_chips=graph_chips)
+        elif type == DashboardType.CLI:
+            self._dash = CliDashboard(self)
+            wait = False
+
         self._dash.open_dashboard()

         if wait:
@@ -2928,9 +2974,12 @@ class Chip:

         # display whole flowgraph if no from/to specified
         flow = self.get('option', 'flow')
-
-
-
+        runtime = RuntimeFlowgraph(
+            self.schema.get("flowgraph", flow, field='schema'),
+            to_steps=self.get('option', 'to'),
+            prune_nodes=self.get('option', 'prune'))
+        _show_summary_table(self, flow, list(runtime.get_nodes()),
+                            show_all_indices=show_all_indices)

         # dashboard does not generate any data
         self.logger.info('Dashboard at "sc-dashboard '
@@ -3017,7 +3066,7 @@ class Chip:
             flow (str): Flow name
             step (str): Step name
             task (module/str): Task to associate with this node
-            index (int): Step index
+            index (int/str): Step index

         Examples:
             >>> import siliconcomiler.tools.openroad.place as place
@@ -3025,40 +3074,15 @@ class Chip:
             Creates a 'place' task with step='apr_place' and index=0 and binds it to the
             'openroad' tool.
         '''
+        from siliconcompiler import FlowgraphSchema
+        from siliconcompiler.schema import EditableSchema

-        if
-
-
-
-        index = str(index)
-
-        # Determine task name and module
-        task_module = None
-        if (isinstance(task, str)):
-            task_module = task
-        elif inspect.ismodule(task):
-            task_module = task.__name__
-            self.modules[task_module] = task
+        if not self.schema.valid("flowgraph", flow):
+            graph = FlowgraphSchema(flow)
+            EditableSchema(self.schema).insert("flowgraph", flow, graph)
         else:
-
-
-                chip=self)
-
-        task_parts = task_module.split('.')
-        if len(task_parts) < 2:
-            raise SiliconCompilerError(
-                f"{task} is not a valid task, it must be associated with a tool '<tool>.<task>'.",
-                chip=self)
-        tool_name, task_name = task_parts[-2:]
-
-        # bind tool to node
-        self.set('flowgraph', flow, step, index, 'tool', tool_name)
-        self.set('flowgraph', flow, step, index, 'task', task_name)
-        self.set('flowgraph', flow, step, index, 'taskmodule', task_module)
-
-        # set default weights
-        for metric in self.getkeys('metric'):
-            self.set('flowgraph', flow, step, index, 'weight', metric, 0)
+            graph = self.schema.get("flowgraph", flow, field="schema")
+        graph.node(step, task, index=index)

     ###########################################################################
     def edge(self, flow, tail, head, tail_index=0, head_index=0):
@@ -3076,28 +3100,16 @@ class Chip:
             flow (str): Name of flow
             tail (str): Name of tail node
             head (str): Name of head node
-            tail_index (int): Index of tail node to connect
-            head_index (int): Index of head node to connect
+            tail_index (int/str): Index of tail node to connect
+            head_index (int/str): Index of head node to connect

         Examples:
             >>> chip.edge('place', 'cts')
             Creates a directed edge from place to cts.
         '''
-        head_index = str(head_index)
-        tail_index = str(tail_index)
-
-        for step in (head, tail):
-            if step in (Schema.GLOBAL_KEY, 'default'):
-                self.error(f'Illegal step name: {step} is reserved')
-                return

-
-
-            self.logger.warning(f'Edge from {tail}{tail_index} to {head}{head_index} already '
-                                'exists, skipping')
-            return
-
-        self.add('flowgraph', flow, head, head_index, 'input', tail_node)
+        graph = self.schema.get("flowgraph", flow, field="schema")
+        graph.edge(tail, head, tail_index=tail_index, head_index=head_index)

     ###########################################################################
     def remove_node(self, flow, step, index=None):
@@ -3107,40 +3119,14 @@ class Chip:
         Args:
             flow (str): Flow name
             step (str): Step name
-            index (int): Step index
+            index (int/str): Step index
         '''

         if flow not in self.getkeys('flowgraph'):
             raise ValueError(f'{flow} is not in the manifest')

-
-
-
-        if index is None:
-            # Iterate over all indexes
-            for index in self.getkeys('flowgraph', flow, step):
-                self.remove_node(flow, step, index)
-            return
-
-        index = str(index)
-        if index not in self.getkeys('flowgraph', flow, step):
-            raise ValueError(f'{index} is not a valid index for {step} in {flow}')
-
-        # Save input edges
-        node = (step, index)
-        node_inputs = self.get('flowgraph', flow, step, index, 'input')
-        self.remove('flowgraph', flow, step, index)
-
-        if len(self.getkeys('flowgraph', flow, step)) == 0:
-            self.remove('flowgraph', flow, step)
-
-        for flow_step in self.getkeys('flowgraph', flow):
-            for flow_index in self.getkeys('flowgraph', flow, flow_step):
-                inputs = self.get('flowgraph', flow, flow_step, flow_index, 'input')
-                if node in inputs:
-                    inputs = [inode for inode in inputs if inode != node]
-                    inputs.extend(node_inputs)
-                    self.set('flowgraph', flow, flow_step, flow_index, 'input', set(inputs))
+        graph = self.schema.get("flowgraph", flow, field="schema")
+        graph.remove_node(step, index=index)

     ###########################################################################
     def graph(self, flow, subflow, name=None):
@@ -3156,27 +3142,9 @@ class Chip:
             >>> chip.graph('asicflow')
             Instantiates a flow named 'asicflow'.
         '''
-
-
-
-                newstep = step
-            else:
-                newstep = name + "." + step
-
-            for keys in self.allkeys('flowgraph', subflow, step):
-                val = self.get('flowgraph', subflow, step, *keys)
-                self.set('flowgraph', flow, newstep, *keys, val)
-
-            if name is None:
-                continue
-
-            for index in self.getkeys('flowgraph', flow, newstep):
-                # rename inputs
-                all_inputs = self.get('flowgraph', flow, newstep, index, 'input')
-                self.set('flowgraph', flow, newstep, index, 'input', [])
-                for in_step, in_index in all_inputs:
-                    newin = name + "." + in_step
-                    self.add('flowgraph', flow, newstep, index, 'input', (newin, in_index))
+        graph = self.schema.get("flowgraph", flow, field="schema")
+        subgraph = self.schema.get("flowgraph", subflow, field="schema")
+        graph.graph(subgraph, name=name)

     ###########################################################################
     def run(self, raise_exception=False):
@@ -3219,6 +3187,12 @@ class Chip:
                 raise e
             self.logger.error(str(e))
             return False
+        finally:
+            # Update dashboard if running
+            if self._dash:
+                self._dash.update_manifest()
+                self._dash.end_of_run()
+
         return True

     ###########################################################################
@@ -3338,7 +3312,7 @@ class Chip:
         self.set('option', 'jobname', f'_{taskname}_{sc_job}_{sc_step}{sc_index}', clobber=True)

         # Setup in step/index variables
-        for
+        for step, index in self.get("flowgraph", "showflow", field="schema").get_nodes():
             if step != taskname:
                 continue
             show_tool, _ = get_tool_task(self, step, index, flow='showflow')
@@ -3360,7 +3334,8 @@ class Chip:
         try:
             self.run(raise_exception=True)
             if screenshot:
-                step, index =
+                step, index = self.schema.get("flowgraph", 'showflow',
+                                              field="schema").get_exit_nodes()[0]
                 success = self.find_result('png', step=step, index=index)
             else:
                 success = True
@@ -3438,8 +3413,8 @@ class Chip:
         if hasattr(self, 'logger'):
             self.logger.error(msg)

-        step = self.get('arg', 'step')
-        index = self.get('arg', 'index')
+        step = self.schema.get('arg', 'step')
+        index = self.schema.get('arg', 'index')
         if self.schema.get('option', 'continue', step=step, index=index):
             self._error = True
             return
@@ -3471,4 +3446,3 @@ class Chip:

         # Reinitialize logger on restore
         self._init_logger()
-        self.schema._init_logger(self.logger)