siliconcompiler-0.26.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siliconcompiler/__init__.py +24 -0
- siliconcompiler/__main__.py +12 -0
- siliconcompiler/_common.py +49 -0
- siliconcompiler/_metadata.py +36 -0
- siliconcompiler/apps/__init__.py +0 -0
- siliconcompiler/apps/_common.py +76 -0
- siliconcompiler/apps/sc.py +92 -0
- siliconcompiler/apps/sc_dashboard.py +94 -0
- siliconcompiler/apps/sc_issue.py +178 -0
- siliconcompiler/apps/sc_remote.py +199 -0
- siliconcompiler/apps/sc_server.py +39 -0
- siliconcompiler/apps/sc_show.py +142 -0
- siliconcompiler/apps/smake.py +232 -0
- siliconcompiler/checklists/__init__.py +0 -0
- siliconcompiler/checklists/oh_tapeout.py +41 -0
- siliconcompiler/core.py +3221 -0
- siliconcompiler/data/RobotoMono/LICENSE.txt +202 -0
- siliconcompiler/data/RobotoMono/RobotoMono-Regular.ttf +0 -0
- siliconcompiler/data/heartbeat.v +18 -0
- siliconcompiler/data/logo.png +0 -0
- siliconcompiler/flowgraph.py +570 -0
- siliconcompiler/flows/__init__.py +0 -0
- siliconcompiler/flows/_common.py +67 -0
- siliconcompiler/flows/asicflow.py +180 -0
- siliconcompiler/flows/asictopflow.py +38 -0
- siliconcompiler/flows/dvflow.py +86 -0
- siliconcompiler/flows/fpgaflow.py +202 -0
- siliconcompiler/flows/generate_openroad_rcx.py +66 -0
- siliconcompiler/flows/lintflow.py +35 -0
- siliconcompiler/flows/screenshotflow.py +51 -0
- siliconcompiler/flows/showflow.py +59 -0
- siliconcompiler/flows/signoffflow.py +53 -0
- siliconcompiler/flows/synflow.py +128 -0
- siliconcompiler/fpgas/__init__.py +0 -0
- siliconcompiler/fpgas/lattice_ice40.py +42 -0
- siliconcompiler/fpgas/vpr_example.py +109 -0
- siliconcompiler/issue.py +300 -0
- siliconcompiler/libs/__init__.py +0 -0
- siliconcompiler/libs/asap7sc7p5t.py +8 -0
- siliconcompiler/libs/gf180mcu.py +8 -0
- siliconcompiler/libs/nangate45.py +8 -0
- siliconcompiler/libs/sky130hd.py +8 -0
- siliconcompiler/libs/sky130io.py +8 -0
- siliconcompiler/package.py +412 -0
- siliconcompiler/pdks/__init__.py +0 -0
- siliconcompiler/pdks/asap7.py +8 -0
- siliconcompiler/pdks/freepdk45.py +8 -0
- siliconcompiler/pdks/gf180.py +8 -0
- siliconcompiler/pdks/skywater130.py +8 -0
- siliconcompiler/remote/__init__.py +36 -0
- siliconcompiler/remote/client.py +891 -0
- siliconcompiler/remote/schema.py +106 -0
- siliconcompiler/remote/server.py +507 -0
- siliconcompiler/remote/server_schema/requests/cancel_job.json +51 -0
- siliconcompiler/remote/server_schema/requests/check_progress.json +61 -0
- siliconcompiler/remote/server_schema/requests/check_server.json +38 -0
- siliconcompiler/remote/server_schema/requests/delete_job.json +51 -0
- siliconcompiler/remote/server_schema/requests/get_results.json +48 -0
- siliconcompiler/remote/server_schema/requests/remote_run.json +40 -0
- siliconcompiler/remote/server_schema/responses/cancel_job.json +18 -0
- siliconcompiler/remote/server_schema/responses/check_progress.json +30 -0
- siliconcompiler/remote/server_schema/responses/check_server.json +32 -0
- siliconcompiler/remote/server_schema/responses/delete_job.json +18 -0
- siliconcompiler/remote/server_schema/responses/get_results.json +21 -0
- siliconcompiler/remote/server_schema/responses/remote_run.json +25 -0
- siliconcompiler/report/__init__.py +13 -0
- siliconcompiler/report/html_report.py +74 -0
- siliconcompiler/report/report.py +355 -0
- siliconcompiler/report/streamlit_report.py +137 -0
- siliconcompiler/report/streamlit_viewer.py +944 -0
- siliconcompiler/report/summary_image.py +117 -0
- siliconcompiler/report/summary_table.py +105 -0
- siliconcompiler/report/utils.py +163 -0
- siliconcompiler/scheduler/__init__.py +2092 -0
- siliconcompiler/scheduler/docker_runner.py +253 -0
- siliconcompiler/scheduler/run_node.py +138 -0
- siliconcompiler/scheduler/send_messages.py +178 -0
- siliconcompiler/scheduler/slurm.py +208 -0
- siliconcompiler/scheduler/validation/email_credentials.json +54 -0
- siliconcompiler/schema/__init__.py +7 -0
- siliconcompiler/schema/schema_cfg.py +4014 -0
- siliconcompiler/schema/schema_obj.py +1841 -0
- siliconcompiler/schema/utils.py +93 -0
- siliconcompiler/sphinx_ext/__init__.py +0 -0
- siliconcompiler/sphinx_ext/dynamicgen.py +1006 -0
- siliconcompiler/sphinx_ext/schemagen.py +221 -0
- siliconcompiler/sphinx_ext/utils.py +166 -0
- siliconcompiler/targets/__init__.py +0 -0
- siliconcompiler/targets/asap7_demo.py +68 -0
- siliconcompiler/targets/asic_demo.py +38 -0
- siliconcompiler/targets/fpgaflow_demo.py +47 -0
- siliconcompiler/targets/freepdk45_demo.py +59 -0
- siliconcompiler/targets/gf180_demo.py +77 -0
- siliconcompiler/targets/skywater130_demo.py +70 -0
- siliconcompiler/templates/email/general.j2 +66 -0
- siliconcompiler/templates/email/summary.j2 +43 -0
- siliconcompiler/templates/issue/README.txt +26 -0
- siliconcompiler/templates/issue/run.sh +6 -0
- siliconcompiler/templates/report/bootstrap.min.css +7 -0
- siliconcompiler/templates/report/bootstrap.min.js +7 -0
- siliconcompiler/templates/report/bootstrap_LICENSE.md +24 -0
- siliconcompiler/templates/report/sc_report.j2 +427 -0
- siliconcompiler/templates/slurm/run.sh +9 -0
- siliconcompiler/templates/tcl/manifest.tcl.j2 +137 -0
- siliconcompiler/tools/__init__.py +0 -0
- siliconcompiler/tools/_common/__init__.py +432 -0
- siliconcompiler/tools/_common/asic.py +115 -0
- siliconcompiler/tools/_common/sdc/sc_constraints.sdc +76 -0
- siliconcompiler/tools/_common/tcl/sc_pin_constraints.tcl +63 -0
- siliconcompiler/tools/bambu/bambu.py +32 -0
- siliconcompiler/tools/bambu/convert.py +77 -0
- siliconcompiler/tools/bluespec/bluespec.py +40 -0
- siliconcompiler/tools/bluespec/convert.py +103 -0
- siliconcompiler/tools/builtin/_common.py +155 -0
- siliconcompiler/tools/builtin/builtin.py +26 -0
- siliconcompiler/tools/builtin/concatenate.py +85 -0
- siliconcompiler/tools/builtin/join.py +27 -0
- siliconcompiler/tools/builtin/maximum.py +46 -0
- siliconcompiler/tools/builtin/minimum.py +57 -0
- siliconcompiler/tools/builtin/mux.py +70 -0
- siliconcompiler/tools/builtin/nop.py +38 -0
- siliconcompiler/tools/builtin/verify.py +83 -0
- siliconcompiler/tools/chisel/SCDriver.scala +10 -0
- siliconcompiler/tools/chisel/build.sbt +27 -0
- siliconcompiler/tools/chisel/chisel.py +37 -0
- siliconcompiler/tools/chisel/convert.py +140 -0
- siliconcompiler/tools/execute/exec_input.py +41 -0
- siliconcompiler/tools/execute/execute.py +17 -0
- siliconcompiler/tools/genfasm/bitstream.py +61 -0
- siliconcompiler/tools/genfasm/genfasm.py +40 -0
- siliconcompiler/tools/ghdl/convert.py +87 -0
- siliconcompiler/tools/ghdl/ghdl.py +41 -0
- siliconcompiler/tools/icarus/compile.py +87 -0
- siliconcompiler/tools/icarus/icarus.py +36 -0
- siliconcompiler/tools/icepack/bitstream.py +20 -0
- siliconcompiler/tools/icepack/icepack.py +43 -0
- siliconcompiler/tools/klayout/export.py +117 -0
- siliconcompiler/tools/klayout/klayout.py +119 -0
- siliconcompiler/tools/klayout/klayout_export.py +205 -0
- siliconcompiler/tools/klayout/klayout_operations.py +363 -0
- siliconcompiler/tools/klayout/klayout_show.py +242 -0
- siliconcompiler/tools/klayout/klayout_utils.py +176 -0
- siliconcompiler/tools/klayout/operations.py +194 -0
- siliconcompiler/tools/klayout/screenshot.py +98 -0
- siliconcompiler/tools/klayout/show.py +101 -0
- siliconcompiler/tools/magic/drc.py +49 -0
- siliconcompiler/tools/magic/extspice.py +19 -0
- siliconcompiler/tools/magic/magic.py +85 -0
- siliconcompiler/tools/magic/sc_drc.tcl +96 -0
- siliconcompiler/tools/magic/sc_extspice.tcl +54 -0
- siliconcompiler/tools/magic/sc_magic.tcl +47 -0
- siliconcompiler/tools/montage/montage.py +30 -0
- siliconcompiler/tools/montage/tile.py +66 -0
- siliconcompiler/tools/netgen/count_lvs.py +132 -0
- siliconcompiler/tools/netgen/lvs.py +90 -0
- siliconcompiler/tools/netgen/netgen.py +36 -0
- siliconcompiler/tools/netgen/sc_lvs.tcl +46 -0
- siliconcompiler/tools/nextpnr/apr.py +24 -0
- siliconcompiler/tools/nextpnr/nextpnr.py +59 -0
- siliconcompiler/tools/openfpgaloader/openfpgaloader.py +39 -0
- siliconcompiler/tools/openroad/__init__.py +0 -0
- siliconcompiler/tools/openroad/cts.py +45 -0
- siliconcompiler/tools/openroad/dfm.py +66 -0
- siliconcompiler/tools/openroad/export.py +131 -0
- siliconcompiler/tools/openroad/floorplan.py +70 -0
- siliconcompiler/tools/openroad/openroad.py +977 -0
- siliconcompiler/tools/openroad/physyn.py +27 -0
- siliconcompiler/tools/openroad/place.py +41 -0
- siliconcompiler/tools/openroad/rcx_bench.py +95 -0
- siliconcompiler/tools/openroad/rcx_extract.py +34 -0
- siliconcompiler/tools/openroad/route.py +45 -0
- siliconcompiler/tools/openroad/screenshot.py +60 -0
- siliconcompiler/tools/openroad/scripts/sc_apr.tcl +499 -0
- siliconcompiler/tools/openroad/scripts/sc_cts.tcl +64 -0
- siliconcompiler/tools/openroad/scripts/sc_dfm.tcl +20 -0
- siliconcompiler/tools/openroad/scripts/sc_export.tcl +98 -0
- siliconcompiler/tools/openroad/scripts/sc_floorplan.tcl +413 -0
- siliconcompiler/tools/openroad/scripts/sc_metrics.tcl +158 -0
- siliconcompiler/tools/openroad/scripts/sc_physyn.tcl +7 -0
- siliconcompiler/tools/openroad/scripts/sc_place.tcl +84 -0
- siliconcompiler/tools/openroad/scripts/sc_procs.tcl +423 -0
- siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +63 -0
- siliconcompiler/tools/openroad/scripts/sc_rcx_bench.tcl +20 -0
- siliconcompiler/tools/openroad/scripts/sc_rcx_extract.tcl +12 -0
- siliconcompiler/tools/openroad/scripts/sc_route.tcl +133 -0
- siliconcompiler/tools/openroad/scripts/sc_screenshot.tcl +21 -0
- siliconcompiler/tools/openroad/scripts/sc_write.tcl +5 -0
- siliconcompiler/tools/openroad/scripts/sc_write_images.tcl +361 -0
- siliconcompiler/tools/openroad/show.py +94 -0
- siliconcompiler/tools/openroad/templates/pex.tcl +8 -0
- siliconcompiler/tools/opensta/__init__.py +101 -0
- siliconcompiler/tools/opensta/report_libraries.py +28 -0
- siliconcompiler/tools/opensta/scripts/sc_procs.tcl +47 -0
- siliconcompiler/tools/opensta/scripts/sc_report_libraries.tcl +74 -0
- siliconcompiler/tools/opensta/scripts/sc_timing.tcl +268 -0
- siliconcompiler/tools/opensta/timing.py +214 -0
- siliconcompiler/tools/slang/__init__.py +49 -0
- siliconcompiler/tools/slang/lint.py +101 -0
- siliconcompiler/tools/surelog/__init__.py +123 -0
- siliconcompiler/tools/surelog/parse.py +183 -0
- siliconcompiler/tools/surelog/templates/output.v +7 -0
- siliconcompiler/tools/sv2v/convert.py +46 -0
- siliconcompiler/tools/sv2v/sv2v.py +37 -0
- siliconcompiler/tools/template/template.py +125 -0
- siliconcompiler/tools/verilator/compile.py +139 -0
- siliconcompiler/tools/verilator/lint.py +19 -0
- siliconcompiler/tools/verilator/parse.py +27 -0
- siliconcompiler/tools/verilator/verilator.py +172 -0
- siliconcompiler/tools/vivado/__init__.py +7 -0
- siliconcompiler/tools/vivado/bitstream.py +21 -0
- siliconcompiler/tools/vivado/place.py +21 -0
- siliconcompiler/tools/vivado/route.py +21 -0
- siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +6 -0
- siliconcompiler/tools/vivado/scripts/sc_place.tcl +2 -0
- siliconcompiler/tools/vivado/scripts/sc_route.tcl +4 -0
- siliconcompiler/tools/vivado/scripts/sc_run.tcl +45 -0
- siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +25 -0
- siliconcompiler/tools/vivado/syn_fpga.py +20 -0
- siliconcompiler/tools/vivado/vivado.py +147 -0
- siliconcompiler/tools/vpr/_json_constraint.py +63 -0
- siliconcompiler/tools/vpr/_xml_constraint.py +109 -0
- siliconcompiler/tools/vpr/place.py +137 -0
- siliconcompiler/tools/vpr/route.py +124 -0
- siliconcompiler/tools/vpr/screenshot.py +54 -0
- siliconcompiler/tools/vpr/show.py +88 -0
- siliconcompiler/tools/vpr/vpr.py +357 -0
- siliconcompiler/tools/xyce/xyce.py +36 -0
- siliconcompiler/tools/yosys/lec.py +56 -0
- siliconcompiler/tools/yosys/prepareLib.py +59 -0
- siliconcompiler/tools/yosys/sc_lec.tcl +84 -0
- siliconcompiler/tools/yosys/sc_syn.tcl +79 -0
- siliconcompiler/tools/yosys/syn_asic.py +565 -0
- siliconcompiler/tools/yosys/syn_asic.tcl +377 -0
- siliconcompiler/tools/yosys/syn_asic_fpga_shared.tcl +31 -0
- siliconcompiler/tools/yosys/syn_fpga.py +146 -0
- siliconcompiler/tools/yosys/syn_fpga.tcl +233 -0
- siliconcompiler/tools/yosys/syn_strategies.tcl +81 -0
- siliconcompiler/tools/yosys/techmaps/lcu_kogge_stone.v +39 -0
- siliconcompiler/tools/yosys/templates/abc.const +2 -0
- siliconcompiler/tools/yosys/yosys.py +147 -0
- siliconcompiler/units.py +259 -0
- siliconcompiler/use.py +177 -0
- siliconcompiler/utils/__init__.py +423 -0
- siliconcompiler/utils/asic.py +158 -0
- siliconcompiler/utils/showtools.py +25 -0
- siliconcompiler-0.26.5.dist-info/LICENSE +190 -0
- siliconcompiler-0.26.5.dist-info/METADATA +195 -0
- siliconcompiler-0.26.5.dist-info/RECORD +251 -0
- siliconcompiler-0.26.5.dist-info/WHEEL +5 -0
- siliconcompiler-0.26.5.dist-info/entry_points.txt +12 -0
- siliconcompiler-0.26.5.dist-info/top_level.txt +1 -0
siliconcompiler/core.py
ADDED
|
@@ -0,0 +1,3221 @@
|
|
|
1
|
+
# Copyright 2020 Silicon Compiler Authors. All Rights Reserved.
|
|
2
|
+
|
|
3
|
+
import tarfile
|
|
4
|
+
import os
|
|
5
|
+
import pathlib
|
|
6
|
+
import sys
|
|
7
|
+
import stat
|
|
8
|
+
import gzip
|
|
9
|
+
import re
|
|
10
|
+
import logging
|
|
11
|
+
import hashlib
|
|
12
|
+
import shutil
|
|
13
|
+
import importlib
|
|
14
|
+
import inspect
|
|
15
|
+
import textwrap
|
|
16
|
+
import graphviz
|
|
17
|
+
import codecs
|
|
18
|
+
import copy
|
|
19
|
+
from siliconcompiler.remote import client
|
|
20
|
+
from siliconcompiler.schema import Schema, SCHEMA_VERSION
|
|
21
|
+
from siliconcompiler.schema import utils as schema_utils
|
|
22
|
+
from siliconcompiler import utils
|
|
23
|
+
from siliconcompiler import _metadata
|
|
24
|
+
from siliconcompiler import NodeStatus, SiliconCompilerError
|
|
25
|
+
from siliconcompiler.report import _show_summary_table
|
|
26
|
+
from siliconcompiler.report import _generate_summary_image, _open_summary_image
|
|
27
|
+
from siliconcompiler.report import _generate_html_report, _open_html_report
|
|
28
|
+
from siliconcompiler.report import Dashboard
|
|
29
|
+
from siliconcompiler import package as sc_package
|
|
30
|
+
import glob
|
|
31
|
+
from siliconcompiler.scheduler import run as sc_runner
|
|
32
|
+
from siliconcompiler.flowgraph import _get_flowgraph_nodes, nodes_to_execute, \
|
|
33
|
+
_get_pruned_node_inputs, _get_flowgraph_exit_nodes, get_executed_nodes, \
|
|
34
|
+
_get_flowgraph_execution_order, _check_flowgraph_io, \
|
|
35
|
+
_get_flowgraph_information
|
|
36
|
+
from siliconcompiler.tools._common import get_tool_task
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class Chip:
|
|
40
|
+
"""Object for configuring and executing hardware design flows.
|
|
41
|
+
|
|
42
|
+
This is the main object used for configuration, data, and
|
|
43
|
+
execution within the SiliconCompiler platform.
|
|
44
|
+
|
|
45
|
+
Args:
|
|
46
|
+
design (string): Name of the top level chip design module.
|
|
47
|
+
|
|
48
|
+
Examples:
|
|
49
|
+
>>> siliconcompiler.Chip(design="top")
|
|
50
|
+
Creates a chip object with name "top".
|
|
51
|
+
"""
|
|
52
|
+
|
|
53
|
+
###########################################################################
|
|
54
|
+
def __init__(self, design, loglevel=None):
|
|
55
|
+
# version numbers
|
|
56
|
+
self.scversion = _metadata.version
|
|
57
|
+
self.schemaversion = SCHEMA_VERSION
|
|
58
|
+
|
|
59
|
+
# Local variables
|
|
60
|
+
self.scroot = os.path.dirname(os.path.abspath(__file__))
|
|
61
|
+
self._error = False
|
|
62
|
+
try:
|
|
63
|
+
self.cwd = os.getcwd()
|
|
64
|
+
except FileNotFoundError:
|
|
65
|
+
raise SiliconCompilerError(
|
|
66
|
+
"SiliconCompiler must be run from a directory that exists. "
|
|
67
|
+
"If you are sure that your working directory is valid, try running `cd $(pwd)`.""")
|
|
68
|
+
|
|
69
|
+
# Initialize custom error handling for codecs. This has to be called
|
|
70
|
+
# by each spawned (as opposed to forked) subprocess
|
|
71
|
+
self._init_codecs()
|
|
72
|
+
|
|
73
|
+
self._init_logger()
|
|
74
|
+
|
|
75
|
+
self.schema = Schema(logger=self.logger)
|
|
76
|
+
|
|
77
|
+
self.register_source('siliconcompiler',
|
|
78
|
+
'python://siliconcompiler')
|
|
79
|
+
|
|
80
|
+
# Cache of python modules
|
|
81
|
+
self.modules = {}
|
|
82
|
+
|
|
83
|
+
# Cache of python packages loaded
|
|
84
|
+
self._packages = {}
|
|
85
|
+
|
|
86
|
+
# Cache of file hashes
|
|
87
|
+
self.__hashes = {}
|
|
88
|
+
|
|
89
|
+
# Showtools
|
|
90
|
+
self._showtools = {}
|
|
91
|
+
for plugin in utils.get_plugins('show'):
|
|
92
|
+
plugin(self)
|
|
93
|
+
|
|
94
|
+
# Controls whether find_files returns an abspath or relative to this
|
|
95
|
+
# this is primarily used when generating standalone testcases
|
|
96
|
+
self._relative_path = None
|
|
97
|
+
|
|
98
|
+
self.set('design', design)
|
|
99
|
+
if loglevel:
|
|
100
|
+
self.set('option', 'loglevel', loglevel)
|
|
101
|
+
|
|
102
|
+
self._loaded_modules = {
|
|
103
|
+
'flows': [],
|
|
104
|
+
'pdks': [],
|
|
105
|
+
'fpgas': [],
|
|
106
|
+
'libs': [],
|
|
107
|
+
'checklists': []
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
###########################################################################
|
|
111
|
+
@property
|
|
112
|
+
def design(self):
|
|
113
|
+
'''Design name of chip object.
|
|
114
|
+
|
|
115
|
+
This is an immutable property.'''
|
|
116
|
+
return self.get('design')
|
|
117
|
+
|
|
118
|
+
###########################################################################
|
|
119
|
+
def top(self, step=None, index=None):
|
|
120
|
+
'''Gets the name of the design's entrypoint for compilation and
|
|
121
|
+
simulation.
|
|
122
|
+
|
|
123
|
+
This method should be used to name input and output files in tool
|
|
124
|
+
drivers, rather than relying on chip.get('design') directly.
|
|
125
|
+
|
|
126
|
+
Args:
|
|
127
|
+
step (str): Node step name
|
|
128
|
+
index (str): Node index
|
|
129
|
+
|
|
130
|
+
Returns :keypath:`option, entrypoint` if it has been set, otherwise
|
|
131
|
+
:keypath:`design`.
|
|
132
|
+
'''
|
|
133
|
+
if not step:
|
|
134
|
+
step = Schema.GLOBAL_KEY
|
|
135
|
+
if not index:
|
|
136
|
+
index = Schema.GLOBAL_KEY
|
|
137
|
+
entrypoint = self.get('option', 'entrypoint', step=step, index=index)
|
|
138
|
+
if not entrypoint:
|
|
139
|
+
return self.design
|
|
140
|
+
return entrypoint
|
|
141
|
+
|
|
142
|
+
###########################################################################
|
|
143
|
+
def _load_module(self, module_name, raise_error=False):
|
|
144
|
+
if module_name in self.modules:
|
|
145
|
+
return self.modules[module_name]
|
|
146
|
+
|
|
147
|
+
try:
|
|
148
|
+
self.modules[module_name] = importlib.import_module(module_name)
|
|
149
|
+
return self.modules[module_name]
|
|
150
|
+
except Exception as e:
|
|
151
|
+
if raise_error:
|
|
152
|
+
raise e
|
|
153
|
+
|
|
154
|
+
return None
|
|
155
|
+
|
|
156
|
+
###########################################################################
|
|
157
|
+
def _get_loaded_modules(self):
|
|
158
|
+
return self.modules
|
|
159
|
+
|
|
160
|
+
def _get_tool_module(self, step, index, flow=None, error=True):
|
|
161
|
+
if not flow:
|
|
162
|
+
flow = self.get('option', 'flow')
|
|
163
|
+
|
|
164
|
+
tool, _ = get_tool_task(self, step, index, flow=flow)
|
|
165
|
+
|
|
166
|
+
taskmodule = self.get('flowgraph', flow, step, index, 'taskmodule')
|
|
167
|
+
module_path = taskmodule.split('.')
|
|
168
|
+
|
|
169
|
+
tool_module_base = module_path[0:-1]
|
|
170
|
+
|
|
171
|
+
module = None
|
|
172
|
+
tool_module_names = ['.'.join([*tool_module_base, tool]), '.'.join(tool_module_base)]
|
|
173
|
+
for tool_module in tool_module_names:
|
|
174
|
+
if module:
|
|
175
|
+
break
|
|
176
|
+
|
|
177
|
+
module = self._load_module(tool_module)
|
|
178
|
+
|
|
179
|
+
if error and not module:
|
|
180
|
+
raise SiliconCompilerError(f'Unable to load {", ".join(tool_module_names)} for {tool}',
|
|
181
|
+
chip=self)
|
|
182
|
+
else:
|
|
183
|
+
return module
|
|
184
|
+
|
|
185
|
+
def _get_task_module(self, step, index, flow=None, error=True):
|
|
186
|
+
if not flow:
|
|
187
|
+
flow = self.get('option', 'flow')
|
|
188
|
+
|
|
189
|
+
taskmodule = self.get('flowgraph', flow, step, index, 'taskmodule')
|
|
190
|
+
|
|
191
|
+
module = self._load_module(taskmodule)
|
|
192
|
+
|
|
193
|
+
if error and not module:
|
|
194
|
+
tool, task = get_tool_task(self, step, index, flow=flow)
|
|
195
|
+
raise SiliconCompilerError(f'Unable to load {taskmodule} for {tool}/{task}', chip=self)
|
|
196
|
+
else:
|
|
197
|
+
return module
|
|
198
|
+
|
|
199
|
+
def _add_file_logger(self, filename):
|
|
200
|
+
# Add a file handler for logging
|
|
201
|
+
logformat = self.logger.handlers[0].formatter
|
|
202
|
+
|
|
203
|
+
file_handler = logging.FileHandler(filename)
|
|
204
|
+
file_handler.setFormatter(logformat)
|
|
205
|
+
|
|
206
|
+
self.logger.addHandler(file_handler)
|
|
207
|
+
|
|
208
|
+
###########################################################################
|
|
209
|
+
def _init_logger(self, step=None, index=None, in_run=False):
|
|
210
|
+
|
|
211
|
+
# Check if the logger exists and create
|
|
212
|
+
if not hasattr(self, 'logger') or not self.logger:
|
|
213
|
+
self.logger = logging.getLogger(f'sc_{id(self)}')
|
|
214
|
+
|
|
215
|
+
self.logger.propagate = False
|
|
216
|
+
|
|
217
|
+
loglevel = 'info'
|
|
218
|
+
if hasattr(self, 'schema'):
|
|
219
|
+
loglevel = self.schema.get('option', 'loglevel', step=step, index=index)
|
|
220
|
+
else:
|
|
221
|
+
in_run = False
|
|
222
|
+
|
|
223
|
+
log_format = ['%(levelname)-7s']
|
|
224
|
+
if loglevel == 'debug':
|
|
225
|
+
log_format.append('%(funcName)-10s')
|
|
226
|
+
log_format.append('%(lineno)-4s')
|
|
227
|
+
|
|
228
|
+
if in_run:
|
|
229
|
+
# Figure out how wide to make step and index fields
|
|
230
|
+
max_step_len = 1
|
|
231
|
+
max_index_len = 1
|
|
232
|
+
nodes_to_run = _get_flowgraph_nodes(self, flow=self.get('option', 'flow'))
|
|
233
|
+
if self.get('option', 'remote'):
|
|
234
|
+
nodes_to_run.append((client.remote_step_name, '0'))
|
|
235
|
+
for future_step, future_index in nodes_to_run:
|
|
236
|
+
max_step_len = max(len(future_step), max_step_len)
|
|
237
|
+
max_index_len = max(len(future_index), max_index_len)
|
|
238
|
+
max_step_len = min(max_step_len, 20)
|
|
239
|
+
|
|
240
|
+
jobname = self.get('option', 'jobname')
|
|
241
|
+
|
|
242
|
+
if step is None:
|
|
243
|
+
step = '-' * max(max_step_len // 4, 1)
|
|
244
|
+
if index is None:
|
|
245
|
+
index = '-' * max(max_index_len // 4, 1)
|
|
246
|
+
|
|
247
|
+
log_format.append(jobname)
|
|
248
|
+
log_format.append(f'{utils.truncate_text(step, max_step_len): <{max_step_len}}')
|
|
249
|
+
log_format.append(f'{index: >{max_index_len}}')
|
|
250
|
+
|
|
251
|
+
log_format.append('%(message)s')
|
|
252
|
+
logformat = '| ' + ' | '.join(log_format)
|
|
253
|
+
|
|
254
|
+
if not self.logger.hasHandlers():
|
|
255
|
+
stream_handler = logging.StreamHandler(stream=sys.stdout)
|
|
256
|
+
self.logger.addHandler(stream_handler)
|
|
257
|
+
|
|
258
|
+
for handler in self.logger.handlers:
|
|
259
|
+
formatter = logging.Formatter(logformat)
|
|
260
|
+
handler.setFormatter(formatter)
|
|
261
|
+
|
|
262
|
+
self.logger.setLevel(schema_utils.translate_loglevel(loglevel))
|
|
263
|
+
|
|
264
|
+
###########################################################################
|
|
265
|
+
def _init_codecs(self):
|
|
266
|
+
# Custom error handlers used to provide warnings when invalid characters
|
|
267
|
+
# are encountered in a file for a given encoding. The names
|
|
268
|
+
# 'replace_with_warning' and 'ignore_with_warning' are supplied to
|
|
269
|
+
# open() via the 'errors' kwarg.
|
|
270
|
+
|
|
271
|
+
# Warning message/behavior for invalid characters while running tool
|
|
272
|
+
def display_error_handler(e):
|
|
273
|
+
self.logger.warning('Invalid character in tool output, displaying as �')
|
|
274
|
+
return codecs.replace_errors(e)
|
|
275
|
+
codecs.register_error('replace_with_warning', display_error_handler)
|
|
276
|
+
|
|
277
|
+
# Warning message/behavior for invalid characters while processing log
|
|
278
|
+
def log_error_handler(e):
|
|
279
|
+
self.logger.warning('Ignoring invalid character found while reading log')
|
|
280
|
+
return codecs.ignore_errors(e)
|
|
281
|
+
codecs.register_error('ignore_with_warning', log_error_handler)
|
|
282
|
+
|
|
283
|
+
###########################################################################
|
|
284
|
+
def create_cmdline(self,
|
|
285
|
+
progname=None,
|
|
286
|
+
description=None,
|
|
287
|
+
switchlist=None,
|
|
288
|
+
input_map=None,
|
|
289
|
+
additional_args=None):
|
|
290
|
+
"""Creates an SC command line interface.
|
|
291
|
+
|
|
292
|
+
Exposes parameters in the SC schema as command line switches,
|
|
293
|
+
simplifying creation of SC apps with a restricted set of schema
|
|
294
|
+
parameters exposed at the command line. The order of command
|
|
295
|
+
line switch settings parsed from the command line is as follows:
|
|
296
|
+
|
|
297
|
+
1. loglevel
|
|
298
|
+
2. read_manifest([cfg])
|
|
299
|
+
3. read compiler inputs
|
|
300
|
+
4. all other switches
|
|
301
|
+
5. load_target('target')
|
|
302
|
+
|
|
303
|
+
The cmdline interface is implemented using the Python argparse package
|
|
304
|
+
and the following use restrictions apply.
|
|
305
|
+
|
|
306
|
+
* Help is accessed with the '-h' switch.
|
|
307
|
+
* Arguments that include spaces must be enclosed with double quotes.
|
|
308
|
+
* List parameters are entered individually. (ie. -y libdir1 -y libdir2)
|
|
309
|
+
* For parameters with Boolean types, the switch implies "true".
|
|
310
|
+
* Special characters (such as '-') must be enclosed in double quotes.
|
|
311
|
+
* Compiler compatible switches include: -D, -I, -O{0,1,2,3}
|
|
312
|
+
* Verilog legacy switch formats are supported: +libext+, +incdir+
|
|
313
|
+
|
|
314
|
+
Args:
|
|
315
|
+
progname (str): Name of program to be executed.
|
|
316
|
+
description (str): Short program description.
|
|
317
|
+
switchlist (list of str): List of SC parameter switches to expose
|
|
318
|
+
at the command line. By default all SC schema switches are
|
|
319
|
+
available. Parameter switches should be entered based on the
|
|
320
|
+
parameter 'switch' field in the schema. For parameters with
|
|
321
|
+
multiple switches, both will be accepted if any one is included
|
|
322
|
+
in this list.
|
|
323
|
+
input_map (dict of str): Dictionary mapping file extensions to input
|
|
324
|
+
filetypes. This is used to automatically assign positional
|
|
325
|
+
source arguments to ['input', 'fileset', ...] keypaths based on their file
|
|
326
|
+
extension. If None, the CLI will not accept positional source
|
|
327
|
+
arguments.
|
|
328
|
+
additional_args (dict of dict): Dictionary of extra arguments to add
|
|
329
|
+
to the command line parser, with the arguments matching the
|
|
330
|
+
argparse.add_argument() call.
|
|
331
|
+
|
|
332
|
+
Returns:
|
|
333
|
+
None if additional_args is not provided, otherwise a dictionary with the
|
|
334
|
+
command line options detected from the additional_args
|
|
335
|
+
|
|
336
|
+
Examples:
|
|
337
|
+
>>> chip.create_cmdline(progname='sc-show',switchlist=['-input','-cfg'])
|
|
338
|
+
Creates a command line interface for 'sc-show' app.
|
|
339
|
+
|
|
340
|
+
>>> chip.create_cmdline(progname='sc', input_map={'v': ('rtl', 'verilog')})
|
|
341
|
+
All sources ending in .v will be stored in ['input', 'rtl', 'verilog']
|
|
342
|
+
|
|
343
|
+
>>> extra = chip.create_cmdline(progname='sc',
|
|
344
|
+
additional_args={'-demo': {'action': 'store_true'}})
|
|
345
|
+
Returns extra = {'demo': False/True}
|
|
346
|
+
"""
|
|
347
|
+
|
|
348
|
+
def print_banner():
|
|
349
|
+
print(_metadata.banner)
|
|
350
|
+
print("Authors:", ", ".join(_metadata.authors))
|
|
351
|
+
print("Version:", _metadata.version, "\n")
|
|
352
|
+
print("-" * 80)
|
|
353
|
+
|
|
354
|
+
def input_map_handler(sources):
|
|
355
|
+
for source in sources:
|
|
356
|
+
self.input(source, iomap=input_map)
|
|
357
|
+
|
|
358
|
+
def preprocess_keys(keypath, item):
|
|
359
|
+
if tuple(keypath) == ('option', 'optmode') and not item.startswith('O'):
|
|
360
|
+
return 'O' + item
|
|
361
|
+
return item
|
|
362
|
+
|
|
363
|
+
def post_process(cmdargs):
|
|
364
|
+
# Ensure files and dir packages are set
|
|
365
|
+
for key in self.allkeys():
|
|
366
|
+
paramtype = self.get(*key, field='type')
|
|
367
|
+
if 'file' not in paramtype and 'dir' not in paramtype:
|
|
368
|
+
continue
|
|
369
|
+
|
|
370
|
+
is_list = '[' in paramtype
|
|
371
|
+
|
|
372
|
+
for vals, step, index in self.schema._getvals(*key):
|
|
373
|
+
if self.get(*key, field='pernode') != 'never':
|
|
374
|
+
if step is None:
|
|
375
|
+
step = Schema.GLOBAL_KEY
|
|
376
|
+
if index is None:
|
|
377
|
+
index = Schema.GLOBAL_KEY
|
|
378
|
+
|
|
379
|
+
if not is_list:
|
|
380
|
+
vals = [vals]
|
|
381
|
+
packages = self.get(*key, field='package', step=step, index=index)
|
|
382
|
+
if len(packages) == len(vals):
|
|
383
|
+
continue
|
|
384
|
+
|
|
385
|
+
packages.extend((len(vals) - len(packages)) * [None])
|
|
386
|
+
|
|
387
|
+
self.set(*key, packages, field='package', step=step, index=index)
|
|
388
|
+
|
|
389
|
+
# Read in target if set
|
|
390
|
+
if 'option_target' in cmdargs:
|
|
391
|
+
# running target command
|
|
392
|
+
self.load_target(cmdargs['option_target'])
|
|
393
|
+
|
|
394
|
+
if not progname:
|
|
395
|
+
progname = self.design
|
|
396
|
+
|
|
397
|
+
try:
|
|
398
|
+
return self.schema.create_cmdline(
|
|
399
|
+
progname=progname,
|
|
400
|
+
description=description,
|
|
401
|
+
switchlist=switchlist,
|
|
402
|
+
input_map=input_map,
|
|
403
|
+
additional_args=additional_args,
|
|
404
|
+
version=_metadata.version,
|
|
405
|
+
print_banner=print_banner,
|
|
406
|
+
input_map_handler=input_map_handler,
|
|
407
|
+
preprocess_keys=preprocess_keys,
|
|
408
|
+
post_process=post_process,
|
|
409
|
+
logger=self.logger)
|
|
410
|
+
except ValueError as e:
|
|
411
|
+
raise SiliconCompilerError(f'{e}', chip=self)
|
|
412
|
+
|
|
413
|
+
def register_source(self, name, path, ref=None, clobber=True):
|
|
414
|
+
"""
|
|
415
|
+
Registers a package by its name with the source path and reference
|
|
416
|
+
|
|
417
|
+
Registered package sources are stored in the package section of the schema.
|
|
418
|
+
|
|
419
|
+
Args:
|
|
420
|
+
name (str): Package name
|
|
421
|
+
path (str): Path to the sources, can be file, git url, archive url
|
|
422
|
+
ref (str): Reference of the sources, can be commitid, branch name, tag
|
|
423
|
+
|
|
424
|
+
Examples:
|
|
425
|
+
>>> chip.register_source('siliconcompiler_data',
|
|
426
|
+
'git+https://github.com/siliconcompiler/siliconcompiler',
|
|
427
|
+
'dependency-caching-rebase')
|
|
428
|
+
"""
|
|
429
|
+
|
|
430
|
+
preset_path = self.get('package', 'source', name, 'path')
|
|
431
|
+
preset_ref = self.get('package', 'source', name, 'ref')
|
|
432
|
+
if preset_path and preset_path != path or preset_ref and preset_ref != ref:
|
|
433
|
+
self.logger.warning(f'The data source {name} already exists.')
|
|
434
|
+
self.logger.warning(f'Overwriting path {preset_path} with {path}.')
|
|
435
|
+
self.logger.warning(f'Overwriting ref {preset_ref} with {ref}.')
|
|
436
|
+
self.set('package', 'source', name, 'path', path, clobber=clobber)
|
|
437
|
+
if ref:
|
|
438
|
+
self.set('package', 'source', name, 'ref', ref, clobber=clobber)
|
|
439
|
+
|
|
440
|
+
def register_showtool(self, extension, task):
|
|
441
|
+
"""
|
|
442
|
+
Registers a show or screenshot task with a given extension.
|
|
443
|
+
|
|
444
|
+
Args:
|
|
445
|
+
extension (str): file extension
|
|
446
|
+
task (module): task to use for viewing this extension
|
|
447
|
+
|
|
448
|
+
Examples:
|
|
449
|
+
>>> from siliconcompiler.tools.klayout import show
|
|
450
|
+
>>> chip.register_showtool('gds', show)
|
|
451
|
+
"""
|
|
452
|
+
showtype = task.__name__.split('.')[-1]
|
|
453
|
+
|
|
454
|
+
if showtype not in ('show', 'screenshot'):
|
|
455
|
+
raise ValueError(f'Invalid showtask: {task.__name__}')
|
|
456
|
+
|
|
457
|
+
if extension not in self._showtools:
|
|
458
|
+
self._showtools[extension] = {}
|
|
459
|
+
|
|
460
|
+
self._showtools[extension][showtype] = task
|
|
461
|
+
|
|
462
|
+
##########################################################################
|
|
463
|
+
def load_target(self, module, **kwargs):
|
|
464
|
+
"""
|
|
465
|
+
Loads a target module and runs the setup() function.
|
|
466
|
+
|
|
467
|
+
The function searches the installed Python packages for <name> and
|
|
468
|
+
siliconcompiler.targets.<name> and runs the setup function in that module
|
|
469
|
+
if found.
|
|
470
|
+
|
|
471
|
+
Args:
|
|
472
|
+
module (str or module): Module name
|
|
473
|
+
**kwargs (str): Options to pass along to the target
|
|
474
|
+
|
|
475
|
+
Examples:
|
|
476
|
+
>>> chip.load_target('freepdk45_demo', syn_np=5)
|
|
477
|
+
Loads the 'freepdk45_demo' target with 5 parallel synthesis tasks
|
|
478
|
+
"""
|
|
479
|
+
|
|
480
|
+
if not inspect.ismodule(module):
|
|
481
|
+
# Search order "{name}", and "siliconcompiler.targets.{name}"
|
|
482
|
+
modules = []
|
|
483
|
+
for mod_name in [module, f'siliconcompiler.targets.{module}']:
|
|
484
|
+
mod = self._load_module(mod_name)
|
|
485
|
+
if mod:
|
|
486
|
+
modules.append(mod)
|
|
487
|
+
|
|
488
|
+
if len(modules) == 0:
|
|
489
|
+
raise SiliconCompilerError(f'Could not find target {module}', chip=self)
|
|
490
|
+
else:
|
|
491
|
+
modules = [module]
|
|
492
|
+
|
|
493
|
+
# Check for setup in modules
|
|
494
|
+
load_function = None
|
|
495
|
+
for mod in modules:
|
|
496
|
+
load_function = getattr(mod, 'setup', None)
|
|
497
|
+
if load_function:
|
|
498
|
+
module_name = mod.__name__
|
|
499
|
+
break
|
|
500
|
+
|
|
501
|
+
if not load_function:
|
|
502
|
+
raise SiliconCompilerError(
|
|
503
|
+
f'Could not find setup function for {module} target',
|
|
504
|
+
chip=self)
|
|
505
|
+
|
|
506
|
+
try:
|
|
507
|
+
load_function(self, **kwargs)
|
|
508
|
+
except Exception as e:
|
|
509
|
+
self.logger.error(f'Failed to load target {module}')
|
|
510
|
+
raise e
|
|
511
|
+
|
|
512
|
+
# Record target
|
|
513
|
+
self.set('option', 'target', module_name)
|
|
514
|
+
|
|
515
|
+
##########################################################################
|
|
516
|
+
def use(self, module, **kwargs):
|
|
517
|
+
'''
|
|
518
|
+
Loads a SiliconCompiler module into the current chip object.
|
|
519
|
+
|
|
520
|
+
The behavior of this function is described in the table below
|
|
521
|
+
|
|
522
|
+
.. list-table:: Use behavior
|
|
523
|
+
:header-rows: 1
|
|
524
|
+
|
|
525
|
+
* - Input type
|
|
526
|
+
- Action
|
|
527
|
+
* - Module with setup function
|
|
528
|
+
- Call `setup()` and import returned objects
|
|
529
|
+
* - Chip
|
|
530
|
+
- Import as a library
|
|
531
|
+
* - Library
|
|
532
|
+
- Import as a library
|
|
533
|
+
* - PDK
|
|
534
|
+
- Import as a pdk
|
|
535
|
+
* - FPGA
|
|
536
|
+
- Import as a fpga
|
|
537
|
+
* - Flow
|
|
538
|
+
- Import as a flow
|
|
539
|
+
* - Checklist
|
|
540
|
+
- Import as a checklist
|
|
541
|
+
'''
|
|
542
|
+
|
|
543
|
+
# Load supported types here to avoid cyclic import
|
|
544
|
+
from siliconcompiler import PDK
|
|
545
|
+
from siliconcompiler import FPGA
|
|
546
|
+
from siliconcompiler import Flow
|
|
547
|
+
from siliconcompiler import Library
|
|
548
|
+
from siliconcompiler import Checklist
|
|
549
|
+
|
|
550
|
+
setup_func = getattr(module, 'setup', None)
|
|
551
|
+
if (setup_func):
|
|
552
|
+
# Call the module setup function.
|
|
553
|
+
try:
|
|
554
|
+
use_modules = setup_func(self, **kwargs)
|
|
555
|
+
except Exception as e:
|
|
556
|
+
self.logger.error(f'Unable to run setup() for {module.__name__}')
|
|
557
|
+
raise e
|
|
558
|
+
else:
|
|
559
|
+
# Import directly
|
|
560
|
+
use_modules = module
|
|
561
|
+
|
|
562
|
+
# Make it a list for consistency
|
|
563
|
+
if not isinstance(use_modules, list):
|
|
564
|
+
use_modules = [use_modules]
|
|
565
|
+
|
|
566
|
+
for use_module in use_modules:
|
|
567
|
+
if isinstance(use_module, PDK):
|
|
568
|
+
self._loaded_modules['pdks'].append(use_module.design)
|
|
569
|
+
self.__use_import('pdk', use_module)
|
|
570
|
+
|
|
571
|
+
elif isinstance(use_module, FPGA):
|
|
572
|
+
self._loaded_modules['fpgas'].append(use_module.design)
|
|
573
|
+
self.__use_import('fpga', use_module)
|
|
574
|
+
|
|
575
|
+
elif isinstance(use_module, Flow):
|
|
576
|
+
self._loaded_modules['flows'].append(use_module.design)
|
|
577
|
+
self.__use_import('flowgraph', use_module)
|
|
578
|
+
|
|
579
|
+
elif isinstance(use_module, Checklist):
|
|
580
|
+
self._loaded_modules['checklists'].append(use_module.design)
|
|
581
|
+
self.__use_import('checklist', use_module)
|
|
582
|
+
|
|
583
|
+
elif isinstance(use_module, (Library, Chip)):
|
|
584
|
+
self._loaded_modules['libs'].append(use_module.design)
|
|
585
|
+
cfg = use_module.schema.cfg
|
|
586
|
+
keep_inputs = True
|
|
587
|
+
if not isinstance(use_module, Library):
|
|
588
|
+
keep_inputs = False
|
|
589
|
+
self.__import_library(use_module.design, cfg,
|
|
590
|
+
keep_input=keep_inputs)
|
|
591
|
+
|
|
592
|
+
is_auto_enable = getattr(use_module, 'is_auto_enable', None)
|
|
593
|
+
if is_auto_enable:
|
|
594
|
+
if is_auto_enable():
|
|
595
|
+
self.add('option', 'library', use_module.design)
|
|
596
|
+
|
|
597
|
+
else:
|
|
598
|
+
module_name = module.__name__
|
|
599
|
+
class_name = use_module.__class__.__name__
|
|
600
|
+
raise ValueError(f"{module_name} returned an object with an "
|
|
601
|
+
f"unsupported type: {class_name}")
|
|
602
|
+
|
|
603
|
+
def __import_data_sources(self, cfg):
|
|
604
|
+
if 'package' not in cfg or 'source' not in cfg['package']:
|
|
605
|
+
return
|
|
606
|
+
|
|
607
|
+
for source, config in cfg['package']['source'].items():
|
|
608
|
+
if source == 'default':
|
|
609
|
+
continue
|
|
610
|
+
|
|
611
|
+
if 'path' not in config or \
|
|
612
|
+
Schema.GLOBAL_KEY not in config['path']['node'] or \
|
|
613
|
+
Schema.GLOBAL_KEY not in config['path']['node'][Schema.GLOBAL_KEY]:
|
|
614
|
+
continue
|
|
615
|
+
|
|
616
|
+
path = config['path']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
|
|
617
|
+
|
|
618
|
+
ref = None
|
|
619
|
+
if 'ref' in config and \
|
|
620
|
+
Schema.GLOBAL_KEY in config['ref']['node'] and \
|
|
621
|
+
Schema.GLOBAL_KEY in config['ref']['node'][Schema.GLOBAL_KEY]:
|
|
622
|
+
ref = config['ref']['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]['value']
|
|
623
|
+
|
|
624
|
+
self.register_source(
|
|
625
|
+
name=source,
|
|
626
|
+
path=path,
|
|
627
|
+
ref=ref)
|
|
628
|
+
|
|
629
|
+
def __use_import(self, group, module):
|
|
630
|
+
'''
|
|
631
|
+
Imports the module into the schema
|
|
632
|
+
|
|
633
|
+
Args:
|
|
634
|
+
group (str): Top group to copy information into
|
|
635
|
+
module (class): Chip object to import
|
|
636
|
+
'''
|
|
637
|
+
|
|
638
|
+
importname = module.design
|
|
639
|
+
|
|
640
|
+
src_cfg = self.schema.cfg[group]
|
|
641
|
+
|
|
642
|
+
if importname in src_cfg:
|
|
643
|
+
self.logger.warning(f'Overwriting existing {group} {importname}')
|
|
644
|
+
del src_cfg[importname]
|
|
645
|
+
|
|
646
|
+
# Copy
|
|
647
|
+
src_cfg[importname] = module.getdict(group, importname)
|
|
648
|
+
self.__import_data_sources(module.schema.cfg)
|
|
649
|
+
|
|
650
|
+
###########################################################################
|
|
651
|
+
def help(self, *keypath):
|
|
652
|
+
"""
|
|
653
|
+
Returns a schema parameter description.
|
|
654
|
+
|
|
655
|
+
Args:
|
|
656
|
+
*keypath(str): Keypath to parameter.
|
|
657
|
+
|
|
658
|
+
Returns:
|
|
659
|
+
A formatted multi-line help paragraph for the parameter provided.
|
|
660
|
+
|
|
661
|
+
Examples:
|
|
662
|
+
>>> print(chip.help('asic','diearea'))
|
|
663
|
+
Displays help information about the 'asic, diearea' parameter
|
|
664
|
+
|
|
665
|
+
"""
|
|
666
|
+
|
|
667
|
+
self.logger.debug('Fetching help for %s', keypath)
|
|
668
|
+
|
|
669
|
+
# Fetch Values
|
|
670
|
+
|
|
671
|
+
description = self.get(*keypath, field='shorthelp')
|
|
672
|
+
typestr = self.get(*keypath, field='type')
|
|
673
|
+
switchstr = str(self.get(*keypath, field='switch'))
|
|
674
|
+
defstr = str(self.schema.get_default(*keypath))
|
|
675
|
+
requirement = str(self.get(*keypath, field='require'))
|
|
676
|
+
helpstr = self.get(*keypath, field='help')
|
|
677
|
+
example = self.get(*keypath, field='example')
|
|
678
|
+
|
|
679
|
+
examplestr = ("\nExamples: " + example[0] + ''.join(
|
|
680
|
+
["\n " + ex for ex in example[1:]]))
|
|
681
|
+
|
|
682
|
+
# Removing multiple spaces and newlines
|
|
683
|
+
helpstr = helpstr.rstrip()
|
|
684
|
+
helpstr = helpstr.replace("\n", "")
|
|
685
|
+
helpstr = ' '.join(helpstr.split())
|
|
686
|
+
|
|
687
|
+
for idx, item in enumerate(example):
|
|
688
|
+
example[idx] = ' '.join(item.split())
|
|
689
|
+
example[idx] = example[idx].replace(", ", ",")
|
|
690
|
+
|
|
691
|
+
# Wrap text
|
|
692
|
+
para = textwrap.TextWrapper(width=60)
|
|
693
|
+
para_list = para.wrap(text=helpstr)
|
|
694
|
+
|
|
695
|
+
# Full Doc String
|
|
696
|
+
fullstr = "-" * 80
|
|
697
|
+
fullstr += "\nDescription: " + description
|
|
698
|
+
fullstr += "\nSwitch: " + switchstr
|
|
699
|
+
fullstr += "\nType: " + typestr
|
|
700
|
+
fullstr += "\nRequirement: " + requirement
|
|
701
|
+
fullstr += "\nDefault: " + defstr
|
|
702
|
+
fullstr += examplestr
|
|
703
|
+
fullstr += "\nHelp: " + para_list[0] + "\n"
|
|
704
|
+
for line in para_list[1:]:
|
|
705
|
+
fullstr = fullstr + " " * 13 + line.lstrip() + "\n"
|
|
706
|
+
|
|
707
|
+
return fullstr
|
|
708
|
+
|
|
709
|
+
###########################################################################
|
|
710
|
+
def valid(self, *keypath, default_valid=False, job=None):
|
|
711
|
+
"""
|
|
712
|
+
Checks validity of a keypath.
|
|
713
|
+
|
|
714
|
+
Checks the validity of a parameter keypath and returns True if the
|
|
715
|
+
keypath is valid and False if invalid.
|
|
716
|
+
|
|
717
|
+
Args:
|
|
718
|
+
default_valid (bool): Whether to consider "default" in valid
|
|
719
|
+
keypaths as a wildcard. Defaults to False.
|
|
720
|
+
job (str): Jobname to use for dictionary access in place of the
|
|
721
|
+
current active jobname.
|
|
722
|
+
|
|
723
|
+
Returns:
|
|
724
|
+
Boolean indicating validity of keypath.
|
|
725
|
+
|
|
726
|
+
Examples:
|
|
727
|
+
>>> check = chip.valid('design')
|
|
728
|
+
Returns True.
|
|
729
|
+
>>> check = chip.valid('blah')
|
|
730
|
+
Returns False.
|
|
731
|
+
>>> check = chip.valid('metric', 'foo', '0', 'tasktime', default_valid=True)
|
|
732
|
+
Returns True, even if "foo" and "0" aren't in current configuration.
|
|
733
|
+
"""
|
|
734
|
+
return self.schema.valid(*keypath, default_valid=default_valid, job=job)
|
|
735
|
+
|
|
736
|
+
###########################################################################
|
|
737
|
+
def get(self, *keypath, field='value', job=None, step=None, index=None):
|
|
738
|
+
"""
|
|
739
|
+
Returns a schema parameter field.
|
|
740
|
+
|
|
741
|
+
Returns a schema parameter field based on the keypath provided in the
|
|
742
|
+
``*keypath``. See the :ref:`Schema Reference Manual<SiliconCompiler
|
|
743
|
+
Schema>` for documentation of all supported keypaths. The returned type
|
|
744
|
+
is consistent with the type field of the parameter. Fetching parameters
|
|
745
|
+
with empty or undefined value files returns None for scalar types and []
|
|
746
|
+
(empty list) for list types. Accessing a non-existent keypath produces
|
|
747
|
+
a logger error message and raises the Chip object error flag.
|
|
748
|
+
|
|
749
|
+
Args:
|
|
750
|
+
keypath(list str): Variable length schema key list.
|
|
751
|
+
field(str): Parameter field to fetch.
|
|
752
|
+
job (str): Jobname to use for dictionary access in place of the
|
|
753
|
+
current active jobname.
|
|
754
|
+
step (str): Step name to access for parameters that may be specified
|
|
755
|
+
on a per-node basis.
|
|
756
|
+
index (str): Index name to access for parameters that may be specified
|
|
757
|
+
on a per-node basis.
|
|
758
|
+
|
|
759
|
+
Returns:
|
|
760
|
+
Value found for the keypath and field provided.
|
|
761
|
+
|
|
762
|
+
Examples:
|
|
763
|
+
>>> foundry = chip.get('pdk', 'foundry')
|
|
764
|
+
Returns the name of the foundry from the PDK.
|
|
765
|
+
|
|
766
|
+
"""
|
|
767
|
+
self.logger.debug(f"Reading from {keypath}. Field = '{field}'")
|
|
768
|
+
|
|
769
|
+
try:
|
|
770
|
+
strict = self.schema.get('option', 'strict')
|
|
771
|
+
if field == 'value' and strict:
|
|
772
|
+
pernode = self.schema.get(*keypath, field='pernode')
|
|
773
|
+
if pernode == 'optional' and \
|
|
774
|
+
(step is None or index is None) and \
|
|
775
|
+
(Schema.GLOBAL_KEY not in (step, index)): # allow explicit access to global
|
|
776
|
+
self.error(
|
|
777
|
+
f"Invalid args to get() of keypath {keypath}: step and "
|
|
778
|
+
"index are required for reading from this parameter "
|
|
779
|
+
"while ['option', 'strict'] is True."
|
|
780
|
+
)
|
|
781
|
+
return None
|
|
782
|
+
|
|
783
|
+
return self.schema.get(*keypath, field=field, job=job, step=step, index=index)
|
|
784
|
+
except (ValueError, TypeError) as e:
|
|
785
|
+
self.error(str(e))
|
|
786
|
+
return None
|
|
787
|
+
|
|
788
|
+
###########################################################################
|
|
789
|
+
def getkeys(self, *keypath, job=None):
|
|
790
|
+
"""
|
|
791
|
+
Returns a list of schema dictionary keys.
|
|
792
|
+
|
|
793
|
+
Searches the schema for the keypath provided and returns a list of
|
|
794
|
+
keys found, excluding the generic 'default' key. Accessing a
|
|
795
|
+
non-existent keypath produces a logger error message and raises the
|
|
796
|
+
Chip object error flag.
|
|
797
|
+
|
|
798
|
+
Args:
|
|
799
|
+
keypath (list str): Variable length ordered schema key list
|
|
800
|
+
job (str): Jobname to use for dictionary access in place of the
|
|
801
|
+
current active jobname.
|
|
802
|
+
|
|
803
|
+
Returns:
|
|
804
|
+
List of keys found for the keypath provided.
|
|
805
|
+
|
|
806
|
+
Examples:
|
|
807
|
+
>>> keylist = chip.getkeys('pdk')
|
|
808
|
+
Returns all keys for the 'pdk' keypath.
|
|
809
|
+
"""
|
|
810
|
+
if len(keypath) > 0:
|
|
811
|
+
self.logger.debug(f'Getting schema parameter keys for {keypath}')
|
|
812
|
+
else:
|
|
813
|
+
self.logger.debug('Getting all schema parameter keys.')
|
|
814
|
+
|
|
815
|
+
try:
|
|
816
|
+
return self.schema.getkeys(*keypath, job=job)
|
|
817
|
+
except (ValueError, TypeError) as e:
|
|
818
|
+
self.error(str(e))
|
|
819
|
+
return None
|
|
820
|
+
|
|
821
|
+
###########################################################################
|
|
822
|
+
def allkeys(self, *keypath_prefix):
|
|
823
|
+
'''Returns all keypaths in the schema as a list of lists.
|
|
824
|
+
|
|
825
|
+
Arg:
|
|
826
|
+
keypath_prefix (list str): Keypath prefix to search under. The
|
|
827
|
+
returned keypaths do not include the prefix.
|
|
828
|
+
'''
|
|
829
|
+
return self.schema.allkeys(*keypath_prefix)
|
|
830
|
+
|
|
831
|
+
###########################################################################
|
|
832
|
+
def getdict(self, *keypath):
|
|
833
|
+
"""
|
|
834
|
+
Returns a schema dictionary.
|
|
835
|
+
|
|
836
|
+
Searches the schema for the keypath provided and returns a complete
|
|
837
|
+
dictionary. Accessing a non-existent keypath produces a logger error
|
|
838
|
+
message and raises the Chip object error flag.
|
|
839
|
+
|
|
840
|
+
Args:
|
|
841
|
+
keypath(list str): Variable length ordered schema key list
|
|
842
|
+
|
|
843
|
+
Returns:
|
|
844
|
+
A schema dictionary
|
|
845
|
+
|
|
846
|
+
Examples:
|
|
847
|
+
>>> pdk = chip.getdict('pdk')
|
|
848
|
+
Returns the complete dictionary found for the keypath 'pdk'
|
|
849
|
+
"""
|
|
850
|
+
self.logger.debug(f'Getting cfg for: {keypath}')
|
|
851
|
+
|
|
852
|
+
try:
|
|
853
|
+
return self.schema.getdict(*keypath)
|
|
854
|
+
except (ValueError, TypeError) as e:
|
|
855
|
+
self.error(str(e))
|
|
856
|
+
return None
|
|
857
|
+
|
|
858
|
+
###########################################################################
|
|
859
|
+
def __add_set_package(self, keypath, value, package, step, index, clobber, add):
|
|
860
|
+
sc_type = self.get(*keypath, field='type')
|
|
861
|
+
if 'file' in sc_type or 'dir' in sc_type:
|
|
862
|
+
value_list = isinstance(value, (list, tuple))
|
|
863
|
+
package_list = isinstance(package, (list, tuple))
|
|
864
|
+
if value_list != package_list:
|
|
865
|
+
if value_list:
|
|
866
|
+
package = len(value) * [package]
|
|
867
|
+
else:
|
|
868
|
+
raise ValueError()
|
|
869
|
+
|
|
870
|
+
if add:
|
|
871
|
+
self.schema.add(*keypath, package, field='package',
|
|
872
|
+
step=step, index=index)
|
|
873
|
+
else:
|
|
874
|
+
self.schema.set(*keypath, package, field='package',
|
|
875
|
+
step=step, index=index, clobber=clobber)
|
|
876
|
+
|
|
877
|
+
###########################################################################
|
|
878
|
+
def set(self, *args, field='value', clobber=True, step=None, index=None, package=None):
|
|
879
|
+
'''
|
|
880
|
+
Sets a schema parameter field.
|
|
881
|
+
|
|
882
|
+
Sets a schema parameter field based on the keypath and value provided in
|
|
883
|
+
the ``*args``. See the :ref:`Schema Reference Manual<SiliconCompiler
|
|
884
|
+
Schema>` for documentation of all supported keypaths. New schema
|
|
885
|
+
dictionaries are automatically created for keypaths that overlap with
|
|
886
|
+
'default' dictionaries. The write action is ignored if the parameter
|
|
887
|
+
value is non-empty and the clobber option is set to False.
|
|
888
|
+
|
|
889
|
+
The value provided must agree with the dictionary parameter 'type'.
|
|
890
|
+
Accessing a non-existent keypath or providing a value that disagrees
|
|
891
|
+
with the parameter type produces a logger error message and raises the
|
|
892
|
+
Chip object error flag.
|
|
893
|
+
|
|
894
|
+
Args:
|
|
895
|
+
args (list): Parameter keypath followed by a value to set.
|
|
896
|
+
field (str): Parameter field to set.
|
|
897
|
+
clobber (bool): Existing value is overwritten if True.
|
|
898
|
+
step (str): Step name to set for parameters that may be specified
|
|
899
|
+
on a per-node basis.
|
|
900
|
+
index (str): Index name to set for parameters that may be specified
|
|
901
|
+
on a per-node basis.
|
|
902
|
+
package (str): Package that this file/dir depends on. Available packages
|
|
903
|
+
are listed in the package source section of the schema.
|
|
904
|
+
|
|
905
|
+
Examples:
|
|
906
|
+
>>> chip.set('design', 'top')
|
|
907
|
+
Sets the name of the design to 'top'
|
|
908
|
+
'''
|
|
909
|
+
keypath = args[:-1]
|
|
910
|
+
value = args[-1]
|
|
911
|
+
self.logger.debug(f'Setting {keypath} to {value}')
|
|
912
|
+
|
|
913
|
+
# Special case to ensure loglevel is updated ASAP
|
|
914
|
+
if keypath == ['option', 'loglevel'] and field == 'value' and \
|
|
915
|
+
step == self.get('arg', 'step') and index == self.get('arg', 'index'):
|
|
916
|
+
self.logger.setLevel(schema_utils.translate_loglevel(value))
|
|
917
|
+
|
|
918
|
+
try:
|
|
919
|
+
value_success = self.schema.set(*keypath, value, field=field, clobber=clobber,
|
|
920
|
+
step=step, index=index)
|
|
921
|
+
if field == 'value' and value_success:
|
|
922
|
+
self.__add_set_package(keypath, value, package, step, index, True, False)
|
|
923
|
+
|
|
924
|
+
except (ValueError, TypeError) as e:
|
|
925
|
+
self.error(e)
|
|
926
|
+
|
|
927
|
+
###########################################################################
|
|
928
|
+
def unset(self, *keypath, step=None, index=None):
|
|
929
|
+
'''
|
|
930
|
+
Unsets a schema parameter.
|
|
931
|
+
|
|
932
|
+
This method effectively undoes any previous calls to ``set()`` made to
|
|
933
|
+
the given keypath and step/index. For parameters with required or no
|
|
934
|
+
per-node values, unsetting a parameter always causes it to revert to its
|
|
935
|
+
default value, and future calls to ``set()`` with ``clobber=False`` will
|
|
936
|
+
once again be able to modify the value.
|
|
937
|
+
|
|
938
|
+
If you unset a particular step/index for a parameter with optional
|
|
939
|
+
per-node values, note that the newly returned value will be the global
|
|
940
|
+
value if it has been set. To completely return the parameter to its
|
|
941
|
+
default state, the global value has to be unset as well.
|
|
942
|
+
|
|
943
|
+
``unset()`` has no effect if called on a parameter that has not been
|
|
944
|
+
previously set.
|
|
945
|
+
|
|
946
|
+
Args:
|
|
947
|
+
keypath (list): Parameter keypath to clear.
|
|
948
|
+
step (str): Step name to unset for parameters that may be specified
|
|
949
|
+
on a per-node basis.
|
|
950
|
+
index (str): Index name to unset for parameters that may be specified
|
|
951
|
+
on a per-node basis.
|
|
952
|
+
'''
|
|
953
|
+
self.logger.debug(f'Unsetting {keypath}')
|
|
954
|
+
|
|
955
|
+
if not self.schema.unset(*keypath, step=step, index=index):
|
|
956
|
+
self.logger.debug(f'Failed to unset value for {keypath}: parameter is locked')
|
|
957
|
+
|
|
958
|
+
###########################################################################
|
|
959
|
+
def remove(self, *keypath):
|
|
960
|
+
'''
|
|
961
|
+
Remove a schema parameter and its subparameters.
|
|
962
|
+
|
|
963
|
+
Args:
|
|
964
|
+
keypath (list): Parameter keypath to clear.
|
|
965
|
+
'''
|
|
966
|
+
self.logger.debug(f'Removing {keypath}')
|
|
967
|
+
|
|
968
|
+
if not self.schema.remove(*keypath):
|
|
969
|
+
self.logger.debug(f'Failed to unset value for {keypath}: parameter is locked')
|
|
970
|
+
|
|
971
|
+
###########################################################################
|
|
972
|
+
def add(self, *args, field='value', step=None, index=None, package=None):
|
|
973
|
+
'''
|
|
974
|
+
Adds item(s) to a schema parameter list.
|
|
975
|
+
|
|
976
|
+
Adds item(s) to schema parameter list based on the keypath and value
|
|
977
|
+
provided in the ``*args``. See the :ref:`Schema Reference
|
|
978
|
+
Manual<SiliconCompiler Schema>` for documentation of all supported
|
|
979
|
+
keypaths. New schema dictionaries are automatically created for keypaths
|
|
980
|
+
that overlap with 'default' dictionaries.
|
|
981
|
+
|
|
982
|
+
The value provided must agree with the dictionary parameter 'type'.
|
|
983
|
+
Accessing a non-existent keypath, providing a value that disagrees
|
|
984
|
+
with the parameter type, or using add with a scalar parameter produces
|
|
985
|
+
a logger error message and raises the Chip object error flag.
|
|
986
|
+
|
|
987
|
+
Args:
|
|
988
|
+
args (list): Parameter keypath followed by a value to add.
|
|
989
|
+
field (str): Parameter field to modify.
|
|
990
|
+
step (str): Step name to modify for parameters that may be specified
|
|
991
|
+
on a per-node basis.
|
|
992
|
+
index (str): Index name to modify for parameters that may be specified
|
|
993
|
+
on a per-node basis.
|
|
994
|
+
package (str): Package that this file/dir depends on. Available packages
|
|
995
|
+
are listed in the package source section of the schema.
|
|
996
|
+
|
|
997
|
+
Examples:
|
|
998
|
+
>>> chip.add('input', 'rtl', 'verilog', 'hello.v')
|
|
999
|
+
Adds the file 'hello.v' to the list of sources.
|
|
1000
|
+
'''
|
|
1001
|
+
keypath = args[:-1]
|
|
1002
|
+
value = args[-1]
|
|
1003
|
+
self.logger.debug(f'Appending value {value} to {keypath}')
|
|
1004
|
+
|
|
1005
|
+
try:
|
|
1006
|
+
value_success = self.schema.add(*args, field=field, step=step, index=index)
|
|
1007
|
+
|
|
1008
|
+
if field == 'value' and value_success:
|
|
1009
|
+
self.__add_set_package(keypath, value, package, step, index, True, True)
|
|
1010
|
+
except (ValueError, TypeError) as e:
|
|
1011
|
+
self.error(str(e))
|
|
1012
|
+
|
|
1013
|
+
    ###########################################################################
    def input(self, filename, fileset=None, filetype=None, iomap=None,
              step=None, index=None, package=None):
        '''
        Adds a file to a fileset. The default behavior is to infer the filetype
        and fileset from the file's suffix. The method is a wrapper around
        add('input', fileset, filetype, ...).

        Default filetype and fileset based on suffix:

        .. code:: none

            {iotable}

        Args:
            fileset (str): File grouping
            filetype (str): File type
            iomap (dict of tuple(set, type)): File set and type mapping based on file extension
            step (str): Node name
            index (str): Node index
            package (str): Name of package where this file can be found
        '''

        self._add_input_output('input', filename, fileset, filetype, iomap,
                               step=step, index=index, package=package)
    # Replace {iotable} in __doc__ with the actual fileset/filetype extension mapping table
    input.__doc__ = input.__doc__.replace("{iotable}",
                                          utils.format_fileset_type_table())

    ###########################################################################
    def output(self, filename, fileset=None, filetype=None, iomap=None,
               step=None, index=None, package=None):
        '''Same as input'''

        self._add_input_output('output', filename, fileset, filetype, iomap,
                               step=step, index=index, package=package)
    # Copy input's __doc__ and replace 'input' with 'output' to keep the docs in sync
    output.__doc__ = input.__doc__.replace("input", "output")

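A short sketch of the suffix-based inference described above. The file names and the
explicit fileset/filetype labels are hypothetical; the inferred values for '.v' follow
the rtl/verilog mapping implied by the add() docstring example.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.input('heartbeat.v')                                      # fileset/filetype inferred from '.v'
chip.input('heartbeat.v', fileset='rtl', filetype='verilog')   # explicit values skip inference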
    ###########################################################################
    def _add_input_output(self, category, filename, fileset, filetype, iomap,
                          step=None, index=None, package=None, quiet=False):
        '''
        Adds a file to the input or output group.
        Performs a lookup in the io map for the fileset and filetype
        and uses those values if they are not provided in the arguments.
        '''
        # Normalize value to string in case we receive a pathlib.Path
        filename = str(filename)

        ext = utils.get_file_ext(filename)

        default_fileset = None
        default_filetype = None
        if not iomap:
            iomap = utils.get_default_iomap()

        if ext in iomap:
            default_fileset, default_filetype = iomap[ext]

        if not fileset:
            use_fileset = default_fileset
        else:
            use_fileset = fileset

        if not filetype:
            use_filetype = default_filetype
        else:
            use_filetype = filetype

        if not use_fileset or not use_filetype:
            self.logger.error(f'Unable to infer {category} fileset and/or filetype for '
                              f'{filename} based on file extension.')
        elif not quiet:
            if not fileset and not filetype:
                self.logger.info(f'{filename} inferred as {use_fileset}/{use_filetype}')
            elif not filetype:
                self.logger.info(f'{filename} inferred as filetype {use_filetype}')
            elif not fileset:
                self.logger.info(f'{filename} inferred as fileset {use_fileset}')

        self.add(category, use_fileset, use_filetype, filename,
                 step=step, index=index, package=package)

    ###########################################################################
    def find_files(self, *keypath, missing_ok=False, job=None, step=None, index=None):
        """
        Returns absolute paths to files or directories based on the keypath
        provided.

        By default, this function first checks if the keypath provided has its
        `copy` parameter set to True. If so, it returns paths to the files in
        the build directory. Otherwise, it resolves these files based on the
        current working directory and SC path.

        The keypath provided must point to a schema parameter of type file, dir,
        or lists of either. Otherwise, it will trigger an error.

        Args:
            keypath (list str): Variable length schema key list.
            missing_ok (bool): If True, silently return None when files aren't
                found. If False, print an error and set the error flag.
            job (str): Jobname to use for dictionary access in place of the
                current active jobname.
            step (str): Step name to access for parameters that may be specified
                on a per-node basis.
            index (str): Index name to access for parameters that may be specified
                on a per-node basis.

        Returns:
            If the keypath points to a scalar entry, returns an absolute path to
            that file/directory, or None if not found. If the keypath points to
            a list entry, returns a list of either the absolute paths or None
            for each entry, depending on whether it is found.

        Examples:
            >>> chip.find_files('input', 'verilog')
            Returns a list of absolute paths to source files, as specified in
            the schema.

        """
        strict = self.get('option', 'strict')
        pernode = self.get(*keypath, field='pernode')
        if strict and pernode == 'optional' and (step is None or index is None):
            self.error(
                f"Invalid args to find_files() of keypath {keypath}: step and "
                "index are required for reading from this parameter while "
                "['option', 'strict'] is True."
            )
            return []
        return self.__find_files(*keypath, missing_ok=missing_ok, job=job, step=step, index=index)

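A minimal sketch of find_files() resolution on a list parameter, assuming the input file
exists relative to the working directory and that ['option', 'strict'] is left at its
default so step/index may be omitted.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.input('heartbeat.v')
for path in chip.find_files('input', 'rtl', 'verilog', missing_ok=True):
    print(path)   # absolute path, or None if an entry could not be resolved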
    def __convert_paths_to_posix(self, paths):
        posix_paths = []
        for p in paths:
            if p:
                # Cast everything to a windows path and convert to posix.
                # https://stackoverflow.com/questions/73682260
                posix_paths.append(pathlib.PureWindowsPath(p).as_posix())
            else:
                posix_paths.append(p)
        return posix_paths

    ###########################################################################
    def __find_files(self,
                     *keypath,
                     missing_ok=False,
                     job=None,
                     step=None,
                     index=None,
                     list_index=None,
                     abs_path_only=False):
        """Internal find_files() that allows you to skip step/index for optional
        params, regardless of [option, strict]."""

        paramtype = self.get(*keypath, field='type', job=job)

        if 'file' not in paramtype and 'dir' not in paramtype:
            self.error('Can only call find_files on file or dir types')
            return None

        is_list = bool(re.match(r'\[', paramtype))

        paths = self.schema.get(*keypath, job=job, step=step, index=index)
        dependencies = self.schema.get(*keypath, job=job,
                                       step=step, index=index, field='package')
        # Convert to list if we have scalar
        if not is_list:
            # Dependencies are always specified as list with default []
            # If paths is a scalar we convert the default [] to [None]
            # to have a matching list with one element
            if dependencies == []:
                dependencies = [None]
            paths = [paths]

        if list_index is not None:
            # List index is set, so we only want to check a particular path in the key
            paths = [paths[list_index]]
            dependencies = [dependencies[list_index]]

        paths = self.__convert_paths_to_posix(paths)
        dependencies = self.__convert_paths_to_posix(dependencies)

        result = []

        collection_dir = self._getcollectdir(jobname=job)
        if not os.path.exists(collection_dir):
            collection_dir = None

        # Special cases for various ['tool', ...] files that may be implicitly
        # under the workdir (or refdir in the case of scripts).
        # TODO: it may be cleaner to have a file resolution scope flag in schema
        # (e.g. 'workdir', 'refdir'), rather than hardcoding special
        # cases.

        search_paths = None
        if len(keypath) >= 5 and \
           keypath[0] == 'tool' and \
           keypath[4] in ('input', 'output', 'report'):
            if keypath[4] == 'report':
                io = ""
            else:
                io = keypath[4] + 's'
            iodir = os.path.join(self.getworkdir(jobname=job, step=step, index=index), io)
            search_paths = [iodir]
        elif len(keypath) >= 5 and keypath[0] == 'tool' and keypath[4] == 'script':
            tool = keypath[1]
            task = keypath[3]
            refdirs = self.__find_files('tool', tool, 'task', task, 'refdir',
                                        step=step, index=index,
                                        abs_path_only=True)
            search_paths = refdirs

        if search_paths:
            search_paths = self.__convert_paths_to_posix(search_paths)

        for (dependency, path) in zip(dependencies, paths):
            if not search_paths and collection_dir:
                import_path = self.__find_sc_imported_file(path, dependency, collection_dir)
                if import_path:
                    result.append(import_path)
                    continue
            if dependency:
                dependency_path = os.path.abspath(
                    os.path.join(sc_package.path(self, dependency), path))
                if os.path.exists(dependency_path):
                    result.append(dependency_path)
                else:
                    result.append(None)
                    if not missing_ok:
                        self.error(f'Could not find {path} in {dependency}.')
                continue
            result.append(utils.find_sc_file(self,
                                             path,
                                             missing_ok=missing_ok,
                                             search_paths=search_paths))

        if self._relative_path and not abs_path_only:
            rel_result = []
            for path in result:
                if path:
                    rel_result.append(os.path.relpath(path, self._relative_path))
                else:
                    rel_result.append(path)
            result = rel_result

        # Convert back to scalar if that was original type
        if not is_list:
            return result[0]

        return result

    ###########################################################################
    def __find_sc_imported_file(self, path, package, collected_dir):
        """
        Returns the path to an imported file if it is available in the import
        directory or in a directory that was imported.

        Returns None if not found.
        """
        if not path:
            return None

        collected_files = os.listdir(collected_dir)
        if not collected_files:
            return None

        path_parts = pathlib.PurePosixPath(path).parts
        for n in range(1, len(path_parts) + 1):
            # Search through the path elements to see if any prefix of the
            # path has been imported

            basename = str(pathlib.PurePosixPath(*path_parts[0:n]))
            endname = str(pathlib.PurePosixPath(*path_parts[n:]))

            import_name = self.__get_imported_filename(basename, package)
            if import_name not in collected_files:
                continue

            abspath = os.path.join(collected_dir, import_name)
            if endname:
                abspath = os.path.join(abspath, endname)
            abspath = os.path.abspath(abspath)
            if os.path.exists(abspath):
                return abspath

        return None

    def find_node_file(self, path, step, jobname=None, index='0'):
        """
        Returns the absolute path of a file from a particular node.

        Utility function that returns the absolute path to a node
        file based on the provided arguments. The result directory
        structure is:

        <dir>/<design>/<jobname>/<step>/<index>/<path>

        Args:
            path (str): Path to file inside node run directory
            step (str): Task step name ('syn', 'place', etc)
            jobname (str): Jobid directory name
            index (str): Task index

        Returns:
            Returns absolute path to file.

        Examples:
            >>> manifest_filepath = chip.find_node_file('outputs/heartbeat.vg', 'syn')
            Returns the absolute path to the gate level verilog.
        """
        if jobname is None:
            jobname = self.get('option', 'jobname')

        workdir = self.getworkdir(jobname, step, index)
        filename = f"{workdir}/{path}"

        self.logger.debug(f"Finding node file: {filename}")

        if os.path.exists(filename):
            return filename
        else:
            return None

    ###########################################################################
    def find_result(self, filetype, step, jobname=None, index='0'):
        """
        Returns the absolute path of a compilation result.

        Utility function that returns the absolute path to a results
        file based on the provided arguments. The result directory
        structure is:

        <dir>/<design>/<jobname>/<step>/<index>/outputs/<design>.filetype

        Args:
            filetype (str): File extension (v, def, etc)
            step (str): Task step name ('syn', 'place', etc)
            jobname (str): Jobid directory name
            index (str): Task index

        Returns:
            Returns absolute path to file.

        Examples:
            >>> vg_filepath = chip.find_result('vg', 'syn')
            Returns the absolute path to the gate level verilog.
        """

        design = self.top()
        return self.find_node_file(
            f"outputs/{design}.{filetype}",
            step=step,
            jobname=jobname,
            index=index)

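A minimal sketch of locating a run result, assuming a previous run() left outputs under
the build directory (e.g. <build>/heartbeat/job0/syn/0/outputs/); the design and step
names are illustrative.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
netlist = chip.find_result('vg', 'syn')
if netlist is None:
    print('no synthesis netlist found')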
    ###########################################################################
    def __abspath(self):
        '''
        Internal function that returns a copy of the chip schema with all
        relative paths resolved where required.
        '''
        schema = self.schema.copy()
        for keypath in self.allkeys():
            paramtype = self.get(*keypath, field='type')
            if not ('file' in paramtype or 'dir' in paramtype):
                # only do something if type is file or dir
                continue

            values = self.schema._getvals(*keypath)
            for value, step, index in values:
                if not value:
                    continue
                abspaths = self.__find_files(*keypath, missing_ok=True, step=step, index=index)
                if isinstance(abspaths, list) and None in abspaths:
                    # Lists may not contain None
                    schema.set(*keypath, [], step=step, index=index)
                else:
                    schema.set(*keypath, abspaths, step=step, index=index)
        return schema

    ###########################################################################
    def check_filepaths(self):
        '''
        Verifies that paths to all files in manifest are valid.

        Returns:
            True if all file paths are valid, otherwise False.
        '''

        allkeys = self.allkeys()
        error = False
        for keypath in allkeys:
            paramtype = self.get(*keypath, field='type')
            is_file = 'file' in paramtype
            is_dir = 'dir' in paramtype
            is_list = paramtype.startswith('[')

            if is_file or is_dir:
                if keypath[-2:] == ('option', 'builddir'):
                    # Skip ['option', 'builddir'] since it will get created by run() if it doesn't
                    # exist
                    continue

                for check_files, step, index in self.schema._getvals(*keypath):
                    if not check_files:
                        continue

                    if not is_list:
                        check_files = [check_files]

                    for idx, check_file in enumerate(check_files):
                        found_file = self.__find_files(*keypath,
                                                       missing_ok=True,
                                                       step=step, index=index,
                                                       list_index=idx)
                        if is_list:
                            found_file = found_file[0]
                        if not found_file:
                            self.logger.error(f"Parameter {keypath} path {check_file} is invalid")
                            error = True

        return not error

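A minimal sketch of validating the manifest's file references before a run; the design
and file names are hypothetical.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.input('heartbeat.v')
if not chip.check_filepaths():
    raise SystemExit('manifest references files that cannot be resolved')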
    ###########################################################################
    def check_manifest(self):
        '''
        Verifies the integrity of the pre-run compilation manifest.

        Checks the validity of the current schema manifest in
        memory to ensure that the design has been properly set up prior
        to running compilation. The function is called inside the run()
        function but can also be called separately. Checks performed by the
        check_manifest() function include:

        * Has a flowgraph been defined?
        * Does the manifest satisfy the schema requirement field settings?
        * Are all flowgraph input names legal step/index pairs?
        * Are the tool parameter setting requirements met?

        Returns:
            Returns True if the manifest is valid, else returns False.

        Examples:
            >>> manifest_ok = chip.check_manifest()
            Returns True if the Chip object dictionary checks out.

        '''
        error = False

        # Dynamic checks
        # We only perform these if arg, step and arg, index are set.
        # We don't check inputs for skip all
        # TODO: Need to add skip step

        flow = self.get('option', 'flow')

        # 1. Checking that flowgraph and nodes to execute are legal
        if flow not in self.getkeys('flowgraph'):
            error = True
            self.logger.error(f"flowgraph {flow} not defined.")

        nodes = [node for node in nodes_to_execute(self)
                 if self.get('record', 'status', step=node[0], index=node[1])
                 != NodeStatus.SKIPPED]
        for (step, index) in nodes:
            for in_step, in_index in _get_pruned_node_inputs(self, flow, (step, index)):
                if (in_step, in_index) in nodes:
                    # we're going to run this step, OK
                    continue
                if self.get('record', 'status', step=in_step, index=in_index) == \
                   NodeStatus.SUCCESS:
                    # this task has already completed successfully, OK
                    continue
                self.logger.error(f'{step}{index} relies on {in_step}{in_index}, '
                                  'but this task has not been run and is not in the '
                                  'current nodes to execute.')
                error = True

        # 2. Check library names
        libraries = set()
        libs_to_check = [
            ('option', 'library'),
            ('asic', 'logiclib'),
            ('asic', 'macrolib')]
        # Create a list of nodes that include global and step only
        lib_node_check = [(None, None)]
        for step, _ in nodes:
            lib_node_check.append((step, None))
        lib_node_check.extend(nodes)
        for lib_key in libs_to_check:
            for val, step, index in self.schema._getvals(*lib_key):
                if (step, index) in lib_node_check:
                    libraries.update(val)

        for library in libraries:
            if library not in self.getkeys('library'):
                error = True
                self.logger.error(f"Target library {library} not found.")

        # 3. Check schema requirements list
        allkeys = self.allkeys()
        for key in allkeys:
            keypath = ",".join(key)
            if 'default' not in key and 'history' not in key and 'library' not in key:
                key_empty = self.schema.is_empty(*key)
                requirement = self.get(*key, field='require')
                if key_empty and requirement:
                    error = True
                    self.logger.error(f"Global requirement missing for [{keypath}].")

        # 4. Check if tool/task modules exist
        for (step, index) in nodes:
            tool = self.get('flowgraph', flow, step, index, 'tool')
            task = self.get('flowgraph', flow, step, index, 'task')
            tool_name, task_name = get_tool_task(self, step, index, flow=flow)

            if not self._get_tool_module(step, index, flow=flow, error=False):
                error = True
                self.logger.error(f"Tool module {tool_name} could not be found or "
                                  f"loaded for {step}{index}.")
            if not self._get_task_module(step, index, flow=flow, error=False):
                error = True
                task_module = self.get('flowgraph', flow, step, index, 'taskmodule')
                self.logger.error(f"Task module {task_module} for {tool_name}/{task_name} "
                                  f"could not be found or loaded for {step}{index}.")

        # 5. Check per tool parameter requirements (when tool exists)
        for (step, index) in nodes:
            tool, task = get_tool_task(self, step, index, flow=flow)
            task_module = self._get_task_module(step, index, flow=flow, error=False)
            if tool == 'builtin':
                continue

            if tool not in self.getkeys('tool'):
                error = True
                self.logger.error(f'{tool} is not configured.')
                continue

            if task not in self.getkeys('tool', tool, 'task'):
                error = True
                self.logger.error(f'{tool}/{task} is not configured.')
                continue

            if self.valid('tool', tool, 'task', task, 'require'):
                all_required = self.get('tool', tool, 'task', task, 'require',
                                        step=step, index=index)
                for item in all_required:
                    keypath = item.split(',')
                    if self.schema.is_empty(*keypath):
                        error = True
                        self.logger.error(f"Value empty for {keypath} for {tool}.")

            task_run = getattr(task_module, 'run', None)
            if self.schema.is_empty('tool', tool, 'exe') and not task_run:
                error = True
                self.logger.error(f'No executable or run() function specified for {tool}/{task}')

        if not _check_flowgraph_io(self, nodes=nodes):
            error = True

        return not error

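A minimal sketch of running the pre-run validation standalone, assuming a design, target
flow, and inputs have already been configured (setup elided here).

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
# ... design, flow, and input setup elided ...
if not chip.check_manifest():
    raise SystemExit('manifest failed pre-run validation')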
    ###########################################################################
    def read_manifest(self, filename, job=None, clear=True, clobber=True):
        """
        Reads a manifest from disk and merges it with the current compilation manifest.

        The file format read is determined by the filename suffix. Currently
        json (*.json) and yaml (*.yaml) formats are supported.

        Args:
            filename (filepath): Path to a manifest file to be loaded.
            job (str): Specifies non-default job to merge into.
            clear (bool): If True, disables append operations for list type.
            clobber (bool): If True, overwrites existing parameter value.

        Examples:
            >>> chip.read_manifest('mychip.json')
            Loads the file mychip.json into the current Chip object.
        """

        # Read from file into new schema object
        schema = Schema(manifest=filename, logger=self.logger)

        # Merge data in schema with Chip configuration
        self.schema.merge_manifest(schema, job=job, clear=clear, clobber=clobber)

        # Read history, if we're not already reading into a job
        if 'history' in schema.cfg and not job:
            for historic_job in schema.cfg['history'].keys():
                self.schema.merge_manifest(schema.history(historic_job),
                                           job=historic_job,
                                           clear=clear,
                                           clobber=clobber)

        # TODO: better way to handle this?
        if 'library' in schema.cfg:
            for libname in schema.cfg['library'].keys():
                self.__import_library(libname, schema.cfg['library'][libname],
                                      job=job,
                                      clobber=clobber)

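A minimal sketch of merging saved manifests, assuming the json files exist; the file
names are hypothetical.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.read_manifest('mychip.json')                   # merge, clobbering current values
chip.read_manifest('defaults.json', clobber=False)  # keep existing values on conflict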
    ###########################################################################
    def write_manifest(self, filename, prune=False, abspath=False):
        '''
        Writes the compilation manifest to a file.

        The write file format is determined by the filename suffix. Currently
        json (*.json), yaml (*.yaml), tcl (*.tcl), and csv (*.csv) formats are
        supported.

        Args:
            filename (filepath): Output filepath
            prune (bool): If True, only essential fields from the
                Chip object schema are written to the output file.
            abspath (bool): If set to True, then all schema filepaths
                are resolved to absolute filepaths.

        Examples:
            >>> chip.write_manifest('mydump.json')
            Prunes and dumps the current chip manifest into mydump.json
        '''

        filepath = os.path.abspath(filename)
        self.logger.debug('Writing manifest to %s', filepath)

        if not os.path.exists(os.path.dirname(filepath)):
            os.makedirs(os.path.dirname(filepath))

        schema = self.schema
        # resolve absolute paths
        if abspath:
            schema = self.__abspath()

        if prune:
            if schema is self.schema:
                schema = schema.copy()

            self.logger.debug('Pruning dictionary before writing file %s', filepath)
            schema.prune()

        is_csv = re.search(r'(\.csv)(\.gz)*$', filepath)

        # format specific dumping
        if filepath.endswith('.gz'):
            fout = gzip.open(filepath, 'wt', encoding='UTF-8')
        elif is_csv:
            # Files written using csv library should be opened with newline=''
            # https://docs.python.org/3/library/csv.html#id3
            fout = open(filepath, 'w', newline='')
        else:
            fout = open(filepath, 'w')

        # format specific printing
        try:
            if re.search(r'(\.json|\.sup)(\.gz)*$', filepath):
                schema.write_json(fout)
            elif re.search(r'(\.yaml|\.yml)(\.gz)*$', filepath):
                schema.write_yaml(fout)
            elif re.search(r'(\.tcl)(\.gz)*$', filepath):
                # TCL only gets values associated with the current node.
                step = self.get('arg', 'step')
                index = self.get('arg', 'index')
                schema.write_tcl(fout,
                                 prefix="dict set sc_cfg",
                                 step=step,
                                 index=index,
                                 template=utils.get_file_template('tcl/manifest.tcl.j2'))
            elif is_csv:
                schema.write_csv(fout)
            else:
                self.error(f'File format not recognized {filepath}')
        finally:
            fout.close()

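A minimal sketch of dumping the manifest in the supported formats; the output paths are
illustrative, and write_manifest() creates the parent directory if needed.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.write_manifest('build/heartbeat.json')               # format chosen from the suffix
chip.write_manifest('build/heartbeat.json.gz')            # gzip-compressed variant
chip.write_manifest('build/heartbeat.tcl', abspath=True)  # TCL view with resolved paths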
    ###########################################################################
    def check_checklist(self, standard, items=None,
                        check_ok=False, verbose=False, require_reports=True):
        '''
        Check items in a checklist.

        Checks the status of items in a checklist for the standard provided. If
        a specific list of items is unspecified, all items are checked.

        All items have an associated 'task' parameter, which indicates which
        tasks can be used to automatically validate the item. For an item to be
        checked, all tasks must satisfy the item's criteria, unless waivers are
        provided. In addition, that task must have generated EDA report files
        for each metric in the criteria.

        For items without an associated task, the only requirement is that at
        least one report has been added to that item.

        When 'check_ok' is True, every item must also have its 'ok' parameter
        set to True, indicating that a human has reviewed the item.

        Args:
            standard (str): Standard to check.
            items (list of str): Items to check from standard.
            check_ok (bool): Whether to check item 'ok' parameter.
            verbose (bool): Whether to print passing criteria.
            require_reports (bool): Whether to assert the presence of reports.

        Returns:
            Status of item check.

        Examples:
            >>> status = chip.check_checklist('iso9000', ['d000'])
            Returns status.
        '''
        error = False

        self.logger.info(f'Checking checklist {standard}')

        if standard not in self.getkeys('checklist'):
            self.logger.error(f'{standard} has not been loaded.')
            return False

        if items is None:
            items = self.getkeys('checklist', standard)

        # these tasks are recorded by SC so there are no reports
        metrics_without_reports = (
            'tasktime',
            'totaltime',
            'exetime',
            'memory')

        for item in items:
            if item not in self.getkeys('checklist', standard):
                self.logger.error(f'{item} is not a check in {standard}.')
                error = True
                continue

            allow_missing_reports = True

            has_check = False

            all_criteria = self.get('checklist', standard, item, 'criteria')
            for criteria in all_criteria:
                m = re.match(r'^(\w+)\s*([\>\=\<]+)\s*([+\-]?\d+(\.\d+)?(e[+\-]?\d+)?)$',
                             criteria.strip())
                if not m:
                    self.error(f"Illegal checklist criteria: {criteria}")
                    return False
                elif m.group(1) not in self.getkeys('metric'):
                    self.error(f"Criteria must use legal metrics only: {criteria}")
                    return False

                metric = m.group(1)
                op = m.group(2)
                if self.get('metric', metric, field='type') == 'int':
                    goal = int(m.group(3))
                    number_format = 'd'
                else:
                    goal = float(m.group(3))

                    if goal == 0.0 or (abs(goal) > 1e-3 and abs(goal) < 1e5):
                        number_format = '.3f'
                    else:
                        number_format = '.3e'

                if metric not in metrics_without_reports:
                    allow_missing_reports = False

                tasks = self.get('checklist', standard, item, 'task')
                for job, step, index in tasks:
                    if job not in self.getkeys('history'):
                        self.error(f'{job} not found in history')

                    flow = self.get('option', 'flow', job=job)

                    if step not in self.getkeys('flowgraph', flow, job=job):
                        self.error(f'{step} not found in flowgraph')

                    if index not in self.getkeys('flowgraph', flow, step, job=job):
                        self.error(f'{step}{index} not found in flowgraph')

                    if self.get('record', 'status', step=step, index=index, job=job) == \
                       NodeStatus.SKIPPED:
                        if verbose:
                            self.logger.warning(f'{step}{index} was skipped')
                        continue

                    has_check = True

                    # Automated checks
                    flow = self.get('option', 'flow', job=job)
                    tool = self.get('flowgraph', flow, step, index, 'tool', job=job)
                    task = self.get('flowgraph', flow, step, index, 'task', job=job)

                    value = self.get('metric', metric, job=job, step=step, index=index)
                    criteria_ok = utils.safecompare(self, value, op, goal)
                    if metric in self.getkeys('checklist', standard, item, 'waiver'):
                        waivers = self.get('checklist', standard, item, 'waiver', metric)
                    else:
                        waivers = []

                    criteria_str = f'{metric}{op}{goal:{number_format}}'
                    compare_str = f'{value:{number_format}}{op}{goal:{number_format}}'
                    step_desc = f'job {job} with step {step}{index} and task {tool}/{task}'
                    if not criteria_ok and waivers:
                        self.logger.warning(f'{item} criteria {criteria_str} ({compare_str}) unmet '
                                            f'by {step_desc}, but found waivers.')
                    elif not criteria_ok:
                        self.logger.error(f'{item} criteria {criteria_str} ({compare_str}) unmet '
                                          f'by {step_desc}.')
                        error = True
                    elif verbose and criteria_ok:
                        self.logger.info(f'{item} criteria {criteria_str} met by {step_desc}.')

                    has_reports = \
                        self.valid('tool', tool, 'task', task, 'report', metric, job=job) and \
                        self.get('tool', tool, 'task', task, 'report', metric, job=job,
                                 step=step, index=index)

                    if metric in metrics_without_reports and not has_reports:
                        # No reports available and it is allowed
                        continue

                    try:
                        reports = self.find_files('tool', tool, 'task', task, 'report', metric,
                                                  job=job,
                                                  step=step, index=index,
                                                  missing_ok=not require_reports)
                    except SiliconCompilerError:
                        reports = []
                        continue

                    if require_reports and not reports:
                        self.logger.error(f'No EDA reports generated for metric {metric} in '
                                          f'{step_desc}')
                        error = True

                    for report in reports:
                        if not report:
                            continue

                        report = os.path.relpath(report, self.cwd)
                        if report not in self.get('checklist', standard, item, 'report'):
                            self.add('checklist', standard, item, 'report', report)

            if has_check:
                if require_reports and \
                   not allow_missing_reports and \
                   not self.get('checklist', standard, item, 'report'):
                    # TODO: validate that report exists?
                    self.logger.error(f'No report documenting item {item}')
                    error = True

                if check_ok and not self.get('checklist', standard, item, 'ok'):
                    self.logger.error(f"Item {item} 'ok' field not checked")
                    error = True

        if not error:
            self.logger.info('Check succeeded!')

        return not error

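A sketch of driving check_checklist() against a hypothetical checklist item; it assumes
a completed 'job0' run exists in history with an 'errors' metric recorded for syn0, and
that the checklist keys are populated as the method's loops above expect.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
# hypothetical checklist item validated against the 'syn' node of job0
chip.add('checklist', 'tapeout', 'd0', 'criteria', 'errors==0')
chip.add('checklist', 'tapeout', 'd0', 'task', ('job0', 'syn', '0'))
if chip.check_checklist('tapeout', items=['d0'], require_reports=False):
    chip.set('checklist', 'tapeout', 'd0', 'ok', True)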
    ###########################################################################
    def __import_library(self, libname, libcfg, job=None, clobber=True, keep_input=True):
        '''Helper to import the library config 'libcfg' as a library
        named 'libname' in the current Chip object.'''
        if job:
            cfg = self.schema.cfg['history'][job]['library']
        else:
            cfg = self.schema.cfg['library']

        if 'library' in libcfg:
            for sublib_name, sublibcfg in libcfg['library'].items():
                self.__import_library(sublib_name, sublibcfg,
                                      job=job, clobber=clobber, keep_input=keep_input)

        if libname in cfg:
            if not clobber:
                return

        self.__import_data_sources(libcfg)
        cfg[libname] = {}

        # Only keep some sections to avoid recursive bloat
        keeps = ['asic', 'design', 'fpga', 'option', 'output', 'package']
        if keep_input:
            keeps.append('input')
        for section in list(libcfg.keys()):
            if section in keeps:
                cfg[libname][section] = copy.deepcopy(libcfg[section])

    ###########################################################################
    def write_flowgraph(self, filename, flow=None,
                        fillcolor='#ffffff', fontcolor='#000000',
                        background='transparent', fontsize='14',
                        border=True, landscape=False,
                        show_io=False):
        r'''
        Renders and saves the compilation flowgraph to a file.

        The chip object flowgraph is traversed to create a graphviz (\*.dot)
        file comprised of nodes, edges, and labels. The dot file is a
        graphical representation of the flowgraph useful for validating the
        correctness of the execution flow graph. The dot file is then
        converted to the appropriate picture or drawing format based on the
        filename suffix provided. Supported output render formats include
        png, svg, gif, pdf and a few others. For more information about the
        graphviz project, see https://graphviz.org/

        Args:
            filename (filepath): Output filepath
            flow (str): Name of flowgraph to render
            fillcolor (str): Node fill RGB color hex value
            fontcolor (str): Node font RGB color hex value
            background (str): Background color
            fontsize (str): Node text font size
            border (bool): Enables node border if True
            landscape (bool): Renders graph in landscape layout if True
            show_io (bool): Add file input/outputs to graph

        Examples:
            >>> chip.write_flowgraph('mydump.png')
            Renders the object flowgraph and writes the result to a png file.
        '''
        filepath = os.path.abspath(filename)
        self.logger.debug('Writing flowgraph to file %s', filepath)
        fileroot, ext = os.path.splitext(filepath)
        fileformat = ext.replace(".", "")

        if flow is None:
            flow = self.get('option', 'flow')

        if flow not in self.getkeys('flowgraph'):
            self.logger.error(f'{flow} is not a valid flowgraph')
            return

        # controlling border width
        if border:
            penwidth = '1'
        else:
            penwidth = '0'

        # controlling graph direction
        if landscape:
            rankdir = 'LR'
            out_label_suffix = ':e'
            in_label_suffix = ':w'
        else:
            rankdir = 'TB'
            out_label_suffix = ':s'
            in_label_suffix = ':n'

        all_graph_inputs, nodes, edges, show_io = _get_flowgraph_information(self, flow, io=show_io)

        if not show_io:
            out_label_suffix = ''
            in_label_suffix = ''

        dot = graphviz.Digraph(format=fileformat)
        dot.graph_attr['rankdir'] = rankdir
        if show_io:
            dot.graph_attr['concentrate'] = 'true'
            dot.graph_attr['ranksep'] = '0.75'
        dot.attr(bgcolor=background)

        with dot.subgraph(name='inputs') as input_graph:
            input_graph.graph_attr['cluster'] = 'true'
            input_graph.graph_attr['color'] = background

            # add inputs
            for graph_input in sorted(all_graph_inputs):
                input_graph.node(
                    graph_input, label=graph_input, bordercolor=fontcolor, style='filled',
                    fontcolor=fontcolor, fontsize=fontsize, ordering="in",
                    penwidth=penwidth, fillcolor=fillcolor, shape="box")

        with dot.subgraph(name='input_nodes') as input_graph_nodes:
            input_graph_nodes.graph_attr['cluster'] = 'true'
            input_graph_nodes.graph_attr['color'] = background

            # add nodes
            shape = "oval" if not show_io else "Mrecord"
            for node, info in nodes.items():
                task_label = f"\\n ({info['task']})" if info['task'] is not None else ""
                if show_io:
                    input_labels = [f"<{ikey}> {ifile}" for ifile, ikey in info['inputs'].items()]
                    output_labels = [f"<{okey}> {ofile}" for ofile, okey in info['outputs'].items()]
                    center_text = f"\\n {node} {task_label} \\n\\n"
                    labelname = "{"
                    if input_labels:
                        labelname += f"{{ {' | '.join(input_labels)} }} |"
                    labelname += center_text
                    if output_labels:
                        labelname += f"| {{ {' | '.join(output_labels)} }}"
                    labelname += "}"
                else:
                    labelname = f"{node}{task_label}"

                dst = dot
                if info['is_input']:
                    dst = input_graph_nodes
                dst.node(node, label=labelname, bordercolor=fontcolor, style='filled',
                         fontcolor=fontcolor, fontsize=fontsize, ordering="in",
                         penwidth=penwidth, fillcolor=fillcolor, shape=shape)

        for edge0, edge1, weight in edges:
            dot.edge(f'{edge0}{out_label_suffix}', f'{edge1}{in_label_suffix}', weight=str(weight))

        try:
            dot.render(filename=fileroot, cleanup=True)
        except graphviz.ExecutableNotFound as e:
            self.logger.error(f'Unable to save flowgraph: {e}')

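A minimal sketch of rendering the flowgraph; it assumes a flow named 'asicflow' has
already been loaded into the schema (flow setup elided) and that the graphviz binaries
are installed on the system.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
# assumes the asicflow flowgraph has been loaded into ['flowgraph', ...]
chip.set('option', 'flow', 'asicflow')
chip.write_flowgraph('flowgraph.svg', landscape=True)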
    ###########################################################################
    def write_dependencygraph(self, filename, flow=None,
                              fontcolor='#000000', color_scheme=None,
                              background='transparent', fontsize='14',
                              border=True, landscape=False):
        r'''
        Renders and saves the dependency graph to a file.

        The chip object flowgraph is traversed to create a graphviz (\*.dot)
        file comprised of nodes, edges, and labels. The dot file is a
        graphical representation of the flowgraph useful for validating the
        correctness of the execution flow graph. The dot file is then
        converted to the appropriate picture or drawing format based on the
        filename suffix provided. Supported output render formats include
        png, svg, gif, pdf and a few others. For more information about the
        graphviz project, see https://graphviz.org/

        Args:
            filename (filepath): Output filepath
            flow (str): Name of flowgraph to render
            fontcolor (str): Node font RGB color hex value
            color_scheme (str): Name of the color scheme to apply to the nodes.
                Valid choices are: "none", "simple", "detailed"
            background (str): Background color
            fontsize (str): Node text font size
            border (bool): Enables node border if True
            landscape (bool): Renders graph in landscape layout if True

        Examples:
            >>> chip.write_dependencygraph('mydump.png')
            Renders the object dependency graph and writes the result to a png file.
        '''
        filepath = os.path.abspath(filename)
        self.logger.debug('Writing dependency graph to file %s', filepath)
        fileroot, ext = os.path.splitext(filepath)
        fileformat = ext.replace(".", "")

        color_schemes = {
            "none": {
                "design": "white",
                "library": "white",
                "logiclib": "white",
                "macrolib": "white"
            },
            "simple": {
                "design": "lightgreen",
                "library": "white",
                "logiclib": "lightgreen",
                "macrolib": "lightgreen"
            },
            "detailed": {
                "design": "lightgreen",
                "library": "white",
                "logiclib": "lightskyblue",
                "macrolib": "lightgoldenrod2"
            },
        }

        if not color_scheme:
            color_scheme = "none"

        if color_scheme not in color_schemes:
            raise ValueError(f'{color_scheme} is not a valid color scheme')

        # controlling border width
        if border:
            penwidth = '1'
        else:
            penwidth = '0'

        # controlling graph direction
        if landscape:
            rankdir = 'LR'
        else:
            rankdir = 'TB'

        dot = graphviz.Digraph(format=fileformat)
        dot.graph_attr['rankdir'] = rankdir
        dot.attr(bgcolor=background)

        def make_node(node_type, node, label):
            node = f'{node_type}-{node}'

            if node in nodes:
                return node

            nodes.add(node)
            dot.node(node, label=node, bordercolor=fontcolor, style='filled',
                     fontcolor=fontcolor, fontsize=fontsize, ordering="in",
                     penwidth=penwidth, fillcolor="white")
            return node

        nodes = {}

        def collect_library(root_type, lib, name=None):
            if not name:
                name = lib.design
            root_label = f'{root_type}-{name}'

            if root_label in nodes:
                return

            in_labels = []
            for in_lib in lib.get('option', 'library',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                in_labels.append(f'library-{in_lib}')
            for in_lib in lib.get('asic', 'logiclib',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                in_labels.append(f'logiclib-{in_lib}')
            for in_lib in lib.get('asic', 'macrolib',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                in_labels.append(f'macrolib-{in_lib}')

            shape = "oval"
            if root_type == "logiclib":
                shape = "box"
            elif root_type == "macrolib":
                shape = "box"
            elif root_type == "design":
                shape = "box"

            color = color_schemes[color_scheme][root_type]

            nodes[root_label] = {
                "text": name,
                "shape": shape,
                "color": color,
                "connects_to": set(in_labels)
            }

            for in_lib in lib.get('option', 'library',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                collect_library("library", Schema(cfg=self.getdict('library', in_lib)),
                                name=in_lib)
            for in_lib in lib.get('asic', 'logiclib',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                collect_library("logiclib", Schema(cfg=self.getdict('library', in_lib)),
                                name=in_lib)
            for in_lib in lib.get('asic', 'macrolib',
                                  step=Schema.GLOBAL_KEY, index=Schema.GLOBAL_KEY):
                collect_library("macrolib", Schema(cfg=self.getdict('library', in_lib)),
                                name=in_lib)

        collect_library("design", self)

        for label, info in nodes.items():
            dot.node(label, label=info['text'], bordercolor=fontcolor, style='filled',
                     fontcolor=fontcolor, fontsize=fontsize, ordering="in",
                     penwidth=penwidth, fillcolor=info["color"], shape=info['shape'])

            for conn in info['connects_to']:
                dot.edge(label, conn, dir='back')

        try:
            dot.render(filename=fileroot, cleanup=True)
        except graphviz.ExecutableNotFound as e:
            self.logger.error(f'Unable to save dependency graph: {e}')

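A minimal sketch of rendering the library dependency graph; it assumes libraries have
already been loaded so ['library', ...] and ['asic', 'logiclib'/'macrolib'] are
populated, and that graphviz is installed.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
# assumes logiclib/macrolib libraries have been loaded into the schema
chip.write_dependencygraph('deps.png', color_scheme='detailed')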
    ########################################################################
    def swap_library(self, org_library, new_library, step=None, index=None):
        '''
        Recursively changes a library in ['option', 'library'] from a previous
        value to a new value. If the library is not present then nothing is
        changed. If the new library is None, the original library will be removed.

        Args:
            org_library (str): Name of old library
            new_library (str): Name of new library
            step (str): Step to change, if not specified, all steps will be modified
            index (str): Index to change, if not specified, all indexes will be modified

        Examples:
            >>> chip.swap_library('lambdalib_iolib', 'lambdalib_sky130iolib')
            Changes from lambdalib_iolib to lambdalib_sky130iolib.
        '''
        all_libraries = self.getkeys('library')

        def swap(*key):
            if step is not None:
                r_step = step
                r_index = index
                if r_index is None:
                    r_index = Schema.GLOBAL_KEY

                val = self.get(*key, step=r_step, index=r_index)
                if new_library is None:
                    self.set(*key, [v for v in val if v != org_library],
                             step=r_step, index=r_index)
                else:
                    self.set(*key,
                             list(map(lambda x: new_library if x == org_library else x, val)),
                             step=r_step, index=r_index)
            else:
                for val, r_step, r_index in self.schema._getvals(*key):
                    if r_step is None:
                        r_step = Schema.GLOBAL_KEY
                    if r_index is None:
                        r_index = Schema.GLOBAL_KEY

                    if new_library is None:
                        self.set(*key, [v for v in val if v != org_library],
                                 step=r_step, index=r_index)
                    else:
                        self.set(*key,
                                 list(map(lambda x: new_library if x == org_library else x, val)),
                                 step=r_step, index=r_index)

        swap('option', 'library')
        for lib in all_libraries:
            swap('library', lib, 'option', 'library')

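A minimal sketch of the two swap modes; the second library name is hypothetical and
illustrates the removal behavior documented above.

import siliconcompiler

chip = siliconcompiler.Chip('heartbeat')
chip.swap_library('lambdalib_iolib', 'lambdalib_sky130iolib')  # replace in all steps/indexes
chip.swap_library('lambdalib_auxlib', None)                    # passing None removes the entry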
########################################################################
|
|
2238
|
+
def collect(self, directory=None, verbose=True, whitelist=None):
|
|
2239
|
+
'''
|
|
2240
|
+
Collects files found in the configuration dictionary and places
|
|
2241
|
+
them in inputs/. The function only copies in files that have the 'copy'
|
|
2242
|
+
field set as true.
|
|
2243
|
+
|
|
2244
|
+
1. indexing like in run, job1
|
|
2245
|
+
2. chdir package
|
|
2246
|
+
3. run tool to collect files, pickle file in output/design.v
|
|
2247
|
+
4. copy in rest of the files below
|
|
2248
|
+
5. record files read in to schema
|
|
2249
|
+
|
|
2250
|
+
Args:
|
|
2251
|
+
directory (filepath): Output filepath
|
|
2252
|
+
verbose (bool): Flag to indicate if logging should be used
|
|
2253
|
+
whitelist (list[path]): List of directories that are allowed to be
|
|
2254
|
+
collected. If a directory is is found that is not on this list
|
|
2255
|
+
a RuntimeError will be raised.
|
|
2256
|
+
'''
|
|
2257
|
+
|
|
2258
|
+
if not directory:
|
|
2259
|
+
directory = os.path.join(self._getcollectdir())
|
|
2260
|
+
|
|
+        if os.path.exists(directory):
+            shutil.rmtree(directory)
+        os.makedirs(directory)
+
+        if verbose:
+            self.logger.info('Collecting input sources')
+
+        dirs = {}
+        files = {}
+
+        for key in self.allkeys():
+            if key[-2:] == ('option', 'builddir'):
+                # skip builddir
+                continue
+            if key[-2:] == ('option', 'cachedir'):
+                # skip cache
+                continue
+            if key[0] == 'history':
+                # skip history
+                continue
+            if key[0] == 'tool' and key[2] == 'task' and key[4] in ('input',
+                                                                    'report',
+                                                                    'output'):
+                # skip flow files from builds
+                continue
+
+            leaftype = self.get(*key, field='type')
+            is_dir = re.search('dir', leaftype)
+            is_file = re.search('file', leaftype)
+            if is_dir or is_file:
+                if self.get(*key, field='copy'):
+                    for value, step, index in self.schema._getvals(*key):
+                        if not value:
+                            continue
+                        packages = self.get(*key, field='package', step=step, index=index)
+                        key_dirs = self.__find_files(*key, step=step, index=index)
+                        if not isinstance(key_dirs, (list, tuple)):
+                            key_dirs = [key_dirs]
+                        if not isinstance(value, (list, tuple)):
+                            value = [value]
+                        if not isinstance(packages, (list, tuple)):
+                            packages = [packages]
+                        for path, package, abspath in zip(value, packages, key_dirs):
+                            if not package:
+                                # Ensure package is an empty string
+                                package = ''
+                            if is_dir:
+                                dirs[(package, path)] = abspath
+                            else:
+                                files[(package, path)] = abspath
+
+        for package, path in sorted(dirs.keys()):
+            posix_path = self.__convert_paths_to_posix([path])[0]
+            if self.__find_sc_imported_file(posix_path, package, directory):
+                # File already imported in directory
+                continue
+
+            abspath = dirs[(package, path)]
+            if abspath:
+                filename = self.__get_imported_filename(posix_path, package)
+                dst_path = os.path.join(directory, filename)
+                if os.path.exists(dst_path):
+                    continue
+
+                directory_file_limit = None
+                file_count = 0
+
+                # Do sanity checks
+                def check_path(path, files):
+                    if pathlib.Path(path) == pathlib.Path.home():
+                        # refuse to collect home directory
+                        self.logger.error(f'Cannot collect user home directory: {path}')
+                        return files
+
+                    if pathlib.Path(path) == pathlib.Path(self.getbuilddir()):
+                        # refuse to collect build directory
+                        self.logger.error(f'Cannot collect build directory: {path}')
+                        return files
+
+                    # do not collect hidden files
+                    hidden_files = []
+                    # filter out hidden files (unix)
+                    hidden_files.extend([f for f in files if f.startswith('.')])
+                    # filter out hidden files (windows)
+                    try:
+                        if hasattr(os.stat_result, 'st_file_attributes'):
+                            hidden_files.extend([
+                                f for f in files
+                                if bool(os.stat(os.path.join(path, f)).st_file_attributes &
+                                        stat.FILE_ATTRIBUTE_HIDDEN)
+                            ])
+                    except:  # noqa 722
+                        pass
+                    # filter out hidden files (macos)
+                    try:
+                        if hasattr(os.stat_result, 'st_reparse_tag'):
+                            hidden_files.extend([
+                                f for f in files
+                                if bool(os.stat(os.path.join(path, f)).st_reparse_tag &
+                                        stat.UF_HIDDEN)
+                            ])
+                    except:  # noqa 722
+                        pass
+
+                    nonlocal file_count
+                    file_count += len(files) - len(hidden_files)
+
+                    if directory_file_limit and file_count > directory_file_limit:
+                        self.logger.error(f'File collection from {abspath} exceeds '
+                                          f'{directory_file_limit} files')
+                        return files
+
+                    return hidden_files
+
+                if whitelist is not None and abspath not in whitelist:
+                    raise RuntimeError(f'{abspath} is not on the approved collection list.')
+
+                if verbose:
+                    self.logger.info(f"Copying directory {abspath} to '{directory}' directory")
+                shutil.copytree(abspath, dst_path, ignore=check_path)
+            else:
+                raise SiliconCompilerError(f'Failed to copy {path}', chip=self)
+
+        for package, path in sorted(files.keys()):
+            posix_path = self.__convert_paths_to_posix([path])[0]
+            if self.__find_sc_imported_file(posix_path, package, directory):
+                # File already imported in directory
+                continue
+
+            abspath = files[(package, path)]
+            if abspath:
+                filename = self.__get_imported_filename(posix_path, package)
+                dst_path = os.path.join(directory, filename)
+                if verbose:
+                    self.logger.info(f"Copying {abspath} to '{directory}' directory")
+                shutil.copy2(abspath, dst_path)
+            else:
+                raise SiliconCompilerError(f'Failed to copy {path}', chip=self)
+
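The hidden-file filtering above works through `shutil.copytree`'s `ignore` callback: copytree invokes it once per directory visited with the directory path and its entry names, and skips whatever names the callback returns. A minimal standalone sketch of the same pattern (the src/dst paths are placeholders):

import shutil

def skip_hidden(path, names):
    # copytree calls this for each directory; returned names are not copied.
    return [n for n in names if n.startswith('.')]

# Recursively copies 'src' to 'dst', dropping dotfiles at every level.
shutil.copytree('src', 'dst', ignore=skip_hidden)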
+    ###########################################################################
+    def _archive_node(self, tar, step, index, include=None, verbose=True):
+        if verbose:
+            self.logger.info(f'Archiving {step}{index}...')
+
+        basedir = self.getworkdir(step=step, index=index)
+
+        def arcname(path):
+            return os.path.relpath(path, self.cwd)
+
+        if not os.path.isdir(basedir):
+            if self.get('record', 'status', step=step, index=index) != NodeStatus.SKIPPED:
+                self.logger.error(f'Unable to archive {step}{index} due to missing node directory')
+            return
+
+        if include:
+            for pattern in include:
+                for path in glob.iglob(os.path.join(basedir, pattern)):
+                    tar.add(path, arcname=arcname(path))
+        else:
+            for folder in ('reports', 'outputs'):
+                path = os.path.join(basedir, folder)
+                tar.add(path, arcname=arcname(path))
+
+            logfile = os.path.join(basedir, f'{step}.log')
+            if os.path.isfile(logfile):
+                tar.add(logfile, arcname=arcname(logfile))
+
+    ###########################################################################
+    def __archive_job(self, tar, job, flowgraph_nodes, index=None, include=None):
+        design = self.get('design')
+
+        jobdir = self.getworkdir(jobname=job)
+        manifest = os.path.join(jobdir, f'{design}.pkg.json')
+        if os.path.isfile(manifest):
+            arcname = os.path.relpath(manifest, self.cwd)
+            tar.add(manifest, arcname=arcname)
+        else:
+            self.logger.warning('Archiving job with failed or incomplete run.')
+
+        for (step, idx) in flowgraph_nodes:
+            self._archive_node(tar, step, idx, include=include)
+
+    ###########################################################################
+    def archive(self, jobs=None, step=None, index=None, include=None, archive_name=None):
+        '''Archive a job directory.
+
+        Creates a single compressed archive (.tgz) based on the design,
+        jobname, and flowgraph in the current chip manifest. Individual
+        steps and/or indices can be archived based on arguments specified.
+        By default, all steps and indices in the flowgraph are archived.
+        By default, only outputs, reports, log files, and the final manifest
+        are archived.
+
+        Args:
+            jobs (list of str): List of jobs to archive. By default, archives only the current job.
+            step (str): Step to archive.
+            index (str): Index to archive.
+            include (list of str): Override of default inclusion rules. Accepts list of glob
+                patterns that are matched from the root of individual step/index directories. To
+                capture all files, supply "*".
+            archive_name (str): Path to the archive.
+        '''
+        design = self.get('design')
+        if not jobs:
+            jobname = self.get('option', 'jobname')
+            jobs = [jobname]
+        else:
+            jobname = '_'.join(jobs)
+
+        if step and index:
+            flowgraph_nodes = [(step, index)]
+        elif step:
+            flow = self.get('option', 'flow')
+            flowgraph_nodes = _get_flowgraph_nodes(self, flow=flow, steps=[step])
+        else:
+            flowgraph_nodes = nodes_to_execute(self)
+
+        if not archive_name:
+            if step and index:
+                archive_name = f"{design}_{jobname}_{step}{index}.tgz"
+            elif step:
+                archive_name = f"{design}_{jobname}_{step}.tgz"
+            else:
+                archive_name = f"{design}_{jobname}.tgz"
+
+        self.logger.info(f'Creating archive {archive_name}...')
+
+        with tarfile.open(archive_name, "w:gz") as tar:
+            for job in jobs:
+                if len(jobs) > 0:
+                    self.logger.info(f'Archiving job {job}...')
+                self.__archive_job(tar, job, flowgraph_nodes, include=include)
+        return archive_name
+
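A rough usage sketch for archive(); the 'syn' step name below is a placeholder for whatever the active flowgraph defines:

# Archive every executed node of the current job with the default rules:
tarball = chip.archive()

# Archive a single node and capture everything in its directory:
tarball = chip.archive(step='syn', index='0', include=['*'])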
+    ###########################################################################
+    def hash_files(self, *keypath, update=True, check=True, verbose=True, allow_cache=False,
+                   skip_missing=False, step=None, index=None):
+        '''Generates hash values for a list of parameter files.
+
+        Generates a hash value for each file found in the keypath. If existing
+        hash values are stored, this method will compare hashes and trigger an
+        error if there's a mismatch. If the update variable is True, the
+        computed hash values are recorded in the 'filehash' field of the
+        parameter, following the order dictated by the files within the 'value'
+        parameter field.
+
+        Files are located using the find_files() function.
+
+        The file hash calculation is performed based on the 'algo' setting.
+        Supported algorithms include SHA1, SHA224, SHA256, SHA384, SHA512,
+        and MD5.
+
+        Args:
+            *keypath (str): Keypath to parameter.
+            update (bool): If True, the hash values are recorded in the
+                chip object manifest.
+            check (bool): If True, checks the newly computed hash against
+                the stored hash.
+            verbose (bool): If True, generates log messages.
+            allow_cache (bool): If True, hashing checks for a cached value
+                for each file; if one is found it is used, otherwise the
+                hash is computed.
+            skip_missing (bool): If True, hashing will be skipped when missing
+                files are detected.
+
+        Returns:
+            A list of hash values.
+
+        Examples:
+            >>> hashlist = hash_files('input', 'rtl', 'verilog')
+            Computes, stores, and returns hashes of files in :keypath:`input, rtl, verilog`.
+        '''
+
+        keypathstr = ','.join(keypath)
+        # TODO: Insert into find_files?
+        sc_type = self.get(*keypath, field='type')
+        if 'file' not in sc_type and 'dir' not in sc_type:
+            self.logger.error(f"Illegal attempt to hash non-file parameter [{keypathstr}].")
+            return []
+
+        filelist = self.__find_files(*keypath, missing_ok=skip_missing, step=step, index=index)
+        if not filelist:
+            return []
+
+        algo = self.get(*keypath, field='hashalgo')
+        hashfunc = getattr(hashlib, algo, None)
+        if not hashfunc:
+            self.logger.error(f"Unable to use {algo} as the hashing algorithm for [{keypathstr}].")
+            return []
+
+        def hash_file(filename, hashobj=None):
+            if not hashobj:
+                hashobj = hashfunc()
+            with open(filename, "rb") as f:
+                for byte_block in iter(lambda: f.read(4096), b""):
+                    hashobj.update(byte_block)
+            return hashobj.hexdigest()
+
+        if any([f is None for f in filelist]):
+            # skip if there are missing files
+            return []
+
+        # cycle through all paths
+        hashlist = []
+        if filelist and verbose:
+            self.logger.info(f'Computing hash value for [{keypathstr}]')
+
+        for filename in filelist:
+            if allow_cache and filename in self.__hashes:
+                hashlist.append(self.__hashes[filename])
+                continue
+
+            if os.path.isfile(filename):
+                hashlist.append(hash_file(filename))
+            elif os.path.isdir(filename):
+                all_files = []
+                for root, dirs, files in os.walk(filename):
+                    all_files.extend([os.path.join(root, f) for f in files])
+                dirhash = None
+                hashobj = hashfunc()
+                for file in sorted(all_files):
+                    posix_path = self.__convert_paths_to_posix([os.path.relpath(file, filename)])
+                    hashobj.update(posix_path[0].encode("utf-8"))
+                    dirhash = hash_file(file, hashobj=hashobj)
+                hashlist.append(dirhash)
+            else:
+                self.logger.error("Internal hashing error, file not found")
+                continue
+
+            self.__hashes[filename] = hashlist[-1]
+
+        if check:
+            # compare previous hash to new hash
+            oldhash = self.schema.get(*keypath, step=step, index=index, field='filehash')
+            check_failed = False
+            for i, item in enumerate(oldhash):
+                if item != hashlist[i]:
+                    self.logger.error(f"Hash mismatch for [{keypath}]")
+                    check_failed = True
+            if check_failed:
+                self.error("Hash mismatches detected")
+
+        if update:
+            index = str(index)
+
+            set_step = None
+            set_index = None
+            pernode = self.get(*keypath, field='pernode')
+            if pernode == 'required':
+                set_step = step
+                set_index = index
+            elif pernode == 'optional':
+                for vals, key_step, key_index in self.schema._getvals(*keypath):
+                    if key_step == step and key_index == index and vals:
+                        set_step = step
+                        set_index = index
+                    elif key_step == step and key_index is None and vals:
+                        set_step = step
+                        set_index = None
+
+            self.set(*keypath, hashlist,
+                     step=set_step, index=set_index,
+                     field='filehash', clobber=True)
+
+        return hashlist
+
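For directories, the loop above folds every file into one running hash object: for each file in sorted order it mixes in the POSIX-style relative path and then the file contents, so renaming a file changes the digest just as editing one does. A standalone sketch of the same scheme, assuming SHA-256 rather than the parameter's 'hashalgo' setting:

import hashlib
import os

def hash_dir(dirname):
    # One digest over sorted relative POSIX paths plus file contents.
    hashobj = hashlib.sha256()
    all_files = []
    for root, _, files in os.walk(dirname):
        all_files.extend(os.path.join(root, f) for f in files)
    for path in sorted(all_files):
        relpath = os.path.relpath(path, dirname).replace(os.sep, '/')
        hashobj.update(relpath.encode('utf-8'))
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(4096), b''):
                hashobj.update(block)
    return hashobj.hexdigest()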
+    ###########################################################################
+    def _dashboard(self, wait=True, port=None, graph_chips=None):
+        '''
+        Open a session of the dashboard.
+
+        The dashboard can be viewed in any webbrowser and can be accessed via:
+        http://localhost:8501/
+
+        Args:
+            wait (bool): If True, this call will wait in this method
+                until the dashboard has been closed.
+            port (int): An integer specifying which port to display the
+                dashboard to.
+            graph_chips (list): A list of dictionaries of the format
+                {'chip': chip object, 'name': chip name}
+
+        Examples:
+            >>> chip._dashboard()
+            Opens a session of the dashboard.
+        '''
+        dash = Dashboard(self, port=port, graph_chips=graph_chips)
+        dash.open_dashboard()
+        if wait:
+            try:
+                dash.wait()
+            except KeyboardInterrupt:
+                dash._sleep()
+            finally:
+                dash.stop()
+            return None
+
+        return dash
+
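A minimal sketch of a non-blocking dashboard session, assuming a populated chip object; stop() is the same call the blocking path above uses for cleanup:

dash = chip._dashboard(wait=False, port=8501)
# ... browse results at http://localhost:8501/ ...
dash.stop()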
+    ###########################################################################
+    def summary(self, show_all_indices=False, generate_image=True, generate_html=True):
+        '''
+        Prints a summary of the compilation manifest.
+
+        Metrics from the flowgraph nodes (or the from/to parameters, if
+        defined) are printed out on a per-step basis. All metrics from the
+        metric dictionary with weights set in the flowgraph dictionary are
+        printed out.
+
+        Args:
+            show_all_indices (bool): If True, displays metrics for all indices
+                of each step. If False, displays metrics only for winning
+                indices.
+            generate_image (bool): If True, generates a summary image featuring
+                a layout screenshot and a subset of metrics. Requires that the
+                current job has an ending node that generated a PNG file.
+            generate_html (bool): If True, generates an HTML report featuring a
+                metrics summary table and manifest tree view. The report will
+                include a layout screenshot if the current job has an ending node
+                that generated a PNG file.
+
+        Examples:
+            >>> chip.summary()
+            Prints out a summary of the run to stdout.
+        '''
+
+        # display whole flowgraph if no from/to specified
+        flow = self.get('option', 'flow')
+        nodes_to_execute = get_executed_nodes(self, flow)
+
+        _show_summary_table(self, flow, nodes_to_execute, show_all_indices=show_all_indices)
+
+        # Create a report for the Chip object which can be viewed in a web browser.
+        # Place report files in the build's root directory.
+        work_dir = self.getworkdir()
+        if os.path.isdir(work_dir):
+            # Mark file paths where the reports can be found if they were generated.
+            results_html = os.path.join(work_dir, 'report.html')
+            results_img = os.path.join(work_dir, f'{self.design}.png')
+
+            if generate_image:
+                _generate_summary_image(self, results_img)
+
+            if generate_html:
+                _generate_html_report(self, flow, nodes_to_execute, results_html)
+
+            # Try to open the results and layout only if '-nodisplay' is not set.
+            # Priority: PNG, PDF, HTML.
+            if (not self.get('option', 'nodisplay')):
+                if os.path.isfile(results_img):
+                    _open_summary_image(results_img)
+                elif os.path.isfile(results_html):
+                    _open_html_report(self, results_html)
+
+    ###########################################################################
+    def clock(self, pin, period, jitter=0, mode='global'):
+        """
+        Clock configuration helper function.
+
+        A utility function for setting all parameters associated with a
+        single clock definition in the schema.
+
+        The method modifies the following schema parameters:
+
+        ['datasheet', 'pin', pin, 'type', mode]
+        ['datasheet', 'pin', pin, 'tperiod', mode]
+        ['datasheet', 'pin', pin, 'tjitter', mode]
+
+        Args:
+            pin (str): Full hierarchical path to clk pin.
+            period (float): Clock period specified in ns.
+            jitter (float): Clock jitter specified in ns.
+            mode (str): Mode of operation (from datasheet).
+
+        Examples:
+            >>> chip.clock('clk', period=1.0)
+            Create a clock named 'clk' with a 1.0ns period.
+        """
+
+        self.set('datasheet', 'pin', pin, 'type', mode, 'clock')
+
+        period_range = (period * 1e-9, period * 1e-9, period * 1e-9)
+        self.set('datasheet', 'pin', pin, 'tperiod', mode, period_range)
+
+        jitter_range = (jitter * 1e-9, jitter * 1e-9, jitter * 1e-9)
+        self.set('datasheet', 'pin', pin, 'tjitter', mode, jitter_range)
+
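As a worked example of the scaling above, both ns arguments are multiplied by 1e-9 and stored as identical (min, typ, max) triples:

chip.clock('clk', period=1.0, jitter=0.1)
# tperiod for mode 'global' is now (1e-09, 1e-09, 1e-09) seconds and
# tjitter is roughly (1e-10, 1e-10, 1e-10); all three tuple slots carry
# the same value since clock() takes a single number per parameter.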
+    ###########################################################################
+    def node(self, flow, step, task, index=0):
+        '''
+        Creates a flowgraph node.
+
+        Creates a flowgraph node by binding a step to a tool-specific task.
+        A tool can be an external executable or one of the built-in functions
+        in the SiliconCompiler framework. Built-in functions include: minimum,
+        maximum, join, mux, verify. The task is set to 'step' if unspecified.
+
+        The method modifies the following schema parameters:
+
+        * ['flowgraph', flow, step, index, 'tool', tool]
+        * ['flowgraph', flow, step, index, 'task', task]
+        * ['flowgraph', flow, step, index, 'task', taskmodule]
+        * ['flowgraph', flow, step, index, 'weight', metric]
+
+        Args:
+            flow (str): Flow name
+            step (str): Step name
+            task (module/str): Task to associate with this node
+            index (int): Step index
+
+        Examples:
+            >>> import siliconcompiler.tools.openroad.place as place
+            >>> chip.node('asicflow', 'apr_place', place, index=0)
+            Creates a 'place' task with step='apr_place' and index=0 and binds it to the
+            'openroad' tool.
+        '''
+
+        if step in (Schema.GLOBAL_KEY, 'default', 'sc_collected_files'):
+            self.error(f'Illegal step name: {step} is reserved')
+            return
+
+        index = str(index)
+
+        # Determine task name and module
+        task_module = None
+        if isinstance(task, str):
+            task_module = task
+        elif inspect.ismodule(task):
+            task_module = task.__name__
+            self.modules[task_module] = task
+        else:
+            raise SiliconCompilerError(
+                f"{task} is not a string or module and cannot be used to setup a task.",
+                chip=self)
+
+        task_parts = task_module.split('.')
+        if len(task_parts) < 2:
+            raise SiliconCompilerError(
+                f"{task} is not a valid task, it must be associated with a tool '<tool>.<task>'.",
+                chip=self)
+        tool_name, task_name = task_parts[-2:]
+
+        # bind tool to node
+        self.set('flowgraph', flow, step, index, 'tool', tool_name)
+        self.set('flowgraph', flow, step, index, 'task', task_name)
+        self.set('flowgraph', flow, step, index, 'taskmodule', task_module)
+
+        # set default weights
+        for metric in self.getkeys('metric'):
+            self.set('flowgraph', flow, step, index, 'weight', metric, 0)
+
+    ###########################################################################
+    def edge(self, flow, tail, head, tail_index=0, head_index=0):
+        '''
+        Creates a directed edge from a tail node to a head node.
+
+        Connects the output of a tail node with the input of a head node by
+        setting the 'input' field of the head node in the schema flowgraph.
+
+        The method modifies the following parameters:
+
+        ['flowgraph', flow, head, str(head_index), 'input']
+
+        Args:
+            flow (str): Name of flow
+            tail (str): Name of tail node
+            head (str): Name of head node
+            tail_index (int): Index of tail node to connect
+            head_index (int): Index of head node to connect
+
+        Examples:
+            >>> chip.edge('asicflow', 'place', 'cts')
+            Creates a directed edge from place to cts.
+        '''
+        head_index = str(head_index)
+        tail_index = str(tail_index)
+
+        for step in (head, tail):
+            if step in (Schema.GLOBAL_KEY, 'default'):
+                self.error(f'Illegal step name: {step} is reserved')
+                return
+
+        tail_node = (tail, tail_index)
+        if tail_node in self.get('flowgraph', flow, head, head_index, 'input'):
+            self.logger.warning(f'Edge from {tail}{tail_index} to {head}{head_index} already '
+                                'exists, skipping')
+            return
+
+        self.add('flowgraph', flow, head, head_index, 'input', tail_node)
+
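A sketch of wiring a two-step flow with node() and edge(); the flow and task names are illustrative, using the '<tool>.<task>' string form accepted above:

chip.node('demoflow', 'syn', 'yosys.syn_asic')
chip.node('demoflow', 'place', 'openroad.place')
# Connect index 0 of 'syn' to index 0 of 'place':
chip.edge('demoflow', 'syn', 'place')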
+    ###########################################################################
+    def remove_node(self, flow, step, index=None):
+        '''
+        Remove a flowgraph node.
+
+        Args:
+            flow (str): Flow name
+            step (str): Step name
+            index (int): Step index
+        '''
+        if index is None:
+            # Iterate over all indexes
+            for index in self.getkeys('flowgraph', flow, step):
+                self.remove_node(flow, step, index)
+            return
+
+        index = str(index)
+
+        # Save input edges
+        node = (step, index)
+        node_inputs = self.get('flowgraph', flow, step, index, 'input')
+        self.remove('flowgraph', flow, step, index)
+
+        if len(self.getkeys('flowgraph', flow, step)) == 0:
+            self.remove('flowgraph', flow, step)
+
+        for flow_step in self.getkeys('flowgraph', flow):
+            for flow_index in self.getkeys('flowgraph', flow, flow_step):
+                inputs = self.get('flowgraph', flow, flow_step, flow_index, 'input')
+                if node in inputs:
+                    inputs = [inode for inode in inputs if inode != node]
+                    inputs.extend(node_inputs)
+                    self.set('flowgraph', flow, flow_step, flow_index, 'input', set(inputs))
+
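Note that remove_node() splices edges rather than just deleting them: every node that consumed the removed node inherits its inputs. Continuing the hypothetical flow above:

# demoflow currently runs syn -> place -> cts ('cts' is illustrative)
chip.remove_node('demoflow', 'place')
# 'cts' now lists ('syn', '0') as an input, so the graph stays connected.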
+    ###########################################################################
+    def graph(self, flow, subflow, name=None):
+        '''
+        Instantiates a named flow as a graph in the current flowgraph.
+
+        Args:
+            flow (str): Name of current flow.
+            subflow (str): Name of flow to instantiate.
+            name (str): Name of instance.
+
+        Examples:
+            >>> chip.graph('asicflow', 'synflow')
+            Instantiates the flow 'synflow' within the 'asicflow' flowgraph.
+        '''
+        for step in self.getkeys('flowgraph', subflow):
+            # uniquify each step
+            if name is None:
+                newstep = step
+            else:
+                newstep = name + "." + step
+
+            for keys in self.allkeys('flowgraph', subflow, step):
+                val = self.get('flowgraph', subflow, step, *keys)
+                self.set('flowgraph', flow, newstep, *keys, val)
+
+            if name is None:
+                continue
+
+            for index in self.getkeys('flowgraph', flow, newstep):
+                # rename inputs
+                all_inputs = self.get('flowgraph', flow, newstep, index, 'input')
+                self.set('flowgraph', flow, newstep, index, 'input', [])
+                for in_step, in_index in all_inputs:
+                    newin = name + "." + in_step
+                    self.add('flowgraph', flow, newstep, index, 'input', (newin, in_index))
+
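When a name is given, each subflow step is copied in under '<name>.<step>' and the copied edges are rewritten to the prefixed names, so the same flow can be instantiated more than once. A hedged sketch with illustrative names:

chip.graph('topflow', 'synflow', name='left')
chip.graph('topflow', 'synflow', name='right')
# 'topflow' now contains 'left.syn', 'right.syn', etc., with each
# instance's internal inputs pointing at its own prefixed steps.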
+    ###########################################################################
+    def run(self):
+        '''
+        Executes tasks in a flowgraph.
+
+        The run function sets up tools and launches runs for every node
+        in the flowgraph starting with 'from' steps and ending at 'to' steps.
+        From/to are taken from the schema from/to parameters if defined,
+        otherwise from/to are defined as the entry/exit steps of the flowgraph.
+        Before starting the process, tool modules are loaded and set up for each
+        step and index based on the schema eda dictionary settings.
+        Once the tools have been set up, the manifest is checked using the
+        check_manifest() function and files in the manifest are hashed based
+        on the 'hashmode' schema setting.
+
+        Once launched, each process waits for preceding steps to complete,
+        as defined by the flowgraph 'inputs' parameter. Once all inputs
+        are ready, previous steps are checked for errors before the
+        process enters a local working directory and starts to run
+        a tool or execute a built-in Chip function.
+
+        Fatal errors within a step/index process cause all subsequent
+        processes to exit before starting, returning control to the main
+        program which can then exit.
+
+        Examples:
+            >>> run()
+            Runs the execution flow defined by the flowgraph dictionary.
+        '''
+
+        sc_runner(self)
+
+    ###########################################################################
+    def show(self, filename=None, screenshot=False, extension=None):
+        '''
+        Opens a graphical viewer for the filename provided.
+
+        The show function opens the filename specified using a viewer tool
+        selected based on the file suffix and the registered showtools.
+        Display settings and technology settings for viewing the file are read
+        from the in-memory chip object schema settings. All temporary render
+        and display files are saved in the <build_dir>/_show_<jobname> directory.
+
+        Args:
+            filename (path): Name of file to display
+            screenshot (bool): Flag to indicate if this is a screenshot or show
+            extension (str): extension of file to show
+
+        Examples:
+            >>> show('build/oh_add/job0/write_gds/0/outputs/oh_add.gds')
+            Displays gds file with a viewer assigned by showtool
+        '''
+
+        sc_step = self.get('arg', 'step')
+        sc_index = self.get('arg', 'index')
+        sc_job = self.get('option', 'jobname')
+
+        has_filename = filename is not None
+        # Finding last layout if no argument specified
+        if filename is None:
+            self.logger.info('Searching build directory for layout to show.')
+
+            search_nodes = []
+            if sc_step and sc_index:
+                search_nodes.append((sc_step, sc_index))
+            elif sc_step:
+                for check_step, check_index in nodes_to_execute(self, self.get('option', 'flow')):
+                    if sc_step == check_step:
+                        search_nodes.append((check_step, check_index))
+            else:
+                for nodes in _get_flowgraph_execution_order(self,
+                                                            self.get('option', 'flow'),
+                                                            reverse=True):
+                    search_nodes.extend(nodes)
+
+            for ext in self._showtools.keys():
+                if extension and extension != ext:
+                    continue
+                for step, index in search_nodes:
+                    for search_ext in (ext, f"{ext}.gz"):
+                        filename = self.find_result(search_ext,
+                                                    step=step,
+                                                    index=index,
+                                                    jobname=sc_job)
+                        if filename:
+                            sc_step = step
+                            sc_index = index
+                            break
+                    if filename:
+                        break
+                if filename:
+                    break
+
+            if filename is None:
+                self.logger.error('Unable to automatically find layout in build directory.')
+                self.logger.error('Try passing in a full path to show() instead.')
+                return False
+
+        if not has_filename:
+            self.logger.info(f'Showing file {filename}')
+
+        filepath = os.path.abspath(filename)
+
+        # Check that file exists
+        if not os.path.exists(filepath):
+            self.logger.error(f"Invalid filepath {filepath}.")
+            return False
+
+        filetype = utils.get_file_ext(filepath)
+
+        if filetype not in self._showtools:
+            self.logger.error(f"Filetype '{filetype}' not available in the registered showtools.")
+            return False
+
+        saved_config = self.schema.copy()
+
+        taskname = 'show'
+        if screenshot:
+            taskname = 'screenshot'
+
+        try:
+            from siliconcompiler.flows import showflow
+            self.use(showflow, filetype=filetype, screenshot=screenshot)
+        except Exception as e:
+            self.logger.error(f"Flow setup failed: {e}")
+            # restore environment
+            self.schema = saved_config
+            return False
+
+        # Override environment
+        self.set('option', 'flow', 'showflow', clobber=True)
+        self.set('option', 'track', False, clobber=True)
+        self.set('option', 'hash', False, clobber=True)
+        self.set('option', 'nodisplay', False, clobber=True)
+        self.set('option', 'continue', True, clobber=True)
+        self.set('option', 'quiet', False, clobber=True)
+        self.set('arg', 'step', None, clobber=True)
+        self.set('arg', 'index', None, clobber=True)
+        self.unset('option', 'to')
+        self.unset('option', 'prune')
+        self.unset('option', 'from')
+        # build new job name
+        self.set('option', 'jobname', f'_{taskname}_{sc_job}_{sc_step}{sc_index}', clobber=True)
+
+        # Setup in step/index variables
+        for (step, index) in _get_flowgraph_nodes(self, 'showflow'):
+            if step != taskname:
+                continue
+            show_tool, _ = get_tool_task(self, step, index, flow='showflow')
+            self.set('tool', show_tool, 'task', taskname, 'var', 'show_filetype', filetype,
+                     step=step, index=index)
+            self.set('tool', show_tool, 'task', taskname, 'var', 'show_filepath', filepath,
+                     step=step, index=index)
+            if sc_step:
+                self.set('tool', show_tool, 'task', taskname, 'var', 'show_step', sc_step,
+                         step=step, index=index)
+            if sc_index:
+                self.set('tool', show_tool, 'task', taskname, 'var', 'show_index', sc_index,
+                         step=step, index=index)
+            if sc_job:
+                self.set('tool', show_tool, 'task', taskname, 'var', 'show_job', sc_job,
+                         step=step, index=index)
+
+        # run show flow
+        try:
+            self.run()
+            if screenshot:
+                step, index = _get_flowgraph_exit_nodes(self, flow='showflow')[0]
+                success = self.find_result('png', step=step, index=index)
+            else:
+                success = True
+        except SiliconCompilerError as e:
+            self.logger.error(e)
+            success = False
+
+        # restore environment
+        self.schema = saved_config
+
+        return success
+
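Usage follows the docstring example; with screenshot=True the same flow runs, but success is reported only if the exit node produced a PNG:

# Let show() search the build directory for the most recent layout:
chip.show()

# Render a specific GDS headlessly and check for the screenshot:
ok = chip.show('build/oh_add/job0/write_gds/0/outputs/oh_add.gds',
               screenshot=True)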
+    #######################################
+    def _getcollectdir(self, jobname=None):
+        '''
+        Get absolute path to collected files directory
+        '''
+
+        return os.path.join(self.getworkdir(jobname=jobname), 'sc_collected_files')
+
+    #######################################
+    def getworkdir(self, jobname=None, step=None, index=None):
+        '''
+        Get absolute path to the work directory for a given step/index;
+        if step/index is not given, the job directory is returned.
+
+        Args:
+            jobname (str): Job name
+            step (str): Node step name
+            index (str): Node index
+        '''
+
+        if jobname is None:
+            jobname = self.get('option', 'jobname')
+
+        dirlist = [self.getbuilddir(),
+                   self.get('design'),
+                   jobname]
+
+        # Return job directory if no step defined
+        # Return index 0 by default
+        if step is not None:
+            dirlist.append(step)
+
+            if index is None:
+                index = '0'
+
+            dirlist.append(str(index))
+
+        return os.path.join(*dirlist)
+
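Putting getbuilddir() and getworkdir() together, paths expand to <cwd>/<builddir>/<design>/<jobname>[/<step>/<index>]. For a design named 'heartbeat' with the default 'build' directory and 'job0' jobname:

chip.getworkdir()                       # <cwd>/build/heartbeat/job0
chip.getworkdir(step='syn')             # <cwd>/build/heartbeat/job0/syn/0
chip.getworkdir(step='syn', index='1')  # <cwd>/build/heartbeat/job0/syn/1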
+    #######################################
+    def getbuilddir(self):
+        '''
+        Get absolute path to the build directory
+        '''
+
+        dirlist = [self.cwd,
+                   self.get('option', 'builddir')]
+
+        return os.path.join(*dirlist)
+
+    #######################################
+    def __get_imported_filename(self, pathstr, package=None):
+        ''' Utility to map collected file to an unambiguous name based on its path.
+
+        The mapping looks like:
+        path/to/file.ext => file_<sha1('path/to/file.ext')>.ext
+        '''
+        path = pathlib.PurePosixPath(pathstr)
+        ext = ''.join(path.suffixes)
+
+        # strip off all file suffixes to get just the bare name
+        barepath = path
+        while barepath.suffix:
+            barepath = pathlib.PurePosixPath(barepath.stem)
+        filename = str(barepath.parts[-1])
+
+        if not package:
+            package = ''
+        else:
+            package = f'{package}:'
+        path_to_hash = f'{package}{str(path)}'
+        pathhash = hashlib.sha1(path_to_hash.encode('utf-8')).hexdigest()
+
+        return f'{filename}_{pathhash}{ext}'
+
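A worked example of the mapping, reproducible with the standard library; the 'mylib' package name is illustrative:

import hashlib
import pathlib

pathstr = 'path/to/file.tar.gz'
digest = hashlib.sha1(f'mylib:{pathstr}'.encode('utf-8')).hexdigest()
ext = ''.join(pathlib.PurePosixPath(pathstr).suffixes)  # '.tar.gz'
print(f'file_{digest}{ext}')  # stem + hash of 'package:path' + full suffixes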
+    def error(self, msg):
+        '''
+        Raises error.
+
+        If :keypath:`option, continue` is set to True, this
+        will log an error and set an internal error flag that will cause run()
+        to quit.
+
+        Args:
+            msg (str): Message associated with error
+        '''
+
+        if hasattr(self, 'logger'):
+            self.logger.error(msg)
+
+        step = self.get('arg', 'step')
+        index = self.get('arg', 'index')
+        if self.schema.get('option', 'continue', step=step, index=index):
+            self._error = True
+            return
+
+        raise SiliconCompilerError(msg) from None
+
+    #######################################
+    def __getstate__(self):
+        # Called when generating a serial stream of the object
+        attributes = self.__dict__.copy()
+
+        # Modules are not serializable, so save without cache
+        attributes['modules'] = {}
+
+        # Show tools are not serializable, so save without cache
+        attributes['_showtools'] = {}
+
+        # We have to remove the chip's logger before serializing the object
+        # since the logger object is not serializable.
+        del attributes['logger']
+        return attributes
+
+    #######################################
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+        # Reinitialize logger on restore
+        self._init_logger()
+        self.schema._init_logger(self.logger)
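These two hooks make Chip objects safe to pickle (for example, for multiprocessing workers) by dropping the logger and module caches before serialization and rebuilding the logger afterwards. A minimal roundtrip sketch, assuming a populated chip object:

import pickle

blob = pickle.dumps(chip)    # __getstate__ strips logger and module caches
clone = pickle.loads(blob)   # __setstate__ reinitializes the logger
clone.logger.info('restored chip object')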