siliconcompiler 0.32.3__py3-none-any.whl → 0.33.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- siliconcompiler/__init__.py +19 -2
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc.py +2 -2
- siliconcompiler/apps/sc_install.py +3 -3
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +4 -4
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +5 -3
- siliconcompiler/asic.py +120 -0
- siliconcompiler/checklist.py +150 -0
- siliconcompiler/core.py +267 -289
- siliconcompiler/flowgraph.py +803 -515
- siliconcompiler/fpga.py +84 -0
- siliconcompiler/metric.py +420 -0
- siliconcompiler/optimizer/vizier.py +2 -3
- siliconcompiler/package/__init__.py +29 -6
- siliconcompiler/pdk.py +415 -0
- siliconcompiler/record.py +449 -0
- siliconcompiler/remote/client.py +6 -3
- siliconcompiler/remote/schema.py +116 -112
- siliconcompiler/remote/server.py +3 -5
- siliconcompiler/report/dashboard/cli/__init__.py +13 -722
- siliconcompiler/report/dashboard/cli/board.py +895 -0
- siliconcompiler/report/dashboard/web/__init__.py +10 -10
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -4
- siliconcompiler/report/dashboard/web/components/flowgraph.py +3 -3
- siliconcompiler/report/dashboard/web/components/graph.py +6 -3
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +4 -3
- siliconcompiler/report/html_report.py +2 -3
- siliconcompiler/report/report.py +13 -7
- siliconcompiler/report/summary_image.py +1 -1
- siliconcompiler/report/summary_table.py +3 -3
- siliconcompiler/report/utils.py +11 -10
- siliconcompiler/scheduler/__init__.py +145 -280
- siliconcompiler/scheduler/run_node.py +2 -1
- siliconcompiler/scheduler/send_messages.py +4 -4
- siliconcompiler/scheduler/slurm.py +2 -2
- siliconcompiler/schema/__init__.py +19 -2
- siliconcompiler/schema/baseschema.py +493 -0
- siliconcompiler/schema/cmdlineschema.py +250 -0
- siliconcompiler/{sphinx_ext → schema/docs}/__init__.py +3 -1
- siliconcompiler/{sphinx_ext → schema/docs}/dynamicgen.py +63 -81
- siliconcompiler/{sphinx_ext → schema/docs}/schemagen.py +73 -85
- siliconcompiler/{sphinx_ext → schema/docs}/utils.py +12 -13
- siliconcompiler/schema/editableschema.py +136 -0
- siliconcompiler/schema/journalingschema.py +238 -0
- siliconcompiler/schema/namedschema.py +41 -0
- siliconcompiler/schema/packageschema.py +101 -0
- siliconcompiler/schema/parameter.py +791 -0
- siliconcompiler/schema/parametertype.py +323 -0
- siliconcompiler/schema/parametervalue.py +736 -0
- siliconcompiler/schema/safeschema.py +37 -0
- siliconcompiler/schema/schema_cfg.py +109 -1789
- siliconcompiler/schema/utils.py +5 -68
- siliconcompiler/schema_obj.py +119 -0
- siliconcompiler/tool.py +1308 -0
- siliconcompiler/tools/_common/__init__.py +6 -10
- siliconcompiler/tools/_common/sdc/sc_constraints.sdc +1 -1
- siliconcompiler/tools/bluespec/convert.py +7 -7
- siliconcompiler/tools/builtin/_common.py +1 -1
- siliconcompiler/tools/builtin/concatenate.py +2 -2
- siliconcompiler/tools/builtin/minimum.py +1 -1
- siliconcompiler/tools/builtin/mux.py +2 -1
- siliconcompiler/tools/builtin/nop.py +1 -1
- siliconcompiler/tools/builtin/verify.py +6 -4
- siliconcompiler/tools/chisel/convert.py +4 -4
- siliconcompiler/tools/genfasm/bitstream.py +3 -3
- siliconcompiler/tools/ghdl/convert.py +1 -1
- siliconcompiler/tools/icarus/compile.py +4 -4
- siliconcompiler/tools/icepack/bitstream.py +6 -1
- siliconcompiler/tools/klayout/convert_drc_db.py +5 -0
- siliconcompiler/tools/klayout/klayout_export.py +0 -1
- siliconcompiler/tools/klayout/klayout_utils.py +3 -10
- siliconcompiler/tools/nextpnr/apr.py +6 -1
- siliconcompiler/tools/nextpnr/nextpnr.py +4 -4
- siliconcompiler/tools/openroad/_apr.py +13 -0
- siliconcompiler/tools/openroad/rdlroute.py +3 -3
- siliconcompiler/tools/openroad/scripts/apr/postamble.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +5 -5
- siliconcompiler/tools/openroad/scripts/apr/sc_antenna_repair.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_clock_tree_synthesis.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_endcap_tapcell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillercell_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_fillmetal_insertion.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_macro_placement.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/apr/sc_metrics.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_pin_placement.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_power_grid.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_repair_timing.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +57 -1
- siliconcompiler/tools/openroad/scripts/common/screenshot.tcl +2 -2
- siliconcompiler/tools/openroad/scripts/common/write_images.tcl +28 -3
- siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +1 -1
- siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -3
- siliconcompiler/tools/openroad/scripts/sc_show.tcl +6 -6
- siliconcompiler/tools/slang/__init__.py +10 -10
- siliconcompiler/tools/surelog/parse.py +4 -4
- siliconcompiler/tools/sv2v/convert.py +20 -3
- siliconcompiler/tools/verilator/compile.py +2 -2
- siliconcompiler/tools/verilator/verilator.py +3 -3
- siliconcompiler/tools/vpr/place.py +1 -1
- siliconcompiler/tools/vpr/route.py +4 -4
- siliconcompiler/tools/vpr/screenshot.py +1 -1
- siliconcompiler/tools/vpr/show.py +5 -5
- siliconcompiler/tools/vpr/vpr.py +24 -24
- siliconcompiler/tools/xdm/convert.py +2 -2
- siliconcompiler/tools/xyce/simulate.py +1 -1
- siliconcompiler/tools/yosys/sc_synth_asic.tcl +74 -68
- siliconcompiler/tools/yosys/syn_asic.py +2 -2
- siliconcompiler/toolscripts/_tools.json +7 -7
- siliconcompiler/toolscripts/ubuntu22/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu24/install-vpr.sh +0 -2
- siliconcompiler/utils/__init__.py +8 -112
- siliconcompiler/utils/flowgraph.py +339 -0
- siliconcompiler/{issue.py → utils/issue.py} +4 -3
- siliconcompiler/utils/logging.py +1 -2
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/METADATA +9 -8
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/RECORD +151 -134
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/WHEEL +1 -1
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/entry_points.txt +8 -8
- siliconcompiler/schema/schema_obj.py +0 -1936
- siliconcompiler/toolscripts/ubuntu20/install-vpr.sh +0 -29
- siliconcompiler/toolscripts/ubuntu20/install-yosys-parmys.sh +0 -61
- /siliconcompiler/{templates → data/templates}/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/email/general.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/email/summary.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/issue/README.txt +0 -0
- /siliconcompiler/{templates → data/templates}/issue/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/issue/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.py.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/replay.sh.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/replay/requirements.txt +0 -0
- /siliconcompiler/{templates → data/templates}/replay/setup.sh +0 -0
- /siliconcompiler/{templates → data/templates}/report/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.css +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap.min.js +0 -0
- /siliconcompiler/{templates → data/templates}/report/bootstrap_LICENSE.md +0 -0
- /siliconcompiler/{templates → data/templates}/report/sc_report.j2 +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/slurm/run.sh +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/__init__.py +0 -0
- /siliconcompiler/{templates → data/templates}/tcl/manifest.tcl.j2 +0 -0
- /siliconcompiler/{units.py → utils/units.py} +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.32.3.dist-info → siliconcompiler-0.33.0.dist-info}/top_level.txt +0 -0
|
@@ -1,1936 +0,0 @@
|
|
|
1
|
-
# Copyright 2022 Silicon Compiler Authors. All Rights Reserved.
|
|
2
|
-
|
|
3
|
-
# NOTE: this file cannot rely on any third-party dependencies, including other
|
|
4
|
-
# SC dependencies outside of its directory, since it may be used by tool drivers
|
|
5
|
-
# that have isolated Python environments.
|
|
6
|
-
|
|
7
|
-
import copy
|
|
8
|
-
import logging
|
|
9
|
-
import os
|
|
10
|
-
import re
|
|
11
|
-
import pathlib
|
|
12
|
-
import argparse
|
|
13
|
-
import sys
|
|
14
|
-
import shlex
|
|
15
|
-
|
|
16
|
-
try:
|
|
17
|
-
import orjson as json
|
|
18
|
-
_has_orjson = True
|
|
19
|
-
except ModuleNotFoundError:
|
|
20
|
-
import json
|
|
21
|
-
_has_orjson = False
|
|
22
|
-
|
|
23
|
-
try:
|
|
24
|
-
import gzip
|
|
25
|
-
_has_gzip = True
|
|
26
|
-
except ModuleNotFoundError:
|
|
27
|
-
_has_gzip = False
|
|
28
|
-
|
|
29
|
-
try:
|
|
30
|
-
import csv
|
|
31
|
-
_has_csv = True
|
|
32
|
-
except ModuleNotFoundError:
|
|
33
|
-
_has_csv = False
|
|
34
|
-
|
|
35
|
-
try:
|
|
36
|
-
import yaml
|
|
37
|
-
_has_yaml = True
|
|
38
|
-
except ImportError:
|
|
39
|
-
_has_yaml = False
|
|
40
|
-
|
|
41
|
-
from .schema_cfg import schema_cfg
|
|
42
|
-
from .utils import escape_val_tcl, translate_loglevel, PerNode, Scope
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
class Schema:
|
|
46
|
-
"""Object for storing and accessing configuration values corresponding to
|
|
47
|
-
the SiliconCompiler schema.
|
|
48
|
-
|
|
49
|
-
Most user-facing interaction with the schema should occur through an
|
|
50
|
-
instance of :class:`~siliconcompiler.core.Chip`, but this class is available
|
|
51
|
-
for schema manipulation tasks that don't require the additional context of a
|
|
52
|
-
Chip object.
|
|
53
|
-
|
|
54
|
-
The two arguments to this class are mutually exclusive. If neither are
|
|
55
|
-
provided, the object is initialized to default values for all parameters.
|
|
56
|
-
|
|
57
|
-
Args:
|
|
58
|
-
cfg (dict): Initial configuration dictionary. This may be a subtree of
|
|
59
|
-
the schema.
|
|
60
|
-
manifest (str): Initial manifest.
|
|
61
|
-
logger (logging.Logger): instance of the parent logger if available
|
|
62
|
-
"""
|
|
63
|
-
|
|
64
|
-
_RECORD_ACCESS_IDENTIFIER = "SC_CFG_ACCESS_KEY"
|
|
65
|
-
|
|
66
|
-
# Special key in node dict that represents a value corresponds to a
|
|
67
|
-
# global default for all steps/indices.
|
|
68
|
-
GLOBAL_KEY = 'global'
|
|
69
|
-
PERNODE_FIELDS = ('value', 'filehash', 'date', 'author', 'signature', 'package')
|
|
70
|
-
|
|
71
|
-
def __init__(self, cfg=None, manifest=None, logger=None):
|
|
72
|
-
if cfg is not None and manifest is not None:
|
|
73
|
-
raise ValueError('You may not specify both cfg and manifest')
|
|
74
|
-
|
|
75
|
-
# Use during testing to record calls to Schema.get
|
|
76
|
-
self._init_record_access()
|
|
77
|
-
|
|
78
|
-
self._init_logger(logger)
|
|
79
|
-
|
|
80
|
-
self._stop_journal()
|
|
81
|
-
|
|
82
|
-
if manifest is not None:
|
|
83
|
-
# Normalize value to string in case we receive a pathlib.Path
|
|
84
|
-
cfg, self.__journal = Schema.__read_manifest_file(str(manifest))
|
|
85
|
-
else:
|
|
86
|
-
cfg = copy.deepcopy(cfg)
|
|
87
|
-
|
|
88
|
-
if cfg is not None:
|
|
89
|
-
try:
|
|
90
|
-
if Schema.__dict_requires_normalization(cfg):
|
|
91
|
-
cfg = Schema.__dict_to_schema(cfg)
|
|
92
|
-
self.cfg = cfg
|
|
93
|
-
except (TypeError, ValueError) as e:
|
|
94
|
-
raise ValueError('Attempting to read manifest with '
|
|
95
|
-
f'incompatible schema version: {e}') \
|
|
96
|
-
from e
|
|
97
|
-
else:
|
|
98
|
-
self.cfg = self._init_schema_cfg()
|
|
99
|
-
|
|
100
|
-
###########################################################################
|
|
101
|
-
def _init_schema_cfg(self):
|
|
102
|
-
return schema_cfg()
|
|
103
|
-
|
|
104
|
-
###########################################################################
|
|
105
|
-
@staticmethod
|
|
106
|
-
def __dict_to_schema_set(cfg, *key):
|
|
107
|
-
if Schema._is_leaf(cfg):
|
|
108
|
-
for field, value in cfg.items():
|
|
109
|
-
if field == 'node':
|
|
110
|
-
for step, substep in value.items():
|
|
111
|
-
if step == 'default':
|
|
112
|
-
continue
|
|
113
|
-
for index, values in substep.items():
|
|
114
|
-
if step == Schema.GLOBAL_KEY:
|
|
115
|
-
sstep = None
|
|
116
|
-
else:
|
|
117
|
-
sstep = step
|
|
118
|
-
if index == Schema.GLOBAL_KEY:
|
|
119
|
-
sindex = None
|
|
120
|
-
else:
|
|
121
|
-
sindex = index
|
|
122
|
-
for nodefield, nodevalue in values.items():
|
|
123
|
-
Schema.__set(*key, nodevalue,
|
|
124
|
-
cfg=cfg,
|
|
125
|
-
field=nodefield,
|
|
126
|
-
step=sstep, index=sindex)
|
|
127
|
-
else:
|
|
128
|
-
Schema.__set(*key, value, cfg=cfg, field=field)
|
|
129
|
-
else:
|
|
130
|
-
for nextkey, subcfg in cfg.items():
|
|
131
|
-
Schema.__dict_to_schema_set(subcfg, *key, nextkey)
|
|
132
|
-
|
|
133
|
-
###########################################################################
|
|
134
|
-
@staticmethod
|
|
135
|
-
def __dict_to_schema(cfg):
|
|
136
|
-
for category, subcfg in cfg.items():
|
|
137
|
-
if category in ('history', 'library'):
|
|
138
|
-
# History and library are subschemas
|
|
139
|
-
for _, value in subcfg.items():
|
|
140
|
-
Schema.__dict_to_schema(value)
|
|
141
|
-
else:
|
|
142
|
-
Schema.__dict_to_schema_set(subcfg, category)
|
|
143
|
-
return cfg
|
|
144
|
-
|
|
145
|
-
###########################################################################
|
|
146
|
-
@staticmethod
|
|
147
|
-
def __dict_requires_normalization(cfg):
|
|
148
|
-
'''
|
|
149
|
-
Recurse over scheme configuration to check for tuples
|
|
150
|
-
Returns: False if dict is correct, True is dict requires normalization,
|
|
151
|
-
None if tuples were not found
|
|
152
|
-
'''
|
|
153
|
-
if Schema._is_leaf(cfg):
|
|
154
|
-
if '(' in cfg['type']:
|
|
155
|
-
for step, substep in cfg['node'].items():
|
|
156
|
-
for index, values in substep.items():
|
|
157
|
-
values = values['value']
|
|
158
|
-
if not values:
|
|
159
|
-
continue
|
|
160
|
-
if isinstance(values, list):
|
|
161
|
-
for v in values:
|
|
162
|
-
if isinstance(v, tuple):
|
|
163
|
-
return False
|
|
164
|
-
if isinstance(values, tuple):
|
|
165
|
-
return False
|
|
166
|
-
return True
|
|
167
|
-
else:
|
|
168
|
-
return None
|
|
169
|
-
else:
|
|
170
|
-
for subcfg in cfg.values():
|
|
171
|
-
ret = Schema.__dict_requires_normalization(subcfg)
|
|
172
|
-
if ret is None:
|
|
173
|
-
continue
|
|
174
|
-
else:
|
|
175
|
-
return ret
|
|
176
|
-
|
|
177
|
-
###########################################################################
|
|
178
|
-
def _merge_with_init_schema(self):
|
|
179
|
-
new_schema = Schema()
|
|
180
|
-
|
|
181
|
-
for keylist in self.allkeys():
|
|
182
|
-
if keylist[0] in ('history', 'library'):
|
|
183
|
-
continue
|
|
184
|
-
|
|
185
|
-
if 'default' in keylist:
|
|
186
|
-
continue
|
|
187
|
-
|
|
188
|
-
# only read in valid keypaths without 'default'
|
|
189
|
-
key_valid = new_schema.valid(*keylist, default_valid=True)
|
|
190
|
-
if not key_valid:
|
|
191
|
-
self.logger.warning(f'Keypath {keylist} is not valid')
|
|
192
|
-
if not key_valid:
|
|
193
|
-
continue
|
|
194
|
-
|
|
195
|
-
for val, step, index in self._getvals(*keylist, return_defvalue=False):
|
|
196
|
-
new_schema.set(*keylist, val, step=step, index=index)
|
|
197
|
-
|
|
198
|
-
# update other pernode fields
|
|
199
|
-
# TODO: only update these if clobber is successful
|
|
200
|
-
step_key = Schema.GLOBAL_KEY if not step else step
|
|
201
|
-
idx_key = Schema.GLOBAL_KEY if not index else index
|
|
202
|
-
for field in self.getdict(*keylist)['node'][step_key][idx_key].keys():
|
|
203
|
-
if field == 'value':
|
|
204
|
-
continue
|
|
205
|
-
new_schema.set(*keylist,
|
|
206
|
-
self.get(*keylist, step=step, index=index, field=field),
|
|
207
|
-
step=step, index=index, field=field)
|
|
208
|
-
|
|
209
|
-
if 'library' in self.cfg:
|
|
210
|
-
# Handle libraries separately
|
|
211
|
-
for library in self.cfg['library'].keys():
|
|
212
|
-
lib_schema = Schema(cfg=self.getdict('library', library))
|
|
213
|
-
lib_schema._merge_with_init_schema()
|
|
214
|
-
new_schema.cfg['library'][library] = lib_schema.cfg
|
|
215
|
-
|
|
216
|
-
if 'history' in self.cfg:
|
|
217
|
-
# Copy over history
|
|
218
|
-
new_schema.cfg['history'] = self.cfg['history']
|
|
219
|
-
|
|
220
|
-
self.cfg = new_schema.cfg
|
|
221
|
-
|
|
222
|
-
###########################################################################
|
|
223
|
-
@staticmethod
|
|
224
|
-
def __read_manifest_file(filepath):
|
|
225
|
-
if not os.path.isfile(filepath):
|
|
226
|
-
raise ValueError(f'Manifest file not found {filepath}')
|
|
227
|
-
|
|
228
|
-
if os.path.splitext(filepath)[1].lower() == '.gz':
|
|
229
|
-
if not _has_gzip:
|
|
230
|
-
raise RuntimeError("gzip is not available")
|
|
231
|
-
fin = gzip.open(filepath, 'r')
|
|
232
|
-
else:
|
|
233
|
-
fin = open(filepath, 'r')
|
|
234
|
-
|
|
235
|
-
try:
|
|
236
|
-
if re.search(r'(\.json|\.sup)(\.gz)*$', filepath, flags=re.IGNORECASE):
|
|
237
|
-
localcfg = json.loads(fin.read())
|
|
238
|
-
elif re.search(r'(\.yaml|\.yml)(\.gz)*$', filepath, flags=re.IGNORECASE):
|
|
239
|
-
if not _has_yaml:
|
|
240
|
-
raise ImportError('yaml package required to read YAML manifest')
|
|
241
|
-
localcfg = yaml.load(fin, Loader=yaml.SafeLoader)
|
|
242
|
-
else:
|
|
243
|
-
raise ValueError(f'File format not recognized {filepath}')
|
|
244
|
-
finally:
|
|
245
|
-
fin.close()
|
|
246
|
-
|
|
247
|
-
journal = None
|
|
248
|
-
try:
|
|
249
|
-
if '__journal__' in localcfg:
|
|
250
|
-
journal = localcfg['__journal__']
|
|
251
|
-
del localcfg['__journal__']
|
|
252
|
-
except (TypeError, ValueError) as e:
|
|
253
|
-
raise ValueError(f'Attempting to read manifest with incompatible schema version: {e}') \
|
|
254
|
-
from e
|
|
255
|
-
|
|
256
|
-
return localcfg, journal
|
|
257
|
-
|
|
258
|
-
def get(self, *keypath, field='value', job=None, step=None, index=None):
|
|
259
|
-
"""
|
|
260
|
-
Returns a schema parameter field.
|
|
261
|
-
|
|
262
|
-
See :meth:`~siliconcompiler.core.Chip.get` for detailed documentation.
|
|
263
|
-
"""
|
|
264
|
-
|
|
265
|
-
if self.__record_access["recording"]:
|
|
266
|
-
self.__record_access["record"].add(tuple(keypath))
|
|
267
|
-
|
|
268
|
-
# Prevent accidental modifications of the schema content by not passing a reference
|
|
269
|
-
return copy.copy(self.__get(*keypath, field=field, job=job, step=step, index=index))
|
|
270
|
-
|
|
271
|
-
###########################################################################
|
|
272
|
-
def __get(self, *keypath, field='value', job=None, step=None, index=None):
|
|
273
|
-
cfg = self.__search(*keypath, job=job)
|
|
274
|
-
|
|
275
|
-
if not Schema._is_leaf(cfg):
|
|
276
|
-
raise ValueError(f'Invalid keypath {keypath}: get() '
|
|
277
|
-
'must be called on a complete keypath')
|
|
278
|
-
|
|
279
|
-
err = Schema.__validate_step_index(cfg['pernode'], field, step, index)
|
|
280
|
-
if err:
|
|
281
|
-
raise ValueError(f'Invalid args to get() of keypath {keypath}: {err}')
|
|
282
|
-
|
|
283
|
-
if isinstance(index, int):
|
|
284
|
-
index = str(index)
|
|
285
|
-
|
|
286
|
-
if field in self.PERNODE_FIELDS:
|
|
287
|
-
try:
|
|
288
|
-
return cfg['node'][step][index][field]
|
|
289
|
-
except KeyError:
|
|
290
|
-
if PerNode(cfg['pernode']) == PerNode.REQUIRED:
|
|
291
|
-
return cfg['node']['default']['default'][field]
|
|
292
|
-
|
|
293
|
-
try:
|
|
294
|
-
return cfg['node'][step][self.GLOBAL_KEY][field]
|
|
295
|
-
except KeyError:
|
|
296
|
-
pass
|
|
297
|
-
|
|
298
|
-
try:
|
|
299
|
-
return cfg['node'][self.GLOBAL_KEY][self.GLOBAL_KEY][field]
|
|
300
|
-
except KeyError:
|
|
301
|
-
return cfg['node']['default']['default'][field]
|
|
302
|
-
elif field in cfg:
|
|
303
|
-
if field == "pernode":
|
|
304
|
-
return PerNode(cfg[field])
|
|
305
|
-
if field == "scope":
|
|
306
|
-
return Scope(cfg[field])
|
|
307
|
-
return cfg[field]
|
|
308
|
-
else:
|
|
309
|
-
raise ValueError(f'Invalid field {field}')
|
|
310
|
-
|
|
311
|
-
###########################################################################
|
|
312
|
-
def set(self, *args, field='value', clobber=True, step=None, index=None):
|
|
313
|
-
'''
|
|
314
|
-
Sets a schema parameter field.
|
|
315
|
-
|
|
316
|
-
See :meth:`~siliconcompiler.core.Chip.set` for detailed documentation.
|
|
317
|
-
'''
|
|
318
|
-
|
|
319
|
-
keypath = args[:-1]
|
|
320
|
-
cfg = self.__search(*keypath, insert_defaults=True)
|
|
321
|
-
|
|
322
|
-
return self.__set(*args, logger=self.logger, cfg=cfg, field=field, clobber=clobber,
|
|
323
|
-
step=step, index=index, journal_callback=self.__record_journal)
|
|
324
|
-
|
|
325
|
-
###########################################################################
|
|
326
|
-
@staticmethod
|
|
327
|
-
def __set(*args, logger=None, cfg=None, field='value', clobber=True,
|
|
328
|
-
step=None, index=None,
|
|
329
|
-
journal_callback=None):
|
|
330
|
-
'''
|
|
331
|
-
Sets a schema parameter field.
|
|
332
|
-
|
|
333
|
-
See :meth:`~siliconcompiler.core.Chip.set` for detailed documentation.
|
|
334
|
-
'''
|
|
335
|
-
keypath = args[:-1]
|
|
336
|
-
value = args[-1]
|
|
337
|
-
|
|
338
|
-
if not Schema._is_leaf(cfg):
|
|
339
|
-
raise ValueError(f'Invalid keypath {keypath}: set() '
|
|
340
|
-
'must be called on a complete keypath')
|
|
341
|
-
|
|
342
|
-
err = Schema.__validate_step_index(cfg['pernode'], field, step, index)
|
|
343
|
-
if err:
|
|
344
|
-
raise ValueError(f'Invalid args to set() of keypath {keypath}: {err}')
|
|
345
|
-
|
|
346
|
-
if isinstance(index, int):
|
|
347
|
-
index = str(index)
|
|
348
|
-
|
|
349
|
-
if cfg['lock'] and field != 'lock':
|
|
350
|
-
if logger:
|
|
351
|
-
logger.debug(f'Failed to set value for {keypath}: parameter is locked')
|
|
352
|
-
return False
|
|
353
|
-
|
|
354
|
-
if Schema.__is_set(cfg, step=step, index=index) and not clobber:
|
|
355
|
-
if logger:
|
|
356
|
-
logger.debug(f'Failed to set value for {keypath}: clobber is False '
|
|
357
|
-
'and parameter is set')
|
|
358
|
-
return False
|
|
359
|
-
|
|
360
|
-
allowed_values = None
|
|
361
|
-
if 'enum' in cfg:
|
|
362
|
-
allowed_values = cfg['enum']
|
|
363
|
-
|
|
364
|
-
value = Schema.__check_and_normalize(value, cfg['type'], field, keypath, allowed_values)
|
|
365
|
-
|
|
366
|
-
if journal_callback:
|
|
367
|
-
journal_callback("set", keypath,
|
|
368
|
-
value=value,
|
|
369
|
-
field=field,
|
|
370
|
-
step=step, index=index)
|
|
371
|
-
|
|
372
|
-
if field in Schema.PERNODE_FIELDS:
|
|
373
|
-
step = step if step is not None else Schema.GLOBAL_KEY
|
|
374
|
-
index = index if index is not None else Schema.GLOBAL_KEY
|
|
375
|
-
|
|
376
|
-
if step not in cfg['node']:
|
|
377
|
-
cfg['node'][step] = {}
|
|
378
|
-
if index not in cfg['node'][step]:
|
|
379
|
-
cfg['node'][step][index] = copy.deepcopy(cfg['node']['default']['default'])
|
|
380
|
-
cfg['node'][step][index][field] = value
|
|
381
|
-
else:
|
|
382
|
-
cfg[field] = value
|
|
383
|
-
|
|
384
|
-
return True
|
|
385
|
-
|
|
386
|
-
###########################################################################
|
|
387
|
-
def add(self, *args, field='value', step=None, index=None):
|
|
388
|
-
'''
|
|
389
|
-
Adds item(s) to a schema parameter list.
|
|
390
|
-
|
|
391
|
-
See :meth:`~siliconcompiler.core.Chip.add` for detailed documentation.
|
|
392
|
-
'''
|
|
393
|
-
keypath = args[:-1]
|
|
394
|
-
|
|
395
|
-
cfg = self.__search(*keypath, insert_defaults=True)
|
|
396
|
-
|
|
397
|
-
return self._add(*args, cfg=cfg, field=field, step=step, index=index)
|
|
398
|
-
|
|
399
|
-
###########################################################################
|
|
400
|
-
def _add(self, *args, cfg=None, field='value', step=None, index=None, package=None):
|
|
401
|
-
'''
|
|
402
|
-
Adds item(s) to a schema parameter list.
|
|
403
|
-
|
|
404
|
-
See :meth:`~siliconcompiler.core.Chip.add` for detailed documentation.
|
|
405
|
-
'''
|
|
406
|
-
keypath = args[:-1]
|
|
407
|
-
value = args[-1]
|
|
408
|
-
|
|
409
|
-
if not Schema._is_leaf(cfg):
|
|
410
|
-
raise ValueError(f'Invalid keypath {keypath}: add() '
|
|
411
|
-
'must be called on a complete keypath')
|
|
412
|
-
|
|
413
|
-
err = Schema.__validate_step_index(cfg['pernode'], field, step, index)
|
|
414
|
-
if err:
|
|
415
|
-
raise ValueError(f'Invalid args to add() of keypath {keypath}: {err}')
|
|
416
|
-
|
|
417
|
-
if isinstance(index, int):
|
|
418
|
-
index = str(index)
|
|
419
|
-
|
|
420
|
-
if not Schema.__is_list(field, cfg['type']):
|
|
421
|
-
if field == 'value':
|
|
422
|
-
raise ValueError(f'Invalid keypath {keypath}: add() must be called on a list')
|
|
423
|
-
else:
|
|
424
|
-
raise ValueError(f'Invalid field {field}: add() must be called on a list')
|
|
425
|
-
|
|
426
|
-
if cfg['lock']:
|
|
427
|
-
self.logger.debug(f'Failed to set value for {keypath}: parameter is locked')
|
|
428
|
-
return False
|
|
429
|
-
|
|
430
|
-
allowed_values = None
|
|
431
|
-
if 'enum' in cfg:
|
|
432
|
-
allowed_values = cfg['enum']
|
|
433
|
-
|
|
434
|
-
value = Schema.__check_and_normalize(value, cfg['type'], field, keypath, allowed_values)
|
|
435
|
-
self.__record_journal("add", keypath, value=value, field=field, step=step, index=index)
|
|
436
|
-
if field in self.PERNODE_FIELDS:
|
|
437
|
-
modified_step = step if step is not None else self.GLOBAL_KEY
|
|
438
|
-
modified_index = index if index is not None else self.GLOBAL_KEY
|
|
439
|
-
|
|
440
|
-
if modified_step not in cfg['node']:
|
|
441
|
-
cfg['node'][modified_step] = {}
|
|
442
|
-
if modified_index not in cfg['node'][modified_step]:
|
|
443
|
-
cfg['node'][modified_step][modified_index] = copy.deepcopy(
|
|
444
|
-
cfg['node']['default']['default'])
|
|
445
|
-
cfg['node'][modified_step][modified_index][field].extend(value)
|
|
446
|
-
else:
|
|
447
|
-
cfg[field].extend(value)
|
|
448
|
-
|
|
449
|
-
return True
|
|
450
|
-
|
|
451
|
-
###########################################################################
|
|
452
|
-
def change_type(self, *key, type=None):
|
|
453
|
-
'''
|
|
454
|
-
Change the type of a key
|
|
455
|
-
|
|
456
|
-
Args:
|
|
457
|
-
key (list): Key to change.
|
|
458
|
-
type (str): New data type for this key
|
|
459
|
-
|
|
460
|
-
Examples:
|
|
461
|
-
>>> chip.set('option', 'var', 'run_test', 'true')
|
|
462
|
-
>>> chip.schema.change_type('option', 'var', 'run_test', 'bool')
|
|
463
|
-
Changes the type of ['option', 'var', 'run_test'] to a boolean.
|
|
464
|
-
'''
|
|
465
|
-
|
|
466
|
-
if not type:
|
|
467
|
-
raise ValueError('Type cannot be empty')
|
|
468
|
-
|
|
469
|
-
if 'file' in type or 'dir' in type:
|
|
470
|
-
raise ValueError(f'Cannot convert to {type}')
|
|
471
|
-
|
|
472
|
-
cfg = self.__search(*key, insert_defaults=True)
|
|
473
|
-
if not Schema._is_leaf(cfg):
|
|
474
|
-
raise ValueError(f'Invalid keypath {key}: change_type() '
|
|
475
|
-
'must be called on a complete keypath')
|
|
476
|
-
|
|
477
|
-
old_type = self.get(*key, field='type')
|
|
478
|
-
if 'file' in old_type or 'dir' in old_type:
|
|
479
|
-
raise ValueError(f'Cannot convert from {old_type}')
|
|
480
|
-
|
|
481
|
-
old_type_is_list = '[' in old_type
|
|
482
|
-
new_type_is_list = '[' in type
|
|
483
|
-
|
|
484
|
-
if 'file' in old_type or 'dir' in old_type:
|
|
485
|
-
raise ValueError(f'Cannot convert from {type}')
|
|
486
|
-
|
|
487
|
-
new_values = []
|
|
488
|
-
for values, step, index in [*self._getvals(*key),
|
|
489
|
-
(self.get_default(*key), 'default', 'default')]:
|
|
490
|
-
if old_type_is_list and not new_type_is_list:
|
|
491
|
-
# Old type is list, but new type in not a list
|
|
492
|
-
# Can only convert if list has 1 or 0 elements
|
|
493
|
-
if len(values) > 1:
|
|
494
|
-
raise ValueError(f'Too many values in {",".join(key)} to convert a '
|
|
495
|
-
'list of a scalar.')
|
|
496
|
-
if len(values) == 1:
|
|
497
|
-
values = values[0]
|
|
498
|
-
else:
|
|
499
|
-
values = None
|
|
500
|
-
|
|
501
|
-
if new_type_is_list and values is None:
|
|
502
|
-
values = []
|
|
503
|
-
|
|
504
|
-
new_values.append((step, index, values))
|
|
505
|
-
|
|
506
|
-
self.set(*key, type, field='type')
|
|
507
|
-
for step, index, values in new_values:
|
|
508
|
-
if step == 'default' and index == 'default':
|
|
509
|
-
self.set_default(*key, values)
|
|
510
|
-
else:
|
|
511
|
-
self.set(*key, values, step=step, index=index)
|
|
512
|
-
|
|
513
|
-
###########################################################################
|
|
514
|
-
def copy_key(self, src, dst):
|
|
515
|
-
'''
|
|
516
|
-
Copy a parameters information from the source keypath to the destination
|
|
517
|
-
keypath.
|
|
518
|
-
|
|
519
|
-
Args:
|
|
520
|
-
src (list): Key to use as source.
|
|
521
|
-
dst (list): Key to use as destination
|
|
522
|
-
'''
|
|
523
|
-
|
|
524
|
-
data = self.getdict(*src)
|
|
525
|
-
|
|
526
|
-
cfg = self.__search(*dst[0:-1], insert_defaults=True)
|
|
527
|
-
cfg[dst[-1]] = data
|
|
528
|
-
|
|
529
|
-
###########################################################################
|
|
530
|
-
def remove(self, *keypath):
|
|
531
|
-
'''
|
|
532
|
-
Remove a keypath
|
|
533
|
-
|
|
534
|
-
See :meth:`~siliconcompiler.core.Chip.remove` for detailed documentation.
|
|
535
|
-
'''
|
|
536
|
-
search_path = keypath[0:-1]
|
|
537
|
-
removal_key = keypath[-1]
|
|
538
|
-
|
|
539
|
-
if removal_key == 'default':
|
|
540
|
-
self.logger.error(f'Cannot remove default keypath: {keypath}')
|
|
541
|
-
return
|
|
542
|
-
|
|
543
|
-
cfg = self.__search(*search_path)
|
|
544
|
-
if 'default' not in cfg:
|
|
545
|
-
self.logger.error(f'Cannot remove a non-default keypath: {keypath}')
|
|
546
|
-
return
|
|
547
|
-
|
|
548
|
-
if removal_key not in cfg:
|
|
549
|
-
self.logger.error(f'Key does not exist: {keypath}')
|
|
550
|
-
return
|
|
551
|
-
|
|
552
|
-
for key in self.allkeys(*keypath):
|
|
553
|
-
fullpath = [*keypath, *key]
|
|
554
|
-
if self.get(*fullpath, field='lock'):
|
|
555
|
-
self.logger.error(f'Key is locked: {fullpath}')
|
|
556
|
-
return
|
|
557
|
-
|
|
558
|
-
del cfg[removal_key]
|
|
559
|
-
self.__record_journal("remove", keypath)
|
|
560
|
-
|
|
561
|
-
###########################################################################
|
|
562
|
-
def unset(self, *keypath, step=None, index=None):
|
|
563
|
-
'''
|
|
564
|
-
Unsets a schema parameter field.
|
|
565
|
-
|
|
566
|
-
See :meth:`~siliconcompiler.core.Chip.unset` for detailed documentation.
|
|
567
|
-
'''
|
|
568
|
-
cfg = self.__search(*keypath)
|
|
569
|
-
|
|
570
|
-
if not Schema._is_leaf(cfg):
|
|
571
|
-
raise ValueError(f'Invalid keypath {keypath}: unset() '
|
|
572
|
-
'must be called on a complete keypath')
|
|
573
|
-
|
|
574
|
-
err = Schema.__validate_step_index(cfg['pernode'], 'value', step, index)
|
|
575
|
-
if err:
|
|
576
|
-
raise ValueError(f'Invalid args to unset() of keypath {keypath}: {err}')
|
|
577
|
-
|
|
578
|
-
if isinstance(index, int):
|
|
579
|
-
index = str(index)
|
|
580
|
-
|
|
581
|
-
if cfg['lock']:
|
|
582
|
-
self.logger.debug(f'Failed to set value for {keypath}: parameter is locked')
|
|
583
|
-
return False
|
|
584
|
-
|
|
585
|
-
if step is None:
|
|
586
|
-
step = Schema.GLOBAL_KEY
|
|
587
|
-
if index is None:
|
|
588
|
-
index = Schema.GLOBAL_KEY
|
|
589
|
-
|
|
590
|
-
try:
|
|
591
|
-
del cfg['node'][step][index]
|
|
592
|
-
self.__record_journal("unset", keypath, step=step, index=index)
|
|
593
|
-
except KeyError:
|
|
594
|
-
# If this key doesn't exist, silently continue - it was never set
|
|
595
|
-
pass
|
|
596
|
-
|
|
597
|
-
return True
|
|
598
|
-
|
|
599
|
-
def _getvals(self, *keypath, return_defvalue=True):
|
|
600
|
-
"""
|
|
601
|
-
Returns all values (global and pernode) associated with a particular parameter.
|
|
602
|
-
|
|
603
|
-
Returns a list of tuples of the form (value, step, index). The list is
|
|
604
|
-
in no particular order. For the global value, step and index are None.
|
|
605
|
-
If return_defvalue is True, the default parameter value is added to the
|
|
606
|
-
list in place of a global value if a global value is not set.
|
|
607
|
-
"""
|
|
608
|
-
cfg = self.__search(*keypath)
|
|
609
|
-
|
|
610
|
-
if not Schema._is_leaf(cfg):
|
|
611
|
-
raise ValueError(f'Invalid keypath {keypath}: _getvals() '
|
|
612
|
-
'must be called on a complete keypath')
|
|
613
|
-
|
|
614
|
-
vals = []
|
|
615
|
-
has_global = False
|
|
616
|
-
for step in cfg['node']:
|
|
617
|
-
if step == 'default':
|
|
618
|
-
continue
|
|
619
|
-
|
|
620
|
-
for index in cfg['node'][step]:
|
|
621
|
-
step_arg = None if step == self.GLOBAL_KEY else step
|
|
622
|
-
index_arg = None if index == self.GLOBAL_KEY else index
|
|
623
|
-
if 'value' in cfg['node'][step][index]:
|
|
624
|
-
if step_arg is None and index_arg is None:
|
|
625
|
-
has_global = True
|
|
626
|
-
vals.append((cfg['node'][step][index]['value'], step_arg, index_arg))
|
|
627
|
-
|
|
628
|
-
if (PerNode(cfg['pernode']) != PerNode.REQUIRED) and not has_global and return_defvalue:
|
|
629
|
-
vals.append((cfg['node']['default']['default']['value'], None, None))
|
|
630
|
-
|
|
631
|
-
return vals
|
|
632
|
-
|
|
633
|
-
###########################################################################
|
|
634
|
-
def getkeys(self, *keypath, job=None):
|
|
635
|
-
"""
|
|
636
|
-
Returns a list of schema dictionary keys.
|
|
637
|
-
|
|
638
|
-
See :meth:`~siliconcompiler.core.Chip.getkeys` for detailed documentation.
|
|
639
|
-
"""
|
|
640
|
-
cfg = self.__search(*keypath, job=job, use_default=False)
|
|
641
|
-
keys = list(cfg.keys())
|
|
642
|
-
|
|
643
|
-
if 'default' in keys:
|
|
644
|
-
keys.remove('default')
|
|
645
|
-
|
|
646
|
-
return keys
|
|
647
|
-
|
|
648
|
-
###########################################################################
|
|
649
|
-
def getdict(self, *keypath):
|
|
650
|
-
"""
|
|
651
|
-
Returns a schema dictionary.
|
|
652
|
-
|
|
653
|
-
See :meth:`~siliconcompiler.core.Chip.getdict` for detailed
|
|
654
|
-
documentation.
|
|
655
|
-
"""
|
|
656
|
-
cfg = self.__search(*keypath)
|
|
657
|
-
return copy.deepcopy(cfg)
|
|
658
|
-
|
|
659
|
-
###########################################################################
|
|
660
|
-
def valid(self, *args, default_valid=False, job=None, check_complete=False):
|
|
661
|
-
"""
|
|
662
|
-
Checks validity of a keypath.
|
|
663
|
-
|
|
664
|
-
See :meth:`~siliconcompiler.core.Chip.valid` for detailed
|
|
665
|
-
documentation.
|
|
666
|
-
"""
|
|
667
|
-
keylist = list(args)
|
|
668
|
-
if default_valid:
|
|
669
|
-
default = 'default'
|
|
670
|
-
else:
|
|
671
|
-
default = None
|
|
672
|
-
|
|
673
|
-
if job is not None:
|
|
674
|
-
cfg = self.cfg['history'][job]
|
|
675
|
-
else:
|
|
676
|
-
cfg = self.cfg
|
|
677
|
-
|
|
678
|
-
for key in keylist:
|
|
679
|
-
if key in cfg:
|
|
680
|
-
cfg = cfg[key]
|
|
681
|
-
elif default_valid and default in cfg:
|
|
682
|
-
cfg = cfg[default]
|
|
683
|
-
else:
|
|
684
|
-
return False
|
|
685
|
-
if check_complete:
|
|
686
|
-
return Schema._is_leaf(cfg)
|
|
687
|
-
return True
|
|
688
|
-
|
|
689
|
-
##########################################################################
|
|
690
|
-
def has_field(self, *args):
|
|
691
|
-
keypath = args[:-1]
|
|
692
|
-
field = args[-1]
|
|
693
|
-
|
|
694
|
-
cfg = self.__search(*keypath)
|
|
695
|
-
return field in cfg
|
|
696
|
-
|
|
697
|
-
##########################################################################
|
|
698
|
-
def record_history(self):
|
|
699
|
-
'''
|
|
700
|
-
Copies all non-empty parameters from current job into the history
|
|
701
|
-
dictionary.
|
|
702
|
-
'''
|
|
703
|
-
|
|
704
|
-
# initialize new dict
|
|
705
|
-
jobname = self.get('option', 'jobname')
|
|
706
|
-
self.cfg['history'][jobname] = {}
|
|
707
|
-
|
|
708
|
-
# copy in all empty values of scope job
|
|
709
|
-
allkeys = self.allkeys()
|
|
710
|
-
for key in allkeys:
|
|
711
|
-
# ignore history in case of cumulative history
|
|
712
|
-
if key[0] != 'history':
|
|
713
|
-
scope = self.get(*key, field='scope')
|
|
714
|
-
if not self.is_empty(*key) and (scope == Scope.JOB):
|
|
715
|
-
self.__copyparam(self.cfg,
|
|
716
|
-
self.cfg['history'][jobname],
|
|
717
|
-
key)
|
|
718
|
-
|
|
719
|
-
@staticmethod
|
|
720
|
-
def __check_and_normalize(value, sc_type, field, keypath, allowed_values):
|
|
721
|
-
'''
|
|
722
|
-
This method validates that user-provided values match the expected type,
|
|
723
|
-
and returns a normalized version of the value.
|
|
724
|
-
|
|
725
|
-
The expected type is based on the schema parameter type string for
|
|
726
|
-
value-related fields, and is based on the field itself for other fields.
|
|
727
|
-
This function raises a TypeError if an illegal value is provided.
|
|
728
|
-
|
|
729
|
-
The normalization process provides some leeway in how users supply
|
|
730
|
-
values, while ensuring that values are stored consistently in the schema.
|
|
731
|
-
|
|
732
|
-
The normalization rules are as follows:
|
|
733
|
-
- If a scalar is provided for a list type, it is promoted to a list of
|
|
734
|
-
one element.
|
|
735
|
-
- If a list is provided for a tuple type, it is cast to a tuple (since
|
|
736
|
-
the JSON module serializes tuples as arrays, which are deserialized into
|
|
737
|
-
lists).
|
|
738
|
-
- Elements inside lists and tuples are normalized recursively.
|
|
739
|
-
- All non-list values have a string representation that gets cast to a
|
|
740
|
-
native Python type (since we receive strings from the CLI):
|
|
741
|
-
- bool: accepts "true" or "false"
|
|
742
|
-
- ints and floats: cast as if by int() or float()
|
|
743
|
-
- tuples: accepts comma-separated values surrounded by parens
|
|
744
|
-
'''
|
|
745
|
-
|
|
746
|
-
if value is None and not Schema.__is_list(field, sc_type):
|
|
747
|
-
# None is legal for all scalars, but not within collection types
|
|
748
|
-
# TODO: could consider normalizing "None" for lists to empty list?
|
|
749
|
-
return value
|
|
750
|
-
|
|
751
|
-
if field == 'value':
|
|
752
|
-
# Push down error_msg from the top since arguments get modified in recursive call
|
|
753
|
-
error_msg = f'Invalid value {value} for keypath {keypath}: expected type {sc_type}'
|
|
754
|
-
return Schema._normalize_value(value, sc_type, error_msg, allowed_values)
|
|
755
|
-
else:
|
|
756
|
-
return Schema.__normalize_field(value, sc_type, field, keypath)
|
|
757
|
-
|
|
758
|
-
@staticmethod
|
|
759
|
-
def _normalize_value(value, sc_type, error_msg, allowed_values):
|
|
760
|
-
if sc_type.startswith('['):
|
|
761
|
-
base_type = sc_type[1:-1]
|
|
762
|
-
|
|
763
|
-
# Need to try 2 different recursion strategies - if value is a list already, then we can
|
|
764
|
-
# recurse on it directly. However, if that doesn't work, then it might be a
|
|
765
|
-
# list-of-lists/tuples that needs to be wrapped in an outer list, so we try that.
|
|
766
|
-
if isinstance(value, (list, set, tuple)):
|
|
767
|
-
try:
|
|
768
|
-
return [Schema._normalize_value(v, base_type, error_msg, allowed_values)
|
|
769
|
-
for v in value]
|
|
770
|
-
except TypeError:
|
|
771
|
-
pass
|
|
772
|
-
|
|
773
|
-
value = [value]
|
|
774
|
-
return [Schema._normalize_value(v, base_type, error_msg, allowed_values) for v in value]
|
|
775
|
-
|
|
776
|
-
if sc_type.startswith('('):
|
|
777
|
-
# TODO: make parsing more robust to support tuples-of-tuples
|
|
778
|
-
if isinstance(value, str):
|
|
779
|
-
value = value[1:-1].split(',')
|
|
780
|
-
elif not (isinstance(value, tuple) or isinstance(value, list)):
|
|
781
|
-
raise TypeError(error_msg)
|
|
782
|
-
|
|
783
|
-
base_types = sc_type[1:-1].split(',')
|
|
784
|
-
if len(value) != len(base_types):
|
|
785
|
-
raise TypeError(error_msg)
|
|
786
|
-
return tuple(Schema._normalize_value(v, base_type, error_msg, allowed_values)
|
|
787
|
-
for v, base_type in zip(value, base_types))
|
|
788
|
-
|
|
789
|
-
if sc_type == 'bool':
|
|
790
|
-
if value == 'true':
|
|
791
|
-
return True
|
|
792
|
-
if value == 'false':
|
|
793
|
-
return False
|
|
794
|
-
if isinstance(value, bool):
|
|
795
|
-
return value
|
|
796
|
-
if isinstance(value, (int, float)):
|
|
797
|
-
return value != 0
|
|
798
|
-
raise TypeError(error_msg)
|
|
799
|
-
|
|
800
|
-
try:
|
|
801
|
-
if sc_type == 'int':
|
|
802
|
-
return int(value)
|
|
803
|
-
|
|
804
|
-
if sc_type == 'float':
|
|
805
|
-
return float(value)
|
|
806
|
-
except TypeError:
|
|
807
|
-
raise TypeError(error_msg) from None
|
|
808
|
-
|
|
809
|
-
if sc_type == 'str':
|
|
810
|
-
if isinstance(value, str):
|
|
811
|
-
return value
|
|
812
|
-
elif isinstance(value, bool):
|
|
813
|
-
return str(value).lower()
|
|
814
|
-
elif isinstance(value, (list, tuple)):
|
|
815
|
-
raise TypeError(error_msg)
|
|
816
|
-
else:
|
|
817
|
-
return str(value)
|
|
818
|
-
|
|
819
|
-
if sc_type in ('file', 'dir'):
|
|
820
|
-
if isinstance(value, (str, pathlib.Path)):
|
|
821
|
-
return str(value)
|
|
822
|
-
else:
|
|
823
|
-
raise TypeError(error_msg)
|
|
824
|
-
|
|
825
|
-
if sc_type == 'enum':
|
|
826
|
-
if isinstance(value, str):
|
|
827
|
-
if value in allowed_values:
|
|
828
|
-
return value
|
|
829
|
-
valid = ", ".join(allowed_values)
|
|
830
|
-
raise ValueError(error_msg + f", and value of {valid}")
|
|
831
|
-
else:
|
|
832
|
-
raise TypeError(error_msg)
|
|
833
|
-
|
|
834
|
-
raise ValueError(f'Invalid type specifier: {sc_type}')
|
|
835
|
-
|
|
836
|
-
@staticmethod
|
|
837
|
-
def __normalize_field(value, sc_type, field, keypath):
|
|
838
|
-
def error_msg(t):
|
|
839
|
-
return f'Invalid value {value} for field {field} of keypath {keypath}: expected {t}'
|
|
840
|
-
|
|
841
|
-
if field in ('author', 'date') and ('file' not in sc_type):
|
|
842
|
-
raise TypeError(f'Invalid field {field} for keypath {keypath}: '
|
|
843
|
-
'this field only exists for file parameters')
|
|
844
|
-
|
|
845
|
-
if field in ('copy', 'filehash', 'package', 'hashalgo') and \
|
|
846
|
-
('file' not in sc_type and 'dir' not in sc_type):
|
|
847
|
-
raise TypeError(f'Invalid field {field} for keypath {keypath}: '
|
|
848
|
-
'this field only exists for file and dir parameters')
|
|
849
|
-
|
|
850
|
-
is_list = Schema.__is_list(field, sc_type)
|
|
851
|
-
if field == 'package' and is_list:
|
|
852
|
-
if not isinstance(value, list):
|
|
853
|
-
value = [value]
|
|
854
|
-
if not all((v is None or isinstance(v, (str, pathlib.Path))) for v in value):
|
|
855
|
-
raise TypeError(error_msg('None, str or pathlib.Path'))
|
|
856
|
-
return value
|
|
857
|
-
|
|
858
|
-
if is_list:
|
|
859
|
-
if not value:
|
|
860
|
-
# Replace none with an empty list
|
|
861
|
-
value = []
|
|
862
|
-
|
|
863
|
-
if not isinstance(value, list):
|
|
864
|
-
value = [value]
|
|
865
|
-
|
|
866
|
-
if not all(isinstance(v, str) for v in value):
|
|
867
|
-
raise TypeError(error_msg('str'))
|
|
868
|
-
return value
|
|
869
|
-
|
|
870
|
-
if field == 'scope':
|
|
871
|
-
# Restricted allowed values
|
|
872
|
-
if isinstance(value, Scope):
|
|
873
|
-
return value.value
|
|
874
|
-
scope_values = [val.value for val in Scope]
|
|
875
|
-
if not (isinstance(value, str) and value in scope_values):
|
|
876
|
-
raise TypeError(error_msg(f'one of {", ".join(sorted(scope_values))}'))
|
|
877
|
-
return value
|
|
878
|
-
|
|
879
|
-
if field == 'pernode':
|
|
880
|
-
# Restricted allowed values
|
|
881
|
-
if isinstance(value, PerNode):
|
|
882
|
-
return value.value
|
|
883
|
-
pernode_values = [val.value for val in PerNode]
|
|
884
|
-
if not (isinstance(value, str) and value in pernode_values):
|
|
885
|
-
raise TypeError(f'Invalid value {value} for field {field}: '
|
|
886
|
-
f'expected one of {", ".join(sorted(pernode_values))}')
|
|
887
|
-
return value
|
|
888
|
-
|
|
889
|
-
if field in (
|
|
890
|
-
'type', 'switch', 'shorthelp', 'help', 'unit', 'hashalgo', 'notes',
|
|
891
|
-
'signature'
|
|
892
|
-
):
|
|
893
|
-
if not isinstance(value, str):
|
|
894
|
-
raise TypeError(error_msg('str'))
|
|
895
|
-
return value
|
|
896
|
-
|
|
897
|
-
if field in ('lock', 'copy', 'require'):
|
|
898
|
-
if value == 'true':
|
|
899
|
-
return True
|
|
900
|
-
if value == 'false':
|
|
901
|
-
return False
|
|
902
|
-
if isinstance(value, bool):
|
|
903
|
-
return value
|
|
904
|
-
else:
|
|
905
|
-
raise TypeError(error_msg('bool'))
|
|
906
|
-
|
|
907
|
-
if field in ('node',):
|
|
908
|
-
if isinstance(value, dict):
|
|
909
|
-
return value
|
|
910
|
-
else:
|
|
911
|
-
raise TypeError(f'Invalid value {value} for field {field}: expected dict')
|
|
912
|
-
|
|
913
|
-
raise ValueError(f'Invalid field {field} for keypath {keypath}')
|
|
914
|
-
|
|
915
|
-
@staticmethod
|
|
916
|
-
def __is_set(cfg, step=None, index=None):
|
|
917
|
-
'''Returns whether a user has set a value for this parameter.
|
|
918
|
-
|
|
919
|
-
A value counts as set if a user has set a global value OR a value for
|
|
920
|
-
the provided step/index.
|
|
921
|
-
'''
|
|
922
|
-
if Schema.GLOBAL_KEY in cfg['node'] and \
|
|
923
|
-
Schema.GLOBAL_KEY in cfg['node'][Schema.GLOBAL_KEY] and \
|
|
924
|
-
'value' in cfg['node'][Schema.GLOBAL_KEY][Schema.GLOBAL_KEY]:
|
|
925
|
-
# global value is set
|
|
926
|
-
return True
|
|
927
|
-
|
|
928
|
-
if step is None:
|
|
929
|
-
return False
|
|
930
|
-
if index is None:
|
|
931
|
-
index = Schema.GLOBAL_KEY
|
|
932
|
-
|
|
933
|
-
return step in cfg['node'] and \
|
|
934
|
-
index in cfg['node'][step] and \
|
|
935
|
-
'value' in cfg['node'][step][index]
|
|
936
|
-
|
|
937
|
-
@staticmethod
|
|
938
|
-
def _is_leaf(cfg):
|
|
939
|
-
# 'shorthelp' chosen arbitrarily: any mandatory field with a consistent
|
|
940
|
-
# type would work.
|
|
941
|
-
return 'shorthelp' in cfg and isinstance(cfg['shorthelp'], str)
|
|
942
|
-
|
|
943
|
-
@staticmethod
|
|
944
|
-
def __is_list(field, type):
|
|
945
|
-
if field in ('filehash', 'date', 'author', 'example', 'enum', 'switch', 'package'):
|
|
946
|
-
return True
|
|
947
|
-
|
|
948
|
-
is_list = type.startswith('[')
|
|
949
|
-
if is_list and field in ('signature', 'value'):
|
|
950
|
-
return True
|
|
951
|
-
|
|
952
|
-
return False
|
|
953
|
-
|
|
954
|
-
@staticmethod
|
|
955
|
-
def __validate_step_index(pernode, field, step, index):
|
|
956
|
-
'''Shared validation logic for the step and index keyword arguments to
|
|
957
|
-
get(), set(), and add(), based on the pernode setting of a parameter and
|
|
958
|
-
field.
|
|
959
|
-
|
|
960
|
-
Returns an error message if there's a problem with the arguments,
|
|
961
|
-
otherwise None.
|
|
962
|
-
'''
|
|
963
|
-
if field not in Schema.PERNODE_FIELDS:
|
|
964
|
-
if step is not None or index is not None:
|
|
965
|
-
return 'step and index are only valid for value fields'
|
|
966
|
-
return None
|
|
967
|
-
|
|
968
|
-
if PerNode(pernode) == PerNode.NEVER and (step is not None or index is not None):
|
|
969
|
-
return 'step and index are not valid for this parameter'
|
|
970
|
-
|
|
971
|
-
if PerNode(pernode) == PerNode.REQUIRED and (step is None or index is None):
|
|
972
|
-
return 'step and index are required for this parameter'
|
|
973
|
-
|
|
974
|
-
if step is None and index is not None:
|
|
975
|
-
return 'if index is provided, step must be provided as well'
|
|
976
|
-
|
|
977
|
-
# Step and index for default should be accessed set_/get_default
|
|
978
|
-
if step == 'default':
|
|
979
|
-
return f'illegal step name: {step} is reserved'
|
|
980
|
-
|
|
981
|
-
if index == 'default':
|
|
982
|
-
return f'illegal index name: {step} is reserved'
|
|
983
|
-
|
|
984
|
-
return None
|
|
985
|
-
|
|
986
|
-
def __search(self, *keypath, insert_defaults=False, use_default=True, job=None):
|
|
987
|
-
if job is not None:
|
|
988
|
-
cfg = self.cfg['history'][job]
|
|
989
|
-
else:
|
|
990
|
-
cfg = self.cfg
|
|
991
|
-
|
|
992
|
-
for key in keypath:
|
|
993
|
-
if not isinstance(key, str):
|
|
994
|
-
raise TypeError(f'Invalid keypath {keypath}: key is not a string: {key}')
|
|
995
|
-
|
|
996
|
-
if Schema._is_leaf(cfg):
|
|
997
|
-
raise ValueError(f'Invalid keypath {keypath}: unexpected key: {key}')
|
|
998
|
-
|
|
999
|
-
if key in cfg:
|
|
1000
|
-
cfg = cfg[key]
|
|
1001
|
-
elif 'default' in cfg:
|
|
1002
|
-
cfg_default = cfg['default']
|
|
1003
|
-
if insert_defaults:
|
|
1004
|
-
if Schema._is_leaf(cfg_default) and cfg_default['lock']:
|
|
1005
|
-
raise ValueError(f'{keypath} is locked and key cannot be added')
|
|
1006
|
-
|
|
1007
|
-
cfg[key] = copy.deepcopy(cfg_default)
|
|
1008
|
-
cfg = cfg[key]
|
|
1009
|
-
elif use_default:
|
|
1010
|
-
cfg = cfg_default
|
|
1011
|
-
else:
|
|
1012
|
-
raise ValueError(f'Invalid keypath {keypath}: unexpected key: {key}')
|
|
1013
|
-
else:
|
|
1014
|
-
raise ValueError(f'Invalid keypath {keypath}: unexpected key: {key}')
|
|
1015
|
-
|
|
1016
|
-
return cfg
|
|
1017
|
-
|
|
1018
|
-
###########################################################################
|
|
1019
|
-
def allkeys(self, *keypath_prefix):
|
|
1020
|
-
'''
|
|
1021
|
-
Returns all keypaths in the schema as a list of lists.
|
|
1022
|
-
|
|
1023
|
-
See :meth:`~siliconcompiler.core.Chip.allkeys` for detailed documentation.
|
|
1024
|
-
'''
|
|
1025
|
-
if len(keypath_prefix) > 0:
|
|
1026
|
-
return self.__allkeys(cfg=self.getdict(*keypath_prefix))
|
|
1027
|
-
else:
|
|
1028
|
-
return self.__allkeys()
|
|
1029
|
-
|
|
1030
|
-
###########################################################################
|
|
1031
|
-
def __allkeys(self, cfg=None, base_key=None):
|
|
1032
|
-
if cfg is None:
|
|
1033
|
-
cfg = self.cfg
|
|
1034
|
-
|
|
1035
|
-
if Schema._is_leaf(cfg):
|
|
1036
|
-
return []
|
|
1037
|
-
|
|
1038
|
-
keylist = []
|
|
1039
|
-
if base_key is None:
|
|
1040
|
-
base_key = []
|
|
1041
|
-
for k in cfg:
|
|
1042
|
-
key = (*base_key, k)
|
|
1043
|
-
if Schema._is_leaf(cfg[k]):
|
|
1044
|
-
keylist.append(key)
|
|
1045
|
-
else:
|
|
1046
|
-
keylist.extend(self.__allkeys(cfg=cfg[k], base_key=key))
|
|
1047
|
-
return keylist
|
|
1048
|
-
|
|
1049
|
-
###########################################################################
|
|
1050
|
-
def __copyparam(self, cfgsrc, cfgdst, keypath):
|
|
1051
|
-
'''
|
|
1052
|
-
Copies a parameter into the manifest history dictionary.
|
|
1053
|
-
'''
|
|
1054
|
-
|
|
1055
|
-
# 1. descend keypath, pop each key as its used
|
|
1056
|
-
# 2. create key if missing in destination dict
|
|
1057
|
-
# 3. populate leaf cell when keypath empty
|
|
1058
|
-
if keypath:
|
|
1059
|
-
keypath = list(keypath)
|
|
1060
|
-
key = keypath[0]
|
|
1061
|
-
keypath.pop(0)
|
|
1062
|
-
if key not in cfgdst.keys():
|
|
1063
|
-
cfgdst[key] = {}
|
|
1064
|
-
self.__copyparam(cfgsrc[key], cfgdst[key], keypath)
|
|
1065
|
-
else:
|
|
1066
|
-
for key in cfgsrc.keys():
|
|
1067
|
-
if key not in ('example', 'switch', 'help'):
|
|
1068
|
-
cfgdst[key] = copy.deepcopy(cfgsrc[key])
|
|
1069
|
-
|
|
1070
|
-
###########################################################################
|
|
1071
|
-
def write_json(self, fout):
|
|
1072
|
-
localcfg = {**self.cfg}
|
|
1073
|
-
if self.__journal is not None:
|
|
1074
|
-
localcfg['__journal__'] = self.__journal
|
|
1075
|
-
if _has_orjson:
|
|
1076
|
-
manifest_str = json.dumps(localcfg, option=json.OPT_INDENT_2).decode()
|
|
1077
|
-
else:
|
|
1078
|
-
manifest_str = json.dumps(localcfg, indent=2)
|
|
1079
|
-
fout.write(manifest_str)
|
|
1080
|
-
|
|
1081
|
-
###########################################################################
|
|
1082
|
-
def write_yaml(self, fout):
|
|
1083
|
-
if not _has_yaml:
|
|
1084
|
-
raise ImportError('yaml package required to write YAML manifest')
|
|
1085
|
-
fout.write(yaml.dump(self.cfg, Dumper=YamlIndentDumper, default_flow_style=False))
|
|
1086
|
-
|
|
1087
|
-
###########################################################################
|
|
1088
|
-
def write_tcl(self, fout, prefix="", step=None, index=None, template=None):
|
|
1089
|
-
'''
|
|
1090
|
-
Prints out schema as TCL dictionary
|
|
1091
|
-
'''
|
|
1092
|
-
|
|
1093
|
-
tcl_set_cmds = []
|
|
1094
|
-
for key in self.allkeys():
|
|
1095
|
-
# print out all non default values
|
|
1096
|
-
if 'default' in key:
|
|
1097
|
-
continue
|
|
1098
|
-
|
|
1099
|
-
typestr = self.get(*key, field='type')
|
|
1100
|
-
pernode = self.get(*key, field='pernode')
|
|
1101
|
-
|
|
1102
|
-
if PerNode(pernode) == PerNode.REQUIRED and (step is None or index is None):
|
|
1103
|
-
# Skip mandatory per-node parameters if step and index are not specified
|
|
1104
|
-
# TODO: how should we dump these?
|
|
1105
|
-
continue
|
|
1106
|
-
|
|
1107
|
-
if not pernode.is_never():
|
|
1108
|
-
value = self.get(*key, step=step, index=index)
|
|
1109
|
-
else:
|
|
1110
|
-
value = self.get(*key)
|
|
1111
|
-
|
|
1112
|
-
# create a TCL dict
|
|
1113
|
-
keystr = ' '.join([escape_val_tcl(keypart, 'str') for keypart in key])
|
|
1114
|
-
|
|
1115
|
-
valstr = escape_val_tcl(value, typestr)
|
|
1116
|
-
|
|
1117
|
-
# Ensure empty values get something
|
|
1118
|
-
if valstr == '':
|
|
1119
|
-
valstr = '{}'
|
|
1120
|
-
|
|
1121
|
-
tcl_set_cmds.append(f"{prefix} {keystr} {valstr}")
|
|
1122
|
-
|
|
1123
|
-
if template:
|
|
1124
|
-
fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
|
|
1125
|
-
scroot=os.path.abspath(
|
|
1126
|
-
os.path.join(os.path.dirname(__file__), '..')),
|
|
1127
|
-
record_access=self._do_record_access(),
|
|
1128
|
-
record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
|
|
1129
|
-
else:
|
|
1130
|
-
for cmd in tcl_set_cmds:
|
|
1131
|
-
fout.write(cmd + '\n')
|
|
1132
|
-
fout.write('\n')
|
|
1133
|
-
|
|
1134
|
-
###########################################################################
|
|
1135
|
-
def write_csv(self, fout):
|
|
1136
|
-
if not _has_csv:
|
|
1137
|
-
raise RuntimeError("csv is not available")
|
|
1138
|
-
|
|
1139
|
-
csvwriter = csv.writer(fout)
|
|
1140
|
-
csvwriter.writerow(['Keypath', 'Value'])
|
|
1141
|
-
|
|
1142
|
-
allkeys = self.allkeys()
|
|
1143
|
-
for key in allkeys:
|
|
1144
|
-
keypath = ','.join(key)
|
|
1145
|
-
for value, step, index in self._getvals(*key):
|
|
1146
|
-
if step is None and index is None:
|
|
1147
|
-
keypath = ','.join(key)
|
|
1148
|
-
elif index is None:
|
|
1149
|
-
keypath = ','.join([*key, step, 'default'])
|
|
1150
|
-
else:
|
|
1151
|
-
keypath = ','.join([*key, step, index])
|
|
1152
|
-
|
|
1153
|
-
if isinstance(value, list):
|
|
1154
|
-
for item in value:
|
|
1155
|
-
csvwriter.writerow([keypath, item])
|
|
1156
|
-
else:
|
|
1157
|
-
csvwriter.writerow([keypath, value])
|
|
1158
|
-
|
|
1159
|
-
-    ###########################################################################
-    def copy(self):
-        '''Returns deep copy of Schema object.'''
-        newscheme = Schema(cfg=self.cfg)
-        if self.__journal:
-            newscheme.__journal = copy.deepcopy(self.__journal)
-        return newscheme
-
-    ###########################################################################
-    def prune(self):
-        '''Remove all empty parameters from configuration dictionary.
-
-        Also deletes 'help' and 'example' keys.
-        '''
-        # When at top of tree loop maxdepth times to make sure all stale
-        # branches have been removed, not elegant, but stupid-simple
-        # "good enough"
-
-        # 10 should be enough for anyone...
-        maxdepth = 10
-
-        for _ in range(maxdepth):
-            self.__prune()
-
-    ###########################################################################
-    def __prune(self, *keypath):
-        '''
-        Internal recursive function that creates a local copy of the Chip
-        schema (cfg) with only essential non-empty parameters retained.
-
-        '''
-        cfg = self.__search(*keypath)
-
-        # Prune when the default & value are set to the following
-        # Loop through all keys starting at the top
-        for k in list(cfg.keys()):
-            # removing all default/template keys
-            # reached a default subgraph, delete it
-            if k == 'default':
-                del cfg[k]
-            # reached leaf-cell
-            elif 'help' in cfg[k].keys():
-                del cfg[k]['help']
-            elif 'example' in cfg[k].keys():
-                del cfg[k]['example']
-            elif Schema._is_leaf(cfg[k]):
-                pass
-            # removing stale branches
-            elif not cfg[k]:
-                cfg.pop(k)
-            # keep traversing tree
-            else:
-                self.__prune(*keypath, k)
-
-    ###########################################################################
-    def is_empty(self, *keypath):
-        '''
-        Utility function to check key for an empty value.
-        '''
-        empty = (None, [])
-
-        values = self._getvals(*keypath)
-        defvalue = self.get_default(*keypath)
-        value_empty = (defvalue in empty) and \
-            all([value in empty for value, _, _ in values])
-        return value_empty
-
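A standalone sketch of the same bounded-depth pruning idea on a plain nested dict, assuming only that empty branches and empty leaf values should disappear:

    def prune(tree, maxdepth=10):
        # repeat a few passes so branches emptied by one pass get dropped too
        for _ in range(maxdepth):
            _prune_once(tree)
        return tree

    def _prune_once(node):
        for key in list(node.keys()):
            child = node[key]
            if isinstance(child, dict):
                _prune_once(child)
                if not child:              # branch became empty
                    del node[key]
            elif child in (None, []):      # empty leaf value
                del node[key]

    cfg = {'a': {'b': {}, 'c': {'d': None}}, 'e': 1}
    print(prune(cfg))   # {'e': 1}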
-    ###########################################################################
-    def history(self, job):
-        '''
-        Returns a *mutable* reference to ['history', job] as a Schema object.
-
-        If job doesn't currently exist in history, create it with default
-        values.
-
-        Args:
-            job (str): Name of historical job to return.
-        '''
-        if job not in self.cfg['history']:
-            self.cfg['history'][job] = self._init_schema_cfg()
-
-        # Can't initialize Schema() by passing in cfg since it performs a deep
-        # copy.
-        schema = Schema()
-        schema.cfg = self.cfg['history'][job]
-        return schema
-
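A hedged usage sketch (assuming the removed Schema class): because history() hands back a mutable view rather than a copy, edits made through the returned object land in the parent schema.

    schema = Schema()
    job0 = schema.history('job0')           # created with defaults on first access
    job0.set('design', 'heartbeat')         # writes through into ['history', 'job0', ...]
    print(schema.history('job0').get('design'))   # 'heartbeat'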
-    #######################################
-    def _init_logger(self, parent=None):
-        if parent:
-            # If parent provided, create a child logger
-            self.logger = parent.getChild('schema')
-        else:
-            # Check if the logger exists and create
-            if not hasattr(self, 'logger') or not self.logger:
-                self.logger = logging.getLogger(f'sc_schema_{id(self)}')
-                self.logger.propagate = False
-
-    #######################################
-    def __getstate__(self):
-        attributes = self.__dict__.copy()
-
-        # We have to remove the chip's logger before serializing the object
-        # since the logger object is not serializable.
-        del attributes['logger']
-        return attributes
-
-    #######################################
-    def __setstate__(self, state):
-        self.__dict__ = state
-
-        # Reinitialize logger on restore
-        self._init_logger()
-
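A minimal sketch of the same __getstate__/__setstate__ pattern, mirroring the removed code's choice to drop the logger before pickling and recreate it on restore:

    import logging
    import pickle

    class HasLogger:
        def __init__(self):
            self.logger = logging.getLogger(f'demo_{id(self)}')
            self.value = 42

        def __getstate__(self):
            state = self.__dict__.copy()
            del state['logger']        # drop the logger, as the removed code does
            return state

        def __setstate__(self, state):
            self.__dict__ = state
            self.logger = logging.getLogger(f'demo_{id(self)}')   # rebuild on load

    obj = pickle.loads(pickle.dumps(HasLogger()))
    print(obj.value)   # 42, with a fresh logger attached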
-    #######################################
-    def __record_journal(self, record_type, key, value=None, field=None, step=None, index=None):
-        '''
-        Record the schema transaction
-        '''
-        if self.__journal is None:
-            return
-
-        self.__journal.append({
-            "type": record_type,
-            "key": key,
-            "value": value,
-            "field": field,
-            "step": step,
-            "index": index
-        })
-
-    #######################################
-    def _start_journal(self):
-        '''
-        Start journaling the schema transactions
-        '''
-        self.__journal = []
-
-    #######################################
-    def _stop_journal(self):
-        '''
-        Stop journaling the schema transactions
-        '''
-        self.__journal = None
-
-    #######################################
-    def read_journal(self, filename):
-        '''
-        Reads a manifest and replays the journal
-        '''
-
-        schema = Schema(logger=self.logger)
-        _, schema.__journal = Schema.__read_manifest_file(str(filename))
-        self._import_journal(schema)
-
-    #######################################
-    def _import_journal(self, schema):
-        '''
-        Import the journaled transactions from a different schema
-        '''
-        if not schema.__journal:
-            return
-
-        for action in schema.__journal:
-            record_type = action['type']
-            keypath = action['key']
-            value = action['value']
-            field = action['field']
-            step = action['step']
-            index = action['index']
-            try:
-                if record_type == 'set':
-                    cfg = self.__search(*keypath, insert_defaults=True)
-                    self.__set(*keypath, value, logger=self.logger, cfg=cfg, field=field,
-                               step=step, index=index, journal_callback=None)
-                elif record_type == 'add':
-                    cfg = self.__search(*keypath, insert_defaults=True)
-                    self._add(*keypath, value, cfg=cfg, field=field, step=step, index=index)
-                elif record_type == 'unset':
-                    self.unset(*keypath, step=step, index=index)
-                elif record_type == 'remove':
-                    self.remove(*keypath)
-                else:
-                    raise ValueError(f'Unknown record type {record_type}')
-            except Exception as e:
-                self.logger.error(f'Exception: {e}')
-
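A standalone sketch of the journal/replay idea: record each mutation as a small dict, then re-apply the records against another store in order (only 'set' is shown; the class and names are illustrative):

    class Journaled:
        def __init__(self):
            self.data = {}
            self.journal = None

        def start_journal(self):
            self.journal = []

        def set(self, key, value):
            self.data[key] = value
            if self.journal is not None:
                self.journal.append({'type': 'set', 'key': key, 'value': value})

        def replay(self, journal):
            for action in journal:
                if action['type'] == 'set':
                    self.data[action['key']] = action['value']

    a = Journaled()
    a.start_journal()
    a.set('design', 'heartbeat')

    b = Journaled()
    b.replay(a.journal)
    print(b.data)   # {'design': 'heartbeat'}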
-    #######################################
-    def _do_record_access(self):
-        '''
-        Determine if Schema should record calls to .get
-        '''
-        return False
-
-    #######################################
-    def _init_record_access(self):
-        '''
-        Initialize record access data record
-        '''
-        self.__record_access = {
-            "do": self._do_record_access(),
-            "recording": False,
-            "record": set()
-        }
-
-    #######################################
-    def _start_record_access(self):
-        '''
-        Start recording calls to .get
-        '''
-        self.__record_access["recording"] = True
-
-    #######################################
-    def _stop_record_access(self):
-        '''
-        Stop recording calls to .get
-        '''
-        self.__record_access["recording"] = False
-
-    #######################################
-    def _get_record_access(self):
-        '''
-        Return calls to record_access
-        '''
-        return self.__record_access["record"].copy()
-
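A sketch of the access-recording hook above: while recording is enabled, every key touched by get() is added to a set, so a caller can later ask which parameters were actually read. The class below is a toy stand-in, not the removed implementation:

    class AccessRecorder:
        def __init__(self, data):
            self.data = data
            self.recording = False
            self.record = set()

        def get(self, *keypath):
            if self.recording:
                self.record.add(keypath)
            return self.data[keypath]

    store = AccessRecorder({('design',): 'heartbeat', ('option', 'idir'): ['./rtl']})
    store.recording = True
    store.get('design')
    print(store.record)   # {('design',)}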
-    #######################################
-    def get_default(self, *keypath):
-        '''Returns default value of a parameter.
-
-        Args:
-            keypath(list str): Variable length schema key list.
-        '''
-        cfg = self.__search(*keypath)
-
-        if not Schema._is_leaf(cfg):
-            raise ValueError(f'Invalid keypath {keypath}: get_default() '
-                             'must be called on a complete keypath')
-
-        return cfg['node']['default']['default']['value']
-
-    #######################################
-    def set_default(self, *args):
-        '''Sets the default value of a parameter.
-
-        Args:
-            args (list str): Variable length schema key list and value.
-        '''
-        keypath = args[:-1]
-        value = args[-1]
-        cfg = self.__search(*keypath)
-
-        if not Schema._is_leaf(cfg):
-            raise ValueError(f'Invalid keypath {keypath}: set_default() '
-                             'must be called on a complete keypath')
-
-        allowed_values = None
-        if 'enum' in cfg:
-            allowed_values = cfg['enum']
-
-        cfg['node']['default']['default']['value'] = Schema.__check_and_normalize(
-            value, cfg['type'], 'value', keypath, allowed_values)
-
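A sketch of the storage convention these two accessors imply: per-parameter values live under cfg['node'][step][index], with the default sitting at the reserved ('default', 'default') slot. The 'global' key and sample values below are illustrative, not copied from the schema:

    param = {
        'type': 'str',
        'node': {
            'default': {'default': {'value': 'gcd'}},       # the parameter default
            'global': {'global': {'value': 'heartbeat'}},   # a value stored by set()
        },
    }

    def get_default(param):
        return param['node']['default']['default']['value']

    print(get_default(param))   # 'gcd'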
-    ###########################################################################
-    def create_cmdline(self,
-                       progname,
-                       description=None,
-                       switchlist=None,
-                       input_map=None,
-                       additional_args=None,
-                       version=None,
-                       print_banner=None,
-                       input_map_handler=None,
-                       preprocess_keys=None,
-                       post_process=None,
-                       logger=None):
-        """Creates a Schema command line interface.
-
-        Exposes parameters in the SC schema as command line switches,
-        simplifying creation of SC apps with a restricted set of schema
-        parameters exposed at the command line. The order of command
-        line switch settings parsed from the command line is as follows:
-
-        1. loglevel, if available in schema
-        2. read_manifest([cfg]), if available in schema
-        3. read inputs with input_map_handler
-        4. all other switches
-        5. Run post_process
-
-        The cmdline interface is implemented using the Python argparse package
-        and the following use restrictions apply.
-
-        * Help is accessed with the '-h' switch.
-        * Arguments that include spaces must be enclosed with double quotes.
-        * List parameters are entered individually. (ie. -y libdir1 -y libdir2)
-        * For parameters with Boolean types, the switch implies "true".
-        * Special characters (such as '-') must be enclosed in double quotes.
-        * Compiler compatible switches include: -D, -I, -O{0,1,2,3}
-        * Legacy switch formats are supported: +libext+, +incdir+
-
-        Args:
-            progname (str): Name of program to be executed.
-            description (str): Short program description.
-            switchlist (list of str): List of SC parameter switches to expose
-                at the command line. By default all SC schema switches are
-                available. Parameter switches should be entered based on the
-                parameter 'switch' field in the schema. For parameters with
-                multiple switches, both will be accepted if any one is included
-                in this list.
-            input_map (dict of str): Dictionary mapping file extensions to input
-                filetypes. This is used to automatically assign positional
-                source arguments to ['input', 'fileset', ...] keypaths based on their file
-                extension. If None, the CLI will not accept positional source
-                arguments.
-            additional_args (dict of dict): Dictionary of extra arguments to add
-                to the command line parser, with the arguments matching the
-                argparse.add_argument() call.
-            version (str): Version to report when calling with -version
-            print_banner (function): Function callback to print command line banner
-            input_map_handler (function): Function callback handle inputs to the input map
-            preprocess_keys (function): Function callback to preprocess keys that need to be
-                corrected
-            post_process (function): Function callback to process arguments before returning
-
-        Returns:
-            None if additional_args is not provided, otherwise a dictionary with the
-            command line options detected from the additional_args
-
-        Examples:
-            >>> schema.create_cmdline(progname='sc-show',switchlist=['-input','-cfg'])
-            Creates a command line interface for 'sc-show' app.
-            >>> schema.create_cmdline(progname='sc', input_map={'v': ('rtl', 'verilog')})
-            All sources ending in .v will be stored in ['input', 'rtl', 'verilog']
-            >>> extra = schema.create_cmdline(progname='sc',
-                                              additional_args={'-demo': {'action': 'store_true'}})
-            Returns extra = {'demo': False/True}
-        """
-
-        if not logger:
-            logger = self.logger
-
-        # Argparse
-        parser = argparse.ArgumentParser(prog=progname,
-                                         prefix_chars='-+',
-                                         formatter_class=argparse.RawDescriptionHelpFormatter,
-                                         description=description,
-                                         allow_abbrev=False)
-
-        # Get a new schema, in case values have already been set
-        schema_class = type(self)
-        schema = schema_class(logger=self.logger)
-
-        # Iterate over all keys from an empty schema to add parser arguments
-        used_switches = set()
-        for keypath in schema.allkeys():
-            # Fetch fields from leaf cell
-            helpstr = schema.get(*keypath, field='shorthelp')
-            typestr = schema.get(*keypath, field='type')
-            pernodestr = schema.get(*keypath, field='pernode')
-
-            # argparse 'dest' must be a string, so join keypath with commas
-            dest = '_'.join(keypath)
-
-            switchstrs, metavar = self.__get_switches(schema, *keypath)
-
-            # Three switch types (bool, list, scalar)
-            if switchlist is None or any(switch in switchlist for switch in switchstrs):
-                used_switches.update(switchstrs)
-                if typestr == 'bool':
-                    # Boolean type arguments
-                    if pernodestr.is_never():
-                        parser.add_argument(*switchstrs,
-                                            nargs='?',
-                                            metavar=metavar,
-                                            dest=dest,
-                                            const='true',
-                                            help=helpstr,
-                                            default=argparse.SUPPRESS)
-                    else:
-                        parser.add_argument(*switchstrs,
-                                            metavar=metavar,
-                                            nargs='?',
-                                            dest=dest,
-                                            action='append',
-                                            help=helpstr,
-                                            default=argparse.SUPPRESS)
-                elif '[' in typestr or not pernodestr.is_never():
-                    # list type arguments
-                    parser.add_argument(*switchstrs,
-                                        metavar=metavar,
-                                        dest=dest,
-                                        action='append',
-                                        help=helpstr,
-                                        default=argparse.SUPPRESS)
-                else:
-                    # all the rest
-                    parser.add_argument(*switchstrs,
-                                        metavar=metavar,
-                                        dest=dest,
-                                        help=helpstr,
-                                        default=argparse.SUPPRESS)
-
-        print_additional_arg_value = {}
-        if additional_args:
-            # Add additional user specified arguments
-            arg_dests = []
-            for arg, arg_detail in additional_args.items():
-                do_print = True
-                if "sc_print" in arg_detail:
-                    do_print = arg_detail["sc_print"]
-                    del arg_detail["sc_print"]
-                argument = parser.add_argument(arg, **arg_detail)
-                print_additional_arg_value[argument.dest] = do_print
-
-                arg_dests.append(argument.dest)
-                used_switches.add(arg)
-            # rewrite additional_args with new dest information
-            additional_args = arg_dests
-
-        if version:
-            parser.add_argument('-version', action='version', version=version)
-
-        # Check if there are invalid switches
-        if switchlist:
-            for switch in switchlist:
-                if switch not in used_switches:
-                    raise ValueError(f'{switch} is not a valid commandline argument')
-
-        if input_map is not None and input_map_handler:
-            parser.add_argument('source',
-                                nargs='*',
-                                help='Input files with filetype inferred by extension')
-
-        # Preprocess sys.argv to enable linux commandline switch formats
-        # (gcc, verilator, etc)
-        scargs = []
-
-        # Iterate from index 1, otherwise we end up with script name as a
-        # 'source' positional argument
-        for argument in sys.argv[1:]:
-            # Split switches with one character and a number after (O0,O1,O2)
-            opt = re.match(r'(\-\w)(\d+)', argument)
-            # Split assign switches (-DCFG_ASIC=1)
-            assign = re.search(r'(\-\w)(\w+\=\w+)', argument)
-            # Split plusargs (+incdir+/path)
-            plusarg = re.search(r'(\+\w+\+)(.*)', argument)
-            if opt:
-                scargs.append(opt.group(1))
-                scargs.append(opt.group(2))
-            elif plusarg:
-                scargs.append(plusarg.group(1))
-                scargs.append(plusarg.group(2))
-            elif assign:
-                scargs.append(assign.group(1))
-                scargs.append(assign.group(2))
-            else:
-                scargs.append(argument)
-
-        # Grab argument from pre-process sysargs
-        cmdargs = vars(parser.parse_args(scargs))
-
-        # Set loglevel if set at command line
-        do_print_banner = True
-        if 'option_loglevel' in cmdargs.keys():
-            log_level = cmdargs['option_loglevel']
-            if isinstance(log_level, list):
-                # if multiple found, pick the first one
-                log_level = log_level[0]
-            if log_level == 'quiet':
-                do_print_banner = False
-            logger.setLevel(translate_loglevel(log_level).split()[-1])
-
-        if print_banner and do_print_banner:
-            print_banner()
-
-        extra_params = None
-        if additional_args:
-            # Grab user specified arguments
-            extra_params = {}
-            for arg in additional_args:
-                if arg in cmdargs:
-                    val = cmdargs[arg]
-                    if print_additional_arg_value[arg] and val:
-                        msg = f'Command line argument entered: "{arg}" Value: {val}'
-                        self.logger.info(msg)
-                    extra_params[arg] = val
-                    # Remove from cmdargs
-                    del cmdargs[arg]
-
-        # Read in all cfg files
-        if 'option_cfg' in cmdargs.keys():
-            for item in cmdargs['option_cfg']:
-                self.read_manifest(item, clobber=True, clear=True, allow_missing_keys=True)
-
-        if input_map_handler:
-            # Map sources to ['input'] keypath.
-            if 'source' in cmdargs:
-                input_map_handler(cmdargs['source'])
-                # we don't want to handle this in the next loop
-                del cmdargs['source']
-
-        # Cycle through all command args and write to manifest
-        for dest, vals in cmdargs.items():
-            keypath = dest.split('_')
-
-            # Turn everything into a list for uniformity
-            if not isinstance(vals, list):
-                vals = [vals]
-
-            # Cycle through all items
-            for item in vals:
-                if item is None:
-                    # nargs=? leaves a None for booleans
-                    item = ''
-
-                if preprocess_keys:
-                    item = preprocess_keys(keypath, item)
-
-                num_free_keys = keypath.count('default')
-
-                switches, metavar = self.__get_switches(schema, *keypath)
-                switchstr = '/'.join(switches)
-
-                if len(item.split(' ')) < num_free_keys + 1:
-                    # Error out if value provided doesn't have enough words to
-                    # fill in 'default' keys.
-                    raise ValueError(f'Invalid value {item} for switch {switchstr}. '
-                                     f'Expected format {metavar}.')
-
-                # We replace 'default' in keypath with first N words in provided
-                # value.
-                *free_keys, remainder = item.split(' ', num_free_keys)
-                args = [free_keys.pop(0) if key == 'default' else key for key in keypath]
-
-                # Remainder is the value we want to set, possibly with a step/index value beforehand
-                sctype = self.get(*keypath, field='type')
-                pernode = self.get(*keypath, field='pernode')
-                step, index = None, None
-                if PerNode(pernode) == PerNode.REQUIRED:
-                    try:
-                        step, index, val = remainder.split(' ', 2)
-                    except ValueError:
-                        self.logger.error(f"Invalid value '{item}' for switch {switchstr}. "
-                                          "Requires step and index before final value.")
-                elif PerNode(pernode) == PerNode.OPTIONAL:
-                    # Split on spaces, preserving items that are grouped in quotes
-                    items = shlex.split(remainder)
-                    if len(items) > 3:
-                        self.logger.error(f"Invalid value '{item}'' for switch {switchstr}. "
-                                          "Too many arguments, please wrap multiline "
-                                          "strings in quotes.")
-                        continue
-                    if sctype == 'bool':
-                        if len(items) == 3:
-                            step, index, val = items
-                        elif len(items) == 2:
-                            step, val = items
-                            if val != 'true' and val != 'false':
-                                index = val
-                                val = True
-                        elif len(items) == 1:
-                            val, = items
-                            if val != 'true' and val != 'false':
-                                step = val
-                                val = True
-                        else:
-                            val = True
-                    else:
-                        if len(items) == 3:
-                            step, index, val = items
-                        elif len(items) == 2:
-                            step, val = items
-                        else:
-                            val, = items
-                else:
-                    val = remainder
-
-                msg = f'Command line argument entered: {args} Value: {val}'
-                if step is not None:
-                    msg += f' Step: {step}'
-                if index is not None:
-                    msg += f' Index: {index}'
-                self.logger.info(msg)
-
-                # Storing in manifest
-                typestr = schema.get(*keypath, field='type')
-                if typestr.startswith('['):
-                    if self.valid(*args):
-                        self.add(*args, val, step=step, index=index)
-                    else:
-                        self.set(*args, val, step=step, index=index, clobber=True)
-                else:
-                    self.set(*args, val, step=step, index=index, clobber=True)
-
-        if post_process:
-            extra_params = post_process(cmdargs, extra_params)
-
-        return extra_params
-
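A standalone demo of the sys.argv pre-splitting done above: -O3, -DCFG_ASIC=1 and +incdir+/path style switches are split into (switch, value) pairs so a conventional argparse parser can consume them. The helper name is illustrative; the regexes are the ones from the removed code:

    import re

    def presplit(argument):
        opt = re.match(r'(\-\w)(\d+)', argument)
        assign = re.search(r'(\-\w)(\w+\=\w+)', argument)
        plusarg = re.search(r'(\+\w+\+)(.*)', argument)
        for match in (opt, plusarg, assign):
            if match:
                return [match.group(1), match.group(2)]
        return [argument]

    for arg in ['-O3', '-DCFG_ASIC=1', '+incdir+./rtl', '-loglevel', 'info']:
        print(arg, '->', presplit(arg))
    # -O3 -> ['-O', '3']
    # -DCFG_ASIC=1 -> ['-D', 'CFG_ASIC=1']
    # +incdir+./rtl -> ['+incdir+', './rtl']
    # -loglevel -> ['-loglevel']
    # info -> ['info']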
-    ###########################################################################
-    def __get_switches(self, schema, *keypath):
-        '''Helper function for parsing switches and metavars for a keypath.'''
-        # Switch field fully describes switch format
-        switch = schema.get(*keypath, field='switch')
-
-        if switch is None:
-            switches = []
-        elif isinstance(switch, list):
-            switches = switch
-        else:
-            switches = [switch]
-        switchstrs = []
-
-        # parse out switch from metavar
-        # TODO: should we validate that metavar matches for each switch?
-        for switch in switches:
-            switchmatch = re.match(r'(-[\w_]+)\s+(.*)', switch)
-            gccmatch = re.match(r'(-[\w_]+)(.*)', switch)
-            plusmatch = re.match(r'(\+[\w_\+]+)(.*)', switch)
-
-            if switchmatch:
-                switchstr = switchmatch.group(1)
-                metavar = switchmatch.group(2)
-            elif gccmatch:
-                switchstr = gccmatch.group(1)
-                metavar = gccmatch.group(2)
-            elif plusmatch:
-                switchstr = plusmatch.group(1)
-                metavar = plusmatch.group(2)
-            switchstrs.append(switchstr)
-
-        return switchstrs, metavar
-
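A quick demo of the switch-field parsing above: a schema 'switch' entry such as "-loglevel <str>" is split into the argparse flag and its metavar. The sample switch strings are illustrative:

    import re

    def split_switch(switch):
        m = re.match(r'(-[\w_]+)\s+(.*)', switch)
        if m:
            return m.group(1), m.group(2)
        m = re.match(r'(\+[\w_\+]+)(.*)', switch)
        return m.group(1), m.group(2)

    print(split_switch('-loglevel <str>'))   # ('-loglevel', '<str>')
    print(split_switch('+incdir+<dir>'))     # ('+incdir+', '<dir>')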
-    ###########################################################################
-    def read_manifest(self, filename, clear=True, clobber=True, allow_missing_keys=True):
-        """
-        Reads a manifest from disk and merges it with the current manifest.
-
-        The file format read is determined by the filename suffix. Currently
-        json (*.json) and yaml(*.yaml) formats are supported.
-
-        Args:
-            filename (filepath): Path to a manifest file to be loaded.
-            clear (bool): If True, disables append operations for list type.
-            clobber (bool): If True, overwrites existing parameter value.
-            allow_missing_keys (bool): If True, keys not present in current schema will be ignored.
-
-        Examples:
-            >>> chip.read_manifest('mychip.json')
-            Loads the file mychip.json into the current Chip object.
-        """
-        schema = Schema(manifest=filename, logger=self.logger)
-
-        if schema.get('schemaversion') != self.get('schemaversion'):
-            self.logger.warning("Mismatch in schema versions: "
-                                f"{schema.get('schemaversion')} != {self.get('schemaversion')}")
-
-        for keylist in schema.allkeys():
-            if keylist[0] in ('history', 'library'):
-                continue
-            if 'default' in keylist:
-                continue
-            typestr = schema.get(*keylist, field='type')
-            should_append = '[' in typestr and not clear
-
-            if allow_missing_keys and not self.valid(*keylist, default_valid=True):
-                self.logger.warning(f'{keylist} not found in schema, skipping...')
-                continue
-
-            for val, step, index in schema._getvals(*keylist, return_defvalue=False):
-                # update value, handling scalars vs. lists
-                if should_append:
-                    self.add(*keylist, val, step=step, index=index)
-                else:
-                    self.set(*keylist, val, step=step, index=index, clobber=clobber)
-
-                # update other pernode fields
-                # TODO: only update these if clobber is successful
-                step_key = Schema.GLOBAL_KEY if not step else step
-                idx_key = Schema.GLOBAL_KEY if not index else index
-                for field in schema.getdict(*keylist)['node'][step_key][idx_key].keys():
-                    if field == 'value':
-                        continue
-                    v = schema.get(*keylist, step=step, index=index, field=field)
-                    if should_append:
-                        self.add(*keylist, v, step=step, index=index, field=field)
-                    else:
-                        self.set(*keylist, v, step=step, index=index, field=field)
-
-            # update other fields that a user might modify
-            for field in schema.getdict(*keylist).keys():
-                if field in ('node',):
-                    # skip these fields (node handled above)
-                    continue
-
-                # TODO: should we be taking into consideration clobber for these fields?
-                v = schema.get(*keylist, field=field)
-                self.set(*keylist, v, field=field)
-
-        # Read history, if we're not already reading into a job
-        if 'history' in schema.getkeys():
-            for historic_job in schema.getkeys('history'):
-                self.cfg['history'][historic_job] = schema.getdict('history', historic_job)
-
-        # TODO: better way to handle this?
-        if 'library' in schema.getkeys():
-            for libname in schema.getkeys('library'):
-                self.cfg['library'][libname] = schema.getdict('library', libname)
-
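A simplified sketch of the clear/clobber semantics the docstring above describes, reduced to a single list-typed parameter (the real set()/add() handle clobber internally; this is only the decision logic):

    def merge_value(current, incoming, clear=True, clobber=True):
        if not clear:                 # append mode for list types
            return current + incoming
        if clobber or not current:    # overwrite, or fill an empty value
            return incoming
        return current                # keep the existing value

    print(merge_value(['a.v'], ['b.v'], clear=False))                 # ['a.v', 'b.v']
    print(merge_value(['a.v'], ['b.v'], clear=True, clobber=True))    # ['b.v']
    print(merge_value(['a.v'], ['b.v'], clear=True, clobber=False))   # ['a.v']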
-    ###########################################################################
-    def merge_manifest(self, src, job=None, clobber=True, clear=True, check=False):
-        """
-        Merges a given manifest with the current compilation manifest.
-
-        All value fields in the provided schema dictionary are merged into the
-        current chip object. Dictionaries with non-existent keypath produces a
-        logger error message and raises the Chip object error flag.
-
-        Args:
-            src (Schema): Schema object to merge
-            job (str): Specifies non-default job to merge into
-            clear (bool): If True, disables append operations for list type
-            clobber (bool): If True, overwrites existing parameter value
-            check (bool): If True, checks the validity of each key
-        """
-        if job is not None:
-            dest = self.history(job)
-        else:
-            dest = self
-
-        for keylist in src.allkeys():
-            if keylist[0] in ('history', 'library'):
-                continue
-            # only read in valid keypaths without 'default'
-            key_valid = True
-            if check:
-                key_valid = dest.valid(*keylist, default_valid=True)
-                if not key_valid:
-                    self.logger.warning(f'Keypath {keylist} is not valid')
-            if key_valid and 'default' not in keylist:
-                typestr = src.get(*keylist, field='type')
-                should_append = '[' in typestr and not clear
-                key_cfg = src.__search(*keylist)
-                for val, step, index in src._getvals(*keylist, return_defvalue=False):
-                    # update value, handling scalars vs. lists
-                    if should_append:
-                        dest.add(*keylist, val, step=step, index=index)
-                    else:
-                        dest.set(*keylist, val, step=step, index=index, clobber=clobber)
-
-                    # update other pernode fields
-                    # TODO: only update these if clobber is successful
-                    step_key = Schema.GLOBAL_KEY if not step else step
-                    idx_key = Schema.GLOBAL_KEY if not index else index
-                    for field in key_cfg['node'][step_key][idx_key].keys():
-                        if field == 'value':
-                            continue
-                        v = src.get(*keylist, step=step, index=index, field=field)
-                        if should_append:
-                            dest.add(*keylist, v, step=step, index=index, field=field)
-                        else:
-                            dest.set(*keylist, v, step=step, index=index, field=field)
-
-                # update other fields that a user might modify
-                for field in key_cfg.keys():
-                    if field in ('node', 'switch', 'type', 'require',
-                                 'shorthelp', 'example', 'help'):
-                        # skip these fields (node handled above, others are static)
-                        continue
-                    # TODO: should we be taking into consideration clobber for these fields?
-                    v = src.get(*keylist, field=field)
-                    dest.set(*keylist, v, field=field)
-
-
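A hedged usage sketch of merge_manifest() (assuming the removed Schema class): pull values from another Schema object into this one, either into the live manifest or into a named history job.

    golden = Schema()
    golden.set('design', 'heartbeat')

    working = Schema()
    working.merge_manifest(golden)               # merge into the live manifest
    working.merge_manifest(golden, job='job0')   # or into ['history', 'job0']
    print(working.get('design'))                 # 'heartbeat'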
-if _has_yaml:
-    class YamlIndentDumper(yaml.Dumper):
-        def increase_indent(self, flow=False, indentless=False):
-            return super(YamlIndentDumper, self).increase_indent(flow, False)
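The subclass above forces indentless=False so that block-sequence items are indented one level deeper than their parent key, which is easier to scan than PyYAML's default. A small sketch of the effect, using an equivalent dumper and made-up data:

    import yaml

    class IndentDumper(yaml.Dumper):
        def increase_indent(self, flow=False, indentless=False):
            return super().increase_indent(flow, False)

    data = {'option': {'idir': ['./include', './rtl']}}
    print(yaml.dump(data, Dumper=yaml.Dumper, default_flow_style=False))
    # option:
    #   idir:
    #   - ./include
    #   - ./rtl
    print(yaml.dump(data, Dumper=IndentDumper, default_flow_style=False))
    # option:
    #   idir:
    #     - ./include
    #     - ./rtl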