annet 0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of annet might be problematic. Click here for more details.
- annet/__init__.py +61 -0
- annet/annet.py +25 -0
- annet/annlib/__init__.py +7 -0
- annet/annlib/command.py +49 -0
- annet/annlib/diff.py +158 -0
- annet/annlib/errors.py +8 -0
- annet/annlib/filter_acl.py +196 -0
- annet/annlib/jsontools.py +89 -0
- annet/annlib/lib.py +495 -0
- annet/annlib/netdev/__init__.py +0 -0
- annet/annlib/netdev/db.py +62 -0
- annet/annlib/netdev/devdb/__init__.py +28 -0
- annet/annlib/netdev/devdb/data/devdb.json +137 -0
- annet/annlib/netdev/views/__init__.py +0 -0
- annet/annlib/netdev/views/dump.py +121 -0
- annet/annlib/netdev/views/hardware.py +112 -0
- annet/annlib/output.py +246 -0
- annet/annlib/patching.py +533 -0
- annet/annlib/rbparser/__init__.py +0 -0
- annet/annlib/rbparser/acl.py +120 -0
- annet/annlib/rbparser/deploying.py +55 -0
- annet/annlib/rbparser/ordering.py +52 -0
- annet/annlib/rbparser/platform.py +51 -0
- annet/annlib/rbparser/syntax.py +115 -0
- annet/annlib/rulebook/__init__.py +0 -0
- annet/annlib/rulebook/common.py +350 -0
- annet/annlib/tabparser.py +648 -0
- annet/annlib/types.py +35 -0
- annet/api/__init__.py +807 -0
- annet/argparse.py +415 -0
- annet/cli.py +192 -0
- annet/cli_args.py +493 -0
- annet/configs/context.yml +18 -0
- annet/configs/logging.yaml +39 -0
- annet/connectors.py +64 -0
- annet/deploy.py +441 -0
- annet/diff.py +85 -0
- annet/executor.py +551 -0
- annet/filtering.py +40 -0
- annet/gen.py +828 -0
- annet/generators/__init__.py +987 -0
- annet/generators/common/__init__.py +0 -0
- annet/generators/common/initial.py +33 -0
- annet/hardware.py +45 -0
- annet/implicit.py +139 -0
- annet/lib.py +128 -0
- annet/output.py +170 -0
- annet/parallel.py +448 -0
- annet/patching.py +25 -0
- annet/reference.py +148 -0
- annet/rulebook/__init__.py +114 -0
- annet/rulebook/arista/__init__.py +0 -0
- annet/rulebook/arista/iface.py +16 -0
- annet/rulebook/aruba/__init__.py +16 -0
- annet/rulebook/aruba/ap_env.py +146 -0
- annet/rulebook/aruba/misc.py +8 -0
- annet/rulebook/cisco/__init__.py +0 -0
- annet/rulebook/cisco/iface.py +68 -0
- annet/rulebook/cisco/misc.py +57 -0
- annet/rulebook/cisco/vlandb.py +90 -0
- annet/rulebook/common.py +19 -0
- annet/rulebook/deploying.py +87 -0
- annet/rulebook/huawei/__init__.py +0 -0
- annet/rulebook/huawei/aaa.py +75 -0
- annet/rulebook/huawei/bgp.py +97 -0
- annet/rulebook/huawei/iface.py +33 -0
- annet/rulebook/huawei/misc.py +337 -0
- annet/rulebook/huawei/vlandb.py +115 -0
- annet/rulebook/juniper/__init__.py +107 -0
- annet/rulebook/nexus/__init__.py +0 -0
- annet/rulebook/nexus/iface.py +92 -0
- annet/rulebook/patching.py +143 -0
- annet/rulebook/ribbon/__init__.py +12 -0
- annet/rulebook/texts/arista.deploy +20 -0
- annet/rulebook/texts/arista.order +125 -0
- annet/rulebook/texts/arista.rul +59 -0
- annet/rulebook/texts/aruba.deploy +20 -0
- annet/rulebook/texts/aruba.order +83 -0
- annet/rulebook/texts/aruba.rul +87 -0
- annet/rulebook/texts/cisco.deploy +27 -0
- annet/rulebook/texts/cisco.order +82 -0
- annet/rulebook/texts/cisco.rul +105 -0
- annet/rulebook/texts/huawei.deploy +188 -0
- annet/rulebook/texts/huawei.order +388 -0
- annet/rulebook/texts/huawei.rul +471 -0
- annet/rulebook/texts/juniper.rul +120 -0
- annet/rulebook/texts/nexus.deploy +24 -0
- annet/rulebook/texts/nexus.order +85 -0
- annet/rulebook/texts/nexus.rul +83 -0
- annet/rulebook/texts/nokia.rul +31 -0
- annet/rulebook/texts/pc.order +5 -0
- annet/rulebook/texts/pc.rul +9 -0
- annet/rulebook/texts/ribbon.deploy +22 -0
- annet/rulebook/texts/ribbon.rul +77 -0
- annet/rulebook/texts/routeros.order +38 -0
- annet/rulebook/texts/routeros.rul +45 -0
- annet/storage.py +121 -0
- annet/tabparser.py +36 -0
- annet/text_term_format.py +95 -0
- annet/tracing.py +170 -0
- annet/types.py +223 -0
- annet-0.1.dist-info/AUTHORS +21 -0
- annet-0.1.dist-info/LICENSE +21 -0
- annet-0.1.dist-info/METADATA +24 -0
- annet-0.1.dist-info/RECORD +113 -0
- annet-0.1.dist-info/WHEEL +5 -0
- annet-0.1.dist-info/entry_points.txt +6 -0
- annet-0.1.dist-info/top_level.txt +3 -0
- annet_generators/__init__.py +0 -0
- annet_generators/example/__init__.py +12 -0
- annet_generators/example/lldp.py +52 -0
- annet_nbexport/__init__.py +220 -0
- annet_nbexport/main.py +46 -0
|
@@ -0,0 +1,987 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import abc
|
|
4
|
+
import contextlib
|
|
5
|
+
import dataclasses
|
|
6
|
+
import importlib
|
|
7
|
+
import os
|
|
8
|
+
import pkgutil
|
|
9
|
+
import re
|
|
10
|
+
import textwrap
|
|
11
|
+
import time
|
|
12
|
+
import types
|
|
13
|
+
from collections import OrderedDict as odict
|
|
14
|
+
from typing import (
|
|
15
|
+
Any,
|
|
16
|
+
Callable,
|
|
17
|
+
Dict,
|
|
18
|
+
FrozenSet,
|
|
19
|
+
Iterable,
|
|
20
|
+
List,
|
|
21
|
+
Optional,
|
|
22
|
+
Set,
|
|
23
|
+
Tuple,
|
|
24
|
+
Union,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
from annet.annlib import jsontools
|
|
28
|
+
from annet.annlib.rbparser.acl import compile_acl_text
|
|
29
|
+
from contextlog import get_logger
|
|
30
|
+
|
|
31
|
+
from annet.storage import Device, Storage
|
|
32
|
+
|
|
33
|
+
from annet import patching, tabparser, tracing
|
|
34
|
+
from annet.cli_args import GenSelectOptions, ShowGeneratorsOptions
|
|
35
|
+
from annet.lib import (
|
|
36
|
+
add_annotation,
|
|
37
|
+
flatten,
|
|
38
|
+
get_context,
|
|
39
|
+
jinja_render,
|
|
40
|
+
mako_render,
|
|
41
|
+
merge_dicts,
|
|
42
|
+
)
|
|
43
|
+
from annet.reference import RefMatcher, RefTracker
|
|
44
|
+
from annet.tracing import tracing_connector
|
|
45
|
+
from annet.types import (
|
|
46
|
+
GeneratorEntireResult,
|
|
47
|
+
GeneratorJSONFragmentResult,
|
|
48
|
+
GeneratorPartialResult,
|
|
49
|
+
GeneratorPartialRunArgs,
|
|
50
|
+
GeneratorPerf,
|
|
51
|
+
GeneratorResult,
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
# =====
|
|
56
|
+
DISABLED_TAG = "disable"  # generators carrying this tag are described as disabled; per get_description, '-g <name>' re-enables them
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
# =====
|
|
60
|
+
class GeneratorError(Exception):
    """Base error for failures while running or post-processing a generator."""
    pass
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class NotSupportedDevice(GeneratorError):
    """Raised by a generator to signal it does not support the given device; callers skip the generator."""
    pass
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class DefaultBlockIfCondition:
    """Sentinel default for block_if()/multiblock_if(): means 'derive the condition from the tokens'."""
    pass
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class GeneratorPerfMesurer:
    """Measures one generator run: wall-clock time plus storage perf counters.

    Usable as a context manager; the measurement is published in
    ``last_result`` (a ``GeneratorPerf``) once ``finish()``/``__exit__`` has
    run.  A tracing span named "gen:call" is opened around the measured
    section.
    """

    def __init__(
        self,
        gen: Union["PartialGenerator", "Entire"],
        storage: Storage,
        run_args: Optional[GeneratorPartialRunArgs] = None,
        trace_min_duration: tracing.MinDurationT = None
    ):
        self._gen = gen
        self._storage = storage
        self._run_args = run_args

        # Span bookkeeping: the context-manager object and the span it yields.
        self._start_time: float = 0.0
        self._span_ctx = None
        self._span = None
        self._trace_min_duration = trace_min_duration

        # Set by finish(); None until then (and reset again by start()).
        self.last_result: Optional[GeneratorPerf] = None

    def start(self) -> None:
        """Open the tracing span and start the clock."""
        self.last_result = None

        # Discard storage perf counters accumulated before this measurement,
        # so finish() sees only this run's round-trips.
        self._storage.flush_perf()

        self._span_ctx = tracing_connector.get().start_as_current_span(
            "gen:call",
            tracer_name=self._gen.__class__.__module__,
            min_duration=self._trace_min_duration,
        )
        self._span = self._span_ctx.__enter__()  # pylint: disable=unnecessary-dunder-call

        if self._span:
            self._span.set_attributes({"generator.class": self._gen.__class__.__name__})
            if self._run_args:
                tracing_connector.get().set_device_attributes(self._span, self._run_args.device)

        # Timing starts last so span setup is not counted in `total`.
        self._start_time = time.monotonic()

    def finish(self, exc_type=None, exc_val=None, exc_tb=None) -> GeneratorPerf:
        """Stop the clock, close the span, and build/store the GeneratorPerf."""
        total = time.monotonic() - self._start_time
        # flush_perf() returns the storage round-trip counters for this run.
        rt = self._storage.flush_perf()
        self._span_ctx.__exit__(exc_type, exc_val, exc_tb)

        meta = {}
        if tracing_connector.get().enabled:
            span_context = self._span.get_span_context()
            meta = {
                "span": {
                    "trace_id": str(span_context.trace_id),
                    "span_id": str(span_context.span_id),
                }
            }

        self.last_result = GeneratorPerf(total=total, rt=rt, meta=meta)
        return self.last_result

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.finish(exc_type, exc_val, exc_tb)
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class RunGeneratorResult:
    """Aggregated output of run_partial_generators/run_file_generators."""

    def __init__(self):
        self.partial_results: Dict[str, GeneratorPartialResult] = {}
        self.entire_results: Dict[str, GeneratorEntireResult] = {}
        self.json_fragment_results: Dict[str, GeneratorJSONFragmentResult] = {}
        self.ref_track: RefTracker = RefTracker()
        self.ref_matcher: RefMatcher = RefMatcher()

    def add_partial(self, result: GeneratorPartialResult):
        """Record a PARTIAL generator result, keyed by generator name."""
        self.partial_results[result.name] = result

    def add_entire(self, result: GeneratorEntireResult) -> None:
        """Record an ENTIRE result; on a path collision keep the higher prio."""
        if not result.path:
            return
        current = self.entire_results.get(result.path)
        if current is None or result.prio > current.prio:
            self.entire_results[result.path] = result

    def add_json_fragment(self, result: GeneratorJSONFragmentResult) -> None:
        """Record a JSON_FRAGMENT generator result, keyed by generator name."""
        self.json_fragment_results[result.name] = result

    def config_tree(self, safe: bool = False) -> Dict[str, Any]:  # OrderedDict
        """Merge all partial configs (safe variants when *safe*) into one tree."""
        merged = odict()
        for partial in self.partial_results.values():
            merged = merge_dicts(merged, partial.safe_config if safe else partial.config)
        return merged

    def new_files(self, safe: bool = False) -> Dict[str, Tuple[str, str]]:
        """Map file path -> (content, reload command) for ENTIRE results."""
        return {
            entire.path: (entire.output, entire.reload)
            for entire in self.entire_results.values()
            if not safe or entire.is_safe
        }

    def acl_text(self) -> str:
        """Combined ACL text of all partial results."""
        return _combine_acl_text(self.partial_results, lambda gr: gr.acl)

    def acl_safe_text(self) -> str:
        """Combined safe-ACL text of all partial results."""
        return _combine_acl_text(self.partial_results, lambda gr: gr.acl_safe)

    def new_json_fragment_files(
        self,
        old_files: Dict[str, Optional[str]],
        safe: bool = False,  # pylint: disable=unused-argument
    ) -> Dict[str, Tuple[Any, Optional[str]]]:
        """Apply each JSON fragment on top of the previous file content.

        Starts from *old_files* content (or an empty dict) and folds every
        fragment for the same path in sequence.
        """
        # TODO: safe
        files: Dict[str, Tuple[Any, Optional[str]]] = {}
        for fragment_result in self.json_fragment_results.values():
            target = fragment_result.path
            if target not in files:
                base = old_files.get(target)
                files[target] = (base if base is not None else {}, None)
            merged_config = jsontools.apply_json_fragment(
                files[target][0], fragment_result.config, fragment_result.acl,
            )
            files[target] = (merged_config, fragment_result.reload)
        return files

    def perf_mesures(self) -> Dict[str, Dict[str, int]]:
        """Per-generator perf numbers for partial and entire results."""
        mesures = {}
        for result in (*self.partial_results.values(), *self.entire_results.values()):
            mesures[result.name] = {"total": result.perf.total, "rt": result.perf.rt, "meta": result.perf.meta}
        return mesures
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
# =====
|
|
211
|
+
def get_list(args: ShowGeneratorsOptions):
    """Describe every configured generator: name -> {type, tags, description}."""
    if args.generators_context is not None:
        os.environ["ANN_GENERATORS_CONTEXT"] = args.generators_context
    listing = {}
    # _get_generators returns generator *instances*; describe each one's class.
    for gen in _get_generators(get_context()["generators"], None):
        listing[gen.__class__.__name__] = {
            "type": gen.TYPE,
            "tags": set(gen.TAGS),
            "description": get_description(gen.__class__),
        }
    return listing
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def get_description(gen_cls) -> str:
    """Build a human-readable description for a generator class.

    Combines the class docstring with a hint when the generator is disabled
    by tag.
    """
    doc = gen_cls.__doc__ or ""
    disabled_note = ""
    if DISABLED_TAG in gen_cls.TAGS:
        disabled_note = "Disabled. Use '-g %s' to enable" % gen_cls.__name__
    return textwrap.dedent(" ".join([doc, disabled_note])).strip()
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def validate_genselect(gens: GenSelectOptions, all_classes):
    """Validate that every alias selected in *gens* names a known generator.

    :param gens: selection options carrying ``allowed_gens`` / ``force_enabled``
    :param all_classes: generator classes providing ``get_aliases()``
    :raises GeneratorError: for the first unknown alias found (after logging it)

    Fixes over the original: raise the file's own ``GeneratorError`` instead
    of a bare ``Exception`` (backward compatible — GeneratorError subclasses
    Exception), and create the logger lazily so the happy path does no
    logging setup.
    """
    unknown_err = "Unknown generator alias %s"
    all_aliases = {
        alias
        for cls in all_classes
        for alias in cls.get_aliases()
    }
    for gen_set in (gens.allowed_gens, gens.force_enabled):
        for alias in set(gen_set or ()) - all_aliases:
            # Logger is only needed on the error path.
            get_logger().error(unknown_err, alias)
            raise GeneratorError(unknown_err % alias)
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
@dataclasses.dataclass
class Generators:
    """Collection of various types of generators."""

    # Generators producing fragments of the device config (TYPE == "PARTIAL").
    partial: List[PartialGenerator] = dataclasses.field(default_factory=list)
    # Generators producing whole files (TYPE == "ENTIRE").
    entire: List[Entire] = dataclasses.field(default_factory=list)
    # Generators producing JSON fragments merged into files (TYPE == "JSON_FRAGMENT").
    json_fragment: List[JSONFragment] = dataclasses.field(default_factory=list)
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
def build_generators(storage, gens: GenSelectOptions, device: Optional[Device] = None) -> Generators:
    """Return generators that meet the gens filter conditions."""
    if gens.generators_context is not None:
        os.environ["ANN_GENERATORS_CONTEXT"] = gens.generators_context
    available = _get_generators(get_context()["generators"], storage, device)
    validate_genselect(gens, available)
    selected = list(select_generators(gens, available))
    return Generators(
        partial=[gen for gen in selected if gen.TYPE == "PARTIAL"],
        # ENTIRE generators are ordered by priority, highest first.
        entire=sorted(
            (gen for gen in selected if gen.TYPE == "ENTIRE"),
            key=lambda gen: gen.prio,
            reverse=True,
        ),
        json_fragment=[gen for gen in selected if gen.TYPE == "JSON_FRAGMENT"],
    )
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
@tracing.function
def run_partial_initial(device, storage):
    """Run only the built-in InitialConfig partial generator for *device*.

    Returns the same RunGeneratorResult shape as run_partial_generators.
    """
    # Function-scope import — presumably avoids an import cycle at module
    # load time; confirm before moving to the top of the file.
    from .common.initial import InitialConfig

    tracing_connector.get().set_device_attributes(tracing_connector.get().get_current_span(), device)

    run_args = GeneratorPartialRunArgs(device, storage)
    return run_partial_generators([InitialConfig()], run_args)
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
@tracing.function
def run_partial_generators(gens: List["PartialGenerator"], run_args: GeneratorPartialRunArgs):
    """Run the given PARTIAL generators for one device and collect results.

    NOTE: ``gens`` is deliberately appended to while being iterated — when a
    generator's config matches a registered "ref" pattern, the referenced
    generator class is instantiated and queued to run later in this same
    loop.  Do not convert this loop to a comprehension.
    """
    logger = get_logger(host=run_args.device.hostname)
    tracing_connector.get().set_device_attributes(tracing_connector.get().get_current_span(), run_args.device)

    ret = RunGeneratorResult()
    if run_args.generators_context is not None:
        os.environ["ANN_GENERATORS_CONTEXT"] = run_args.generators_context
    # Register ref patterns so configs produced below can trigger extra generators.
    for gen in _get_ref_generators(get_context()["generators"], run_args.storage):
        ret.ref_matcher.add(gen.ref(run_args.device), gen.__class__)

    logger.debug("Generating selected PARTIALs ...")

    for gen in gens:
        try:
            result = _run_partial_generator(gen, run_args)
        except NotSupportedDevice:
            logger.info("generator %s is not supported for this device, skip generator for this devices!", gen)
            continue

        if not result:
            continue

        config = result.safe_config if run_args.use_acl_safe else result.config

        # Any ref match schedules the referenced generator onto `gens`.
        ref_match = ret.ref_matcher.match(config)
        for gen_cls, groups in ref_match:
            gens.append(gen_cls(run_args.storage, groups))
            ret.ref_track.add(gen.__class__, gen_cls)

        ret.ref_track.config(gen.__class__, config)
        ret.add_partial(result)

    return ret
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
@tracing.function(name="run_partial_generator")
def _run_partial_generator(gen: "PartialGenerator", run_args: GeneratorPartialRunArgs) -> GeneratorPartialResult:
    """Run one PARTIAL generator: render its text, parse it, apply ACLs.

    Raises GeneratorError on generator/parse/ACL failures; NotSupportedDevice
    is re-raised unchanged so the caller can skip the generator.
    """
    logger = get_logger(generator=_make_generator_ctx(gen))
    device = run_args.device
    output = ""
    config = odict()
    safe_config = odict()

    span = tracing_connector.get().get_current_span()
    if span:
        tracing_connector.get().set_device_attributes(span, run_args.device)
        tracing_connector.get().set_dimensions_attributes(span, gen, run_args.device)
        span.set_attributes({
            "use_acl": run_args.use_acl,
            "use_acl_safe": run_args.use_acl_safe,
            "generators_context": str(run_args.generators_context),
        })

    with GeneratorPerfMesurer(gen, run_args.storage, run_args=run_args) as pm:
        if not run_args.no_new:
            # Only run the user code when this class actually implements a
            # runner for the device's vendor (or overrides run()).
            if gen.get_user_runner(device):
                logger.info("Generating PARTIAL ...")
                try:
                    output = gen(device, run_args.annotate)
                except NotSupportedDevice:
                    # Must reach the caller in its original form.
                    raise
                except Exception as err:
                    filename, lineno = gen.get_running_line()
                    logger.exception("Generator error in file '%s:%i'", filename, lineno)
                    raise GeneratorError(f"{gen} on {device}") from err

        # Parse the rendered text into a config tree (vendor-aware splitter).
        fmtr = tabparser.make_formatter(device.hw)
        try:
            config = tabparser.parse_to_tree(text=output, splitter=fmtr.split)
        except tabparser.ParserError as err:
            logger.exception("Parser error")
            raise GeneratorError from err

        # Compile both the regular and the "safe" ACL rule sets.
        acl = gen.acl(device) or ""
        rules = compile_acl_text(textwrap.dedent(acl), device.hw.vendor)
        acl_safe = gen.acl_safe(device) or ""
        safe_rules = compile_acl_text(textwrap.dedent(acl_safe), device.hw.vendor)

        if run_args.use_acl:
            try:
                with tracing_connector.get().start_as_current_span("apply_acl", tracer_name=__name__, min_duration="0.01") as acl_span:
                    tracing_connector.get().set_device_attributes(acl_span, run_args.device)
                    config = patching.apply_acl(
                        config=config,
                        rules=rules,
                        fatal_acl=True,
                        with_annotations=run_args.annotate,
                    )
                # NOTE(review): the safe ACL is applied only when use_acl is
                # also set, and on the already-ACL-filtered config — confirm
                # this nesting is intentional.
                if run_args.use_acl_safe:
                    with tracing_connector.get().start_as_current_span(
                        "apply_acl_safe",
                        tracer_name=__name__,
                        min_duration="0.01"
                    ) as acl_safe_span:
                        tracing_connector.get().set_device_attributes(acl_safe_span, run_args.device)
                        safe_config = patching.apply_acl(
                            config=config,
                            rules=safe_rules,
                            fatal_acl=False,
                            with_annotations=run_args.annotate,
                        )
            except patching.AclError as err:
                logger.error("ACL error: generator is not allowed to yield this command: %s", err)
                raise GeneratorError from err
            except NotImplementedError as err:
                logger.error(str(err))
                raise GeneratorError from err

    return GeneratorPartialResult(
        name=gen.__class__.__name__,
        tags=gen.TAGS,
        output=output,
        acl=acl,
        acl_rules=rules,
        acl_safe=acl_safe,
        acl_safe_rules=safe_rules,
        config=config,
        safe_config=safe_config,
        perf=pm.last_result,
    )
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
@tracing.function
def check_entire_generators_required_packages(gens, device_packages: FrozenSet[str]) -> List[str]:
    """Return one error string per generator whose REQUIRED_PACKAGES are not installed."""
    problems: List[str] = []
    for gen in gens:
        missing = gen.REQUIRED_PACKAGES - device_packages
        if not missing:
            continue
        listing = ", ".join("`{}'".format(name) for name in sorted(missing))
        noun = "package" if len(missing) == 1 else "packages"
        problems.append("missing {} {} required for {}".format(noun, listing, gen))
    return problems
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
@tracing.function
def run_file_generators(
    gens: Iterable[Union["JSONFragment", "Entire"]],
    device: "Device",
    storage: Storage,
) -> RunGeneratorResult:
    """Run generators that generate files or file parts."""
    ret = RunGeneratorResult()
    logger = get_logger(host=device.hostname)
    logger.debug("Generating selected ENTIREs and JSON_FRAGMENTs ...")
    # Dispatch table: generator TYPE -> (runner, result collector).
    dispatch = {
        "ENTIRE": (_run_entire_generator, ret.add_entire),
        "JSON_FRAGMENT": (_run_json_fragment_generator, ret.add_json_fragment),
    }
    for gen in gens:
        handlers = dispatch.get(gen.__class__.TYPE)
        if handlers is None:
            raise RuntimeError(f"Unknown generator class type: cls={gen.__class__} TYPE={gen.__class__.TYPE}")
        runner, collect = handlers
        try:
            result = runner(gen, device, storage)
        except NotSupportedDevice:
            logger.info("generator %s is not supported for this device", gen)
            continue
        if result:
            collect(result)

    return ret
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
@tracing.function(min_duration="0.5")
def _run_entire_generator(gen: "Entire", device: "Device", storage: Storage) -> Optional[GeneratorResult]:
    """Run one ENTIRE generator; returns None when it declines the device (no path)."""
    span = tracing_connector.get().get_current_span()
    if span:
        tracing_connector.get().set_device_attributes(span, device)
        tracing_connector.get().set_dimensions_attributes(span, gen, device)

    logger = get_logger(generator=_make_generator_ctx(gen))
    path = gen.path(device)
    if not path:
        return None

    logger.info("Generating ENTIRE ...")

    with GeneratorPerfMesurer(gen, storage, trace_min_duration="0.5") as pm:
        output = gen(device)

    return GeneratorEntireResult(
        name=gen.__class__.__name__,
        tags=gen.TAGS,
        path=path,
        output=output,
        reload=gen.get_reload_cmds(device),
        prio=gen.prio,
        perf=pm.last_result,
        is_safe=gen.is_safe(device),
    )
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
def _make_generator_ctx(gen):
|
|
478
|
+
return "%s.[%s]" % (gen.__module__, gen.__class__.__name__)
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def _run_json_fragment_generator(
    gen: "JSONFragment",
    device: "Device",
    storage: Storage,
) -> Optional[GeneratorResult]:
    """Run one JSON_FRAGMENT generator; returns None when it declines the device."""
    logger = get_logger(generator=_make_generator_ctx(gen))
    path = gen.path(device)

    # gen.acl() may return either one item or a list; normalize to a list.
    acl_value = gen.acl(device)
    acl = acl_value if isinstance(acl_value, list) else [acl_value]

    if not path:
        return None

    logger.info("Generating JSON_FRAGMENT ...")

    with GeneratorPerfMesurer(gen, storage) as pm:
        config = gen(device)

    return GeneratorJSONFragmentResult(
        name=gen.__class__.__name__,
        tags=gen.TAGS,
        path=path,
        acl=acl,
        config=config,
        reload=gen.get_reload_cmds(device),
        perf=pm.last_result,
        is_safe=gen.is_safe(device),
    )
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def _get_generators(module_paths: Union[List[str], dict], storage, device=None):
|
|
517
|
+
if isinstance(module_paths, dict):
|
|
518
|
+
if device is None:
|
|
519
|
+
module_paths = module_paths.get("default")
|
|
520
|
+
else:
|
|
521
|
+
modules = []
|
|
522
|
+
seen = set()
|
|
523
|
+
for prop, prop_modules in module_paths.get("per_device_property", {}).items():
|
|
524
|
+
if getattr(device, prop, False) is True:
|
|
525
|
+
for module in prop_modules:
|
|
526
|
+
if module not in seen:
|
|
527
|
+
modules.append(module)
|
|
528
|
+
seen.add(module)
|
|
529
|
+
module_paths = modules or module_paths.get("default")
|
|
530
|
+
res_generators = []
|
|
531
|
+
for module_path in module_paths:
|
|
532
|
+
module = importlib.import_module(module_path)
|
|
533
|
+
if hasattr(module, "get_generators"):
|
|
534
|
+
generators: List[BaseGenerator] = module.get_generators(storage)
|
|
535
|
+
if device is None:
|
|
536
|
+
res_generators += generators
|
|
537
|
+
else:
|
|
538
|
+
logger = get_logger()
|
|
539
|
+
for gen in generators:
|
|
540
|
+
if gen.supports_vendor(device.hw.vendor):
|
|
541
|
+
res_generators.append(gen)
|
|
542
|
+
else:
|
|
543
|
+
logger.info("generator %s does not support device vendor %s, skipping", gen, device.hw.vendor)
|
|
544
|
+
return res_generators
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
def _get_ref_generators(module_paths: List[str], storage):
|
|
548
|
+
if isinstance(module_paths, dict):
|
|
549
|
+
module_paths = module_paths.get("default")
|
|
550
|
+
res_generators = []
|
|
551
|
+
for module_path in module_paths:
|
|
552
|
+
module = importlib.import_module(module_path)
|
|
553
|
+
if hasattr(module, "get_ref_generators"):
|
|
554
|
+
res_generators += module.get_ref_generators(storage)
|
|
555
|
+
return res_generators
|
|
556
|
+
|
|
557
|
+
|
|
558
|
+
class InvalidValueFromGenerator(ValueError):
|
|
559
|
+
pass
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
class GenStringable(abc.ABC):
|
|
563
|
+
@abc.abstractmethod
|
|
564
|
+
def gen_str(self) -> str:
|
|
565
|
+
pass
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
ParamsList = tabparser.JuniperList  # public alias for tabparser.JuniperList, accepted by _filter_str
|
|
569
|
+
|
|
570
|
+
|
|
571
|
+
def _filter_str(value: Union[str, int, float, tabparser.JuniperList, ParamsList, GenStringable]):
    """Convert a value yielded by a generator to its string form.

    Plain scalars and Juniper lists go through str(); other objects must
    provide a callable gen_str().  Anything else raises
    InvalidValueFromGenerator.
    """
    plain_types = (
        str,
        int,
        float,
        tabparser.JuniperList,
        ParamsList,
    )
    if isinstance(value, plain_types):
        return str(value)

    gen_str = getattr(value, "gen_str", None)
    if callable(gen_str):
        return gen_str()

    raise InvalidValueFromGenerator("Invalid yield type: %s(%s)" % (type(value).__name__, value))
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
# =====
|
|
588
|
+
class BaseGenerator:
    """Root of all generator kinds (PARTIAL / ENTIRE / JSON_FRAGMENT)."""

    # Subclasses set these: TYPE selects the run path, TAGS feed alias selection.
    TYPE: str
    TAGS: list[str]

    def supports_vendor(self, vendor: str) -> bool:  # pylint: disable=unused-argument
        """Whether this generator can run for the given vendor (default: any)."""
        return True
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
class TreeGenerator(BaseGenerator):
    """Base class for generators that build an indented configuration tree.

    Output lines accumulate in ``self._rows``; each nested ``block(...)``
    adds one indentation level for the lines emitted inside it.
    """

    def __init__(self, indent=" "):
        self._indents = []      # stack of indent strings, one per open block
        self._rows = []         # finished output lines (already indented)
        self._block_path = []   # stack of currently open block headers
        self._indent = indent   # indent unit added per nesting level

    @tracing.contextmanager(min_duration="0.1")
    @contextlib.contextmanager
    def block(self, *tokens, indent=None):
        """Open a nested block: emit the header line, indent what follows.

        NOTE(review): there is no try/finally around the yield, so an
        exception raised in the body leaves ``_indents``/``_block_path``
        unpopped — presumably acceptable because __call__ resets state per
        run; confirm.
        """
        span = tracing_connector.get().get_current_span()
        if span:
            span.set_attribute("tokens", " ".join(map(str, tokens)))

        indent = self._indent if indent is None else indent
        block = " ".join(map(_filter_str, tokens))
        self._block_path.append(block)
        self._append_text(block)
        self._indents.append(indent)
        yield
        self._indents.pop(-1)
        self._block_path.pop(-1)

    @contextlib.contextmanager
    def block_if(self, *tokens, condition=DefaultBlockIfCondition):
        """Like block(), but emits nothing when *condition* is falsy.

        With the default sentinel, the block is emitted only when no token
        is None or "".
        """
        if condition is DefaultBlockIfCondition:
            condition = (None not in tokens and "" not in tokens)
        if condition:
            with self.block(*tokens):
                yield
            return
        yield

    @contextlib.contextmanager
    def multiblock(self, *blocks):
        """Open several nested blocks at once; each item is a token or a token list."""
        if blocks:
            blk = blocks[0]
            tokens = blk if isinstance(blk, (list, tuple)) else [blk]
            with self.block(*tokens):
                # Recurse to open the remaining blocks inside this one.
                with self.multiblock(*blocks[1:]):
                    yield
            return
        yield

    @contextlib.contextmanager
    def multiblock_if(self, *blocks, condition=DefaultBlockIfCondition):
        """Like multiblock(), but emits nothing when *condition* is falsy.

        With the default sentinel, the blocks are emitted only when none of
        them is None.
        """
        if condition is DefaultBlockIfCondition:
            condition = (None not in blocks)
        if condition:
            if blocks:
                blk = blocks[0]
                tokens = blk if isinstance(blk, (list, tuple)) else [blk]
                with self.block(*tokens):
                    with self.multiblock(*blocks[1:]):
                        yield
                return
        yield

    # ===
    def _append_text(self, text):
        self._append_text_cb(text)

    def _append_text_cb(self, text, row_cb=None):
        # Split *text* into stripped rows and emit each with the current
        # indentation; row_cb may observe/transform each row first.
        for row in _split_and_strip(text):
            if row_cb:
                row = row_cb(row)
            self._rows.append("".join(self._indents) + row)
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
class TextGenerator(TreeGenerator):
    """Tree generator exposing `gen + "line"` appending and row iteration."""

    def __add__(self, line):
        """Append *line* through the indent-aware appender; returns self for chaining."""
        self._append_text(line)
        return self

    def __iter__(self):
        """Iterate over the accumulated, already-indented rows."""
        return iter(self._rows)
|
|
672
|
+
|
|
673
|
+
|
|
674
|
+
class PartialGenerator(TreeGenerator):
|
|
675
|
+
TYPE = "PARTIAL"
|
|
676
|
+
TAGS: List[str] = []
|
|
677
|
+
|
|
678
|
+
    def __init__(self, storage):
        """
        :param storage: storage backend available to run_* implementations
        """
        super().__init__()
        self.storage = storage
        self._annotate = False           # whether __call__ annotates rows with "file:line"
        self._running_gen = None         # the generator object currently being iterated
        self._annotations = []           # per-row "file:line" origin strings
        self._annotation_module = self.__class__.__module__ or ""
|
|
685
|
+
|
|
686
|
+
def supports_vendor(self, vendor: str) -> bool:
|
|
687
|
+
if self.__class__.run is PartialGenerator.run:
|
|
688
|
+
return hasattr(self, f"run_{vendor}")
|
|
689
|
+
else:
|
|
690
|
+
return True
|
|
691
|
+
|
|
692
|
+
def acl(self, device):
|
|
693
|
+
if hasattr(self, "acl_" + device.hw.vendor):
|
|
694
|
+
return getattr(self, "acl_" + device.hw.vendor)(device)
|
|
695
|
+
|
|
696
|
+
def acl_safe(self, device):
|
|
697
|
+
if hasattr(self, "acl_safe_" + device.hw.vendor):
|
|
698
|
+
return getattr(self, "acl_safe_" + device.hw.vendor)(device)
|
|
699
|
+
|
|
700
|
+
def run(self, device) -> Iterable[Union[str, tuple]]:
|
|
701
|
+
if hasattr(self, "run_" + device.hw.vendor):
|
|
702
|
+
return getattr(self, "run_" + device.hw.vendor)(device)
|
|
703
|
+
return iter(())
|
|
704
|
+
|
|
705
|
+
def get_user_runner(self, device):
|
|
706
|
+
if self.__class__.run is not PartialGenerator.run:
|
|
707
|
+
return self.run
|
|
708
|
+
elif hasattr(self, "run_" + device.hw.vendor):
|
|
709
|
+
return getattr(self, "run_" + device.hw.vendor)
|
|
710
|
+
return None
|
|
711
|
+
|
|
712
|
+
# =====
|
|
713
|
+
|
|
714
|
+
@classmethod
|
|
715
|
+
def get_aliases(cls) -> Set[str]:
|
|
716
|
+
return {cls.__name__, *cls.TAGS}
|
|
717
|
+
|
|
718
|
+
def __call__(self, device, annotate=False):
|
|
719
|
+
self._indents = []
|
|
720
|
+
self._rows = []
|
|
721
|
+
self._running_gen = self.run(device)
|
|
722
|
+
self._annotate = annotate
|
|
723
|
+
|
|
724
|
+
if annotate and self.__class__.__module__:
|
|
725
|
+
self._annotation_module = ".".join(self.__class__.__module__.split(".")[-2:])
|
|
726
|
+
|
|
727
|
+
for text in self._running_gen:
|
|
728
|
+
if isinstance(text, tuple):
|
|
729
|
+
text = " ".join(map(_filter_str, flatten(text)))
|
|
730
|
+
else:
|
|
731
|
+
text = _filter_str(text)
|
|
732
|
+
self._append_text(text)
|
|
733
|
+
|
|
734
|
+
for row in self._rows:
|
|
735
|
+
assert re.search(r"\bNone\b", row) is None, "Found 'None' in yield result: %s" % (row)
|
|
736
|
+
if annotate:
|
|
737
|
+
generated_rows = (add_annotation(x, y) for (x, y) in zip(self._rows, self._annotations))
|
|
738
|
+
else:
|
|
739
|
+
generated_rows = self._rows
|
|
740
|
+
return "\n".join(generated_rows) + "\n"
|
|
741
|
+
|
|
742
|
+
def _append_text(self, text):
|
|
743
|
+
def annotation_cb(row):
|
|
744
|
+
annotation = "%s:%d" % self.get_running_line()
|
|
745
|
+
self._annotations.append(annotation)
|
|
746
|
+
return row
|
|
747
|
+
|
|
748
|
+
self._append_text_cb(
|
|
749
|
+
text,
|
|
750
|
+
annotation_cb if self._annotate else None
|
|
751
|
+
)
|
|
752
|
+
|
|
753
|
+
def get_running_line(self):
|
|
754
|
+
if not self._running_gen or not self._running_gen.gi_frame:
|
|
755
|
+
return (repr(self._running_gen), -1)
|
|
756
|
+
return self._annotation_module, self._running_gen.gi_frame.f_lineno
|
|
757
|
+
|
|
758
|
+
@classmethod
|
|
759
|
+
def literal(cls, item):
|
|
760
|
+
return '"{}"'.format(item)
|
|
761
|
+
|
|
762
|
+
def __repr__(self):
|
|
763
|
+
return "<%s>" % self.__class__.__name__
|
|
764
|
+
|
|
765
|
+
|
|
766
|
+
class RefGenerator(PartialGenerator):
    """Partial generator variant that can emit per-vendor reference config."""

    def __init__(self, storage, groups=None):
        super().__init__(storage)
        self.groups = groups

    def ref(self, device):
        """Dispatch to ``ref_<vendor>`` for the device, or return ""."""
        handler = getattr(self, "ref_" + device.hw.vendor, None)
        if handler is not None:
            return handler(device)
        return ""
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
class Entire(BaseGenerator):
    """Generator producing an entire config file for a device path."""

    TYPE = "ENTIRE"
    TAGS: List[str] = []
    REQUIRED_PACKAGES: FrozenSet[str] = frozenset()

    def __init__(self, storage):
        self.storage = storage
        # Among generators for the same path, the one with the greater prio is chosen.
        if not hasattr(self, "prio"):
            self.prio = 100
        # Name-mangled (_Entire__device); set by __call__ for mako/jinja helpers.
        self.__device = None

    def run(self, device) -> Union[None, str, Iterable[Union[str, tuple]]]:
        """Produce the file content; must be overridden by subclasses."""
        raise NotImplementedError

    def reload(self, device) -> Optional[str]:  # pylint: disable=unused-argument
        """Optional reload command(s); base implementation returns None."""
        return

    def get_reload_cmds(self, device) -> str:
        """Return reload command(s), appending an etckeeper commit-reload
        for Cumulus/SwitchDev whitebox devices."""
        ret = self.reload(device) or ""
        path = self.path(device)
        if path and device.hw.PC and device.hw.soft.startswith(("Cumulus", "SwitchDev")):
            parts = []
            if ret:
                parts.append(ret)
            parts.append("/usr/bin/etckeeper commitreload %s" % path)
            return "\n".join(parts)
        return ret

    def path(self, device) -> Optional[str]:
        """Target file path on the device; must be overridden."""
        raise NotImplementedError("Required PATH for ENTIRE generator")

    # pylint: disable=unused-argument
    def is_safe(self, device) -> bool:
        """Output gen results when --acl-safe flag is used"""
        return False

    def read(self, path) -> str:
        """Read a packaged data file (relative to this module) as text."""
        return pkgutil.get_data(__name__, path).decode()

    def mako(self, text, **kwargs) -> str:
        """Render *text* as a mako template with the current device in scope."""
        return mako_render(text, dedent=True, device=self.__device, **kwargs)

    def jinja(self, text, **kwargs) -> str:
        """Render *text* as a jinja template with the current device in scope."""
        return jinja_render(text, dedent=True, device=self.__device, **kwargs)

    # =====

    @classmethod
    def get_aliases(cls) -> Set[str]:
        """Names under which this generator can be selected: class name + tags."""
        return {cls.__name__, *cls.TAGS}

    def __call__(self, device):
        """Run the generator and return the full file content as one string.

        ``run`` may return a str, a tuple, or a generator; anything else
        (including None) is rejected.
        """
        self.__device = device
        parts = []
        run_res = self.run(device)
        if isinstance(run_res, str):
            run_res = (run_res,)
        if run_res is None or not isinstance(run_res, (tuple, types.GeneratorType)):
            raise Exception("generator %s returns %s" % (self.__class__.__name__, type(run_res)))
        for text in run_res:
            # Tuples are flattened and space-joined, like in PartialGenerator.
            if isinstance(text, tuple):
                text = " ".join(map(_filter_str, flatten(text)))
            assert re.search(r"\bNone\b", text) is None, "Found 'None' in yield result: %s" % text
            parts.append(text)
        return "\n".join(parts)
|
|
843
|
+
|
|
844
|
+
|
|
845
|
+
def select_generators(gens: GenSelectOptions, classes: Iterable[BaseGenerator]):
    """Lazily filter *classes* according to the selection options in *gens*.

    RefGenerator instances are always excluded. The allowed/force-enabled/
    disabled rules are mutually exclusive (first match wins); exclusion is
    applied on top.
    """
    def alias_match(obj, names):
        # Truthy set of matching aliases when any name matches, else falsy.
        return obj.get_aliases().intersection(names) if names else False

    def tagged(obj, tag):
        return tag in obj.TAGS

    predicates = [lambda c: not isinstance(c, RefGenerator)]
    if gens.allowed_gens:
        predicates.append(lambda c: alias_match(c, gens.allowed_gens))
    elif gens.force_enabled:
        predicates.append(lambda c: not tagged(c, DISABLED_TAG) or alias_match(c, gens.force_enabled))
    elif not gens.ignore_disabled:
        predicates.append(lambda c: not tagged(c, DISABLED_TAG))

    if gens.excluded_gens:
        predicates.append(lambda c: not alias_match(c, gens.excluded_gens))

    return filter(lambda cand: all(pred(cand) for pred in predicates), classes)
|
|
866
|
+
|
|
867
|
+
|
|
868
|
+
def _split_and_strip(text):
|
|
869
|
+
if "\n" in text:
|
|
870
|
+
rows = textwrap.dedent(text).strip().split("\n")
|
|
871
|
+
else:
|
|
872
|
+
rows = [text]
|
|
873
|
+
return rows
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def _combine_acl_text(
|
|
877
|
+
partial_results: Dict[str, GeneratorPartialResult],
|
|
878
|
+
acl_getter: Callable[[GeneratorPartialResult], str]
|
|
879
|
+
) -> str:
|
|
880
|
+
acl_text = ""
|
|
881
|
+
for gr in partial_results.values():
|
|
882
|
+
for line in textwrap.dedent(acl_getter(gr)).split("\n"):
|
|
883
|
+
if line and not line.isspace():
|
|
884
|
+
acl_text += line.rstrip()
|
|
885
|
+
acl_text += fr" %generator_names={gr.name}"
|
|
886
|
+
acl_text += "\n"
|
|
887
|
+
return acl_text
|
|
888
|
+
|
|
889
|
+
|
|
890
|
+
class JSONFragment(TreeGenerator):
    """Generates parts of JSON config file."""

    TYPE = "JSON_FRAGMENT"
    TAGS: List[str] = []

    def __init__(self, storage: Storage):
        super().__init__()
        self.storage = storage
        # Accumulated JSON document; may be replaced by a non-dict value
        # (see _set_or_replace_dict).
        self._json_config: Dict[str, Any] = {}
        # Stack of keys (a JSON-Pointer-like path) maintained by block()/block_piped().
        self._config_pointer: List[str] = []

    def path(self, device: Device) -> Optional[str]:
        """Target file path for the fragment; must be overridden."""
        raise NotImplementedError("Required PATH for JSON_FRAGMENT generator")

    @classmethod
    def get_aliases(cls) -> Set[str]:
        """Names under which this generator can be selected: class name + tags."""
        return {cls.__name__, *cls.TAGS}

    def acl(self, device: Device) -> Union[str, List[str]]:
        """
        Restrict the generator to a specified ACL using JSON Pointer syntax.

        Expected ACL to be a list of strings, but a single string is also allowed.
        """
        raise NotImplementedError("Required ACL for JSON_FRAGMENT generator")

    def run(self, device: Device):
        """Yield config fragments to merge; must be overridden."""
        raise NotImplementedError

    # pylint: disable=unused-argument
    def is_safe(self, device: Device) -> bool:
        """Output gen results when --acl-safe flag is used"""
        return False

    def get_reload_cmds(self, device: Device) -> str:
        # NOTE(review): reload() raises NotImplementedError in this base
        # class, so this method only works for subclasses overriding reload().
        ret = self.reload(device) or ""
        return ret

    def reload(self, device) -> Optional[str]:
        """Reload command(s) for the fragment; must be overridden."""
        raise NotImplementedError

    @contextlib.contextmanager
    def block(self, *tokens, indent=None):  # pylint: disable=unused-argument
        """Push a pointer key made by concatenating *tokens*; pop on exit."""
        block_str = "".join(map(_filter_str, tokens))
        self._config_pointer.append(block_str)
        try:
            yield
        finally:
            self._config_pointer.pop()

    @contextlib.contextmanager
    def block_piped(self, *tokens, indent=None):  # pylint: disable=unused-argument
        """Like block(), but joins *tokens* with "|" to form the key."""
        block_str = "|".join(map(_filter_str, tokens))
        self._config_pointer.append(block_str)
        try:
            yield
        finally:
            self._config_pointer.pop()

    def __call__(self, device: Device, annotate: bool = False):
        """Run the generator and return the merged JSON config structure.

        ``annotate`` is accepted for interface parity but has no effect here.
        """
        for cfg_fragment in self.run(device):
            # _config_pointer reflects the block()/block_piped() nesting in
            # effect at the moment run() yields this fragment.
            self._set_or_replace_dict(self._config_pointer, cfg_fragment)
        return self._json_config

    def _set_or_replace_dict(self, pointer, value):
        """Merge *value* into the config at *pointer* (root when empty).

        A second root-level value turns the config into a two-element list.
        """
        if not pointer:
            if self._json_config == {}:
                self._json_config = value
            else:
                self._json_config = [self._json_config, value]
        else:
            self._set_dict(self._json_config, pointer, value)

    @classmethod
    def _to_str(cls, value: Any) -> str:
        """Recursively stringify scalar leaves of *value*.

        NOTE(review): despite the ``-> str`` annotation, lists and dicts are
        returned as containers (dicts are mutated in place); only scalars
        are converted with str().
        """
        if isinstance(value, str):
            return value
        elif isinstance(value, list):
            return [cls._to_str(x) for x in value]
        elif isinstance(value, dict):
            for k, v in value.items():
                value[k] = cls._to_str(v)
            return value
        return str(value)

    @classmethod
    def _set_dict(cls, cfg, pointer, value):
        """Set *value* (stringified) into nested dict *cfg* at *pointer*.

        A collision at the final key turns the slot into a [old, new] list.
        """
        # pointer has at least one key
        if len(pointer) == 1:
            if pointer[0] in cfg:
                cfg[pointer[0]] = [cfg[pointer[0]], cls._to_str(value)]
            else:
                cfg[pointer[0]] = cls._to_str(value)
        else:
            if pointer[0] not in cfg:
                cfg[pointer[0]] = {}
            cls._set_dict(cfg[pointer[0]], pointer[1:], cls._to_str(value))
|