annet 0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of annet might be problematic. Click here for more details.
- annet/__init__.py +61 -0
- annet/adapters/__init__.py +0 -0
- annet/adapters/netbox/__init__.py +0 -0
- annet/adapters/netbox/common/__init__.py +0 -0
- annet/adapters/netbox/common/client.py +87 -0
- annet/adapters/netbox/common/manufacturer.py +62 -0
- annet/adapters/netbox/common/models.py +105 -0
- annet/adapters/netbox/common/query.py +23 -0
- annet/adapters/netbox/common/status_client.py +25 -0
- annet/adapters/netbox/common/storage_opts.py +14 -0
- annet/adapters/netbox/provider.py +34 -0
- annet/adapters/netbox/v24/__init__.py +0 -0
- annet/adapters/netbox/v24/api_models.py +73 -0
- annet/adapters/netbox/v24/client.py +59 -0
- annet/adapters/netbox/v24/storage.py +196 -0
- annet/adapters/netbox/v37/__init__.py +0 -0
- annet/adapters/netbox/v37/api_models.py +38 -0
- annet/adapters/netbox/v37/client.py +62 -0
- annet/adapters/netbox/v37/storage.py +149 -0
- annet/annet.py +25 -0
- annet/annlib/__init__.py +7 -0
- annet/annlib/command.py +49 -0
- annet/annlib/diff.py +158 -0
- annet/annlib/errors.py +8 -0
- annet/annlib/filter_acl.py +196 -0
- annet/annlib/jsontools.py +116 -0
- annet/annlib/lib.py +495 -0
- annet/annlib/netdev/__init__.py +0 -0
- annet/annlib/netdev/db.py +62 -0
- annet/annlib/netdev/devdb/__init__.py +28 -0
- annet/annlib/netdev/devdb/data/devdb.json +137 -0
- annet/annlib/netdev/views/__init__.py +0 -0
- annet/annlib/netdev/views/dump.py +121 -0
- annet/annlib/netdev/views/hardware.py +112 -0
- annet/annlib/output.py +246 -0
- annet/annlib/patching.py +533 -0
- annet/annlib/rbparser/__init__.py +0 -0
- annet/annlib/rbparser/acl.py +120 -0
- annet/annlib/rbparser/deploying.py +55 -0
- annet/annlib/rbparser/ordering.py +52 -0
- annet/annlib/rbparser/platform.py +51 -0
- annet/annlib/rbparser/syntax.py +115 -0
- annet/annlib/rulebook/__init__.py +0 -0
- annet/annlib/rulebook/common.py +350 -0
- annet/annlib/tabparser.py +648 -0
- annet/annlib/types.py +35 -0
- annet/api/__init__.py +826 -0
- annet/argparse.py +415 -0
- annet/cli.py +237 -0
- annet/cli_args.py +503 -0
- annet/configs/context.yml +18 -0
- annet/configs/logging.yaml +39 -0
- annet/connectors.py +77 -0
- annet/deploy.py +536 -0
- annet/diff.py +84 -0
- annet/executor.py +551 -0
- annet/filtering.py +40 -0
- annet/gen.py +865 -0
- annet/generators/__init__.py +435 -0
- annet/generators/base.py +136 -0
- annet/generators/common/__init__.py +0 -0
- annet/generators/common/initial.py +33 -0
- annet/generators/entire.py +97 -0
- annet/generators/exceptions.py +10 -0
- annet/generators/jsonfragment.py +125 -0
- annet/generators/partial.py +119 -0
- annet/generators/perf.py +79 -0
- annet/generators/ref.py +15 -0
- annet/generators/result.py +127 -0
- annet/hardware.py +45 -0
- annet/implicit.py +139 -0
- annet/lib.py +128 -0
- annet/output.py +167 -0
- annet/parallel.py +448 -0
- annet/patching.py +25 -0
- annet/reference.py +148 -0
- annet/rulebook/__init__.py +114 -0
- annet/rulebook/arista/__init__.py +0 -0
- annet/rulebook/arista/iface.py +16 -0
- annet/rulebook/aruba/__init__.py +16 -0
- annet/rulebook/aruba/ap_env.py +146 -0
- annet/rulebook/aruba/misc.py +8 -0
- annet/rulebook/cisco/__init__.py +0 -0
- annet/rulebook/cisco/iface.py +68 -0
- annet/rulebook/cisco/misc.py +57 -0
- annet/rulebook/cisco/vlandb.py +90 -0
- annet/rulebook/common.py +19 -0
- annet/rulebook/deploying.py +87 -0
- annet/rulebook/huawei/__init__.py +0 -0
- annet/rulebook/huawei/aaa.py +75 -0
- annet/rulebook/huawei/bgp.py +97 -0
- annet/rulebook/huawei/iface.py +33 -0
- annet/rulebook/huawei/misc.py +337 -0
- annet/rulebook/huawei/vlandb.py +115 -0
- annet/rulebook/juniper/__init__.py +107 -0
- annet/rulebook/nexus/__init__.py +0 -0
- annet/rulebook/nexus/iface.py +92 -0
- annet/rulebook/patching.py +143 -0
- annet/rulebook/ribbon/__init__.py +12 -0
- annet/rulebook/texts/arista.deploy +20 -0
- annet/rulebook/texts/arista.order +125 -0
- annet/rulebook/texts/arista.rul +59 -0
- annet/rulebook/texts/aruba.deploy +20 -0
- annet/rulebook/texts/aruba.order +83 -0
- annet/rulebook/texts/aruba.rul +87 -0
- annet/rulebook/texts/cisco.deploy +27 -0
- annet/rulebook/texts/cisco.order +82 -0
- annet/rulebook/texts/cisco.rul +105 -0
- annet/rulebook/texts/huawei.deploy +188 -0
- annet/rulebook/texts/huawei.order +388 -0
- annet/rulebook/texts/huawei.rul +471 -0
- annet/rulebook/texts/juniper.rul +120 -0
- annet/rulebook/texts/nexus.deploy +24 -0
- annet/rulebook/texts/nexus.order +85 -0
- annet/rulebook/texts/nexus.rul +83 -0
- annet/rulebook/texts/nokia.rul +31 -0
- annet/rulebook/texts/pc.order +5 -0
- annet/rulebook/texts/pc.rul +9 -0
- annet/rulebook/texts/ribbon.deploy +22 -0
- annet/rulebook/texts/ribbon.rul +77 -0
- annet/rulebook/texts/routeros.order +38 -0
- annet/rulebook/texts/routeros.rul +45 -0
- annet/storage.py +125 -0
- annet/tabparser.py +36 -0
- annet/text_term_format.py +95 -0
- annet/tracing.py +170 -0
- annet/types.py +227 -0
- annet-0.0.dist-info/AUTHORS +21 -0
- annet-0.0.dist-info/LICENSE +21 -0
- annet-0.0.dist-info/METADATA +26 -0
- annet-0.0.dist-info/RECORD +137 -0
- annet-0.0.dist-info/WHEEL +5 -0
- annet-0.0.dist-info/entry_points.txt +5 -0
- annet-0.0.dist-info/top_level.txt +2 -0
- annet_generators/__init__.py +0 -0
- annet_generators/example/__init__.py +12 -0
- annet_generators/example/lldp.py +53 -0
annet/gen.py
ADDED
|
@@ -0,0 +1,865 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import dataclasses
|
|
4
|
+
import itertools
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import textwrap
|
|
9
|
+
import time
|
|
10
|
+
from collections import OrderedDict as odict
|
|
11
|
+
from operator import itemgetter
|
|
12
|
+
from typing import (
|
|
13
|
+
Any,
|
|
14
|
+
Dict,
|
|
15
|
+
FrozenSet,
|
|
16
|
+
Generator,
|
|
17
|
+
Iterable,
|
|
18
|
+
Iterator,
|
|
19
|
+
List,
|
|
20
|
+
Optional,
|
|
21
|
+
Tuple,
|
|
22
|
+
Union,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
import tabulate
|
|
26
|
+
from contextlog import get_logger
|
|
27
|
+
|
|
28
|
+
from annet import generators, implicit, patching, tabparser, tracing
|
|
29
|
+
from annet.annlib import jsontools
|
|
30
|
+
from annet.annlib.rbparser import platform
|
|
31
|
+
from annet.annlib.rbparser.acl import compile_acl_text
|
|
32
|
+
from annet.cli_args import DeployOptions, GenOptions, ShowGenOptions
|
|
33
|
+
from annet.deploy import fetcher_connector, scrub_config
|
|
34
|
+
from annet.filtering import Filterer
|
|
35
|
+
from annet.generators import (
|
|
36
|
+
BaseGenerator,
|
|
37
|
+
Entire,
|
|
38
|
+
GeneratorError,
|
|
39
|
+
JSONFragment,
|
|
40
|
+
NotSupportedDevice,
|
|
41
|
+
PartialGenerator,
|
|
42
|
+
RefGenerator,
|
|
43
|
+
)
|
|
44
|
+
from annet.lib import merge_dicts, percentile
|
|
45
|
+
from annet.output import output_driver_connector
|
|
46
|
+
from annet.parallel import Parallel
|
|
47
|
+
from annet.storage import Device, Storage, storage_connector
|
|
48
|
+
from annet.tracing import tracing_connector
|
|
49
|
+
from annet.types import OldNewResult
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
# Output of all generators combined.
# The value is the same as the analogous constant in "ЧК" (an internal
# tool this marker is shared with — TODO confirm expansion).
ALL_GENS = "_all_gens"

# NOTE(review): module-level cache slot; only ever seen as None here —
# presumably populated elsewhere in the project, verify before relying on it.
live_configs = None
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@dataclasses.dataclass
class DeviceGenerators:
    """Collections of various types of generators found for devices."""

    # device -> partial generators found for that device
    partial: Dict[Any, List[PartialGenerator]] = dataclasses.field(default_factory=dict)

    # device -> ref generators
    ref: Dict[Any, List[RefGenerator]] = dataclasses.field(default_factory=dict)

    # device -> entire-file generators
    entire: Dict[Any, List[Entire]] = dataclasses.field(default_factory=dict)

    # device -> json-fragment generators
    json_fragment: Dict[Any, List[JSONFragment]] = dataclasses.field(default_factory=dict)

    def iter_gens(self) -> Iterator[BaseGenerator]:
        """Iterate over all collected generators (ref generators excluded)."""
        for per_device in (self.partial, self.entire, self.json_fragment):
            for gen_list in per_device.values():
                yield from gen_list

    def file_gens(self, device: Any) -> Iterator[Union[Entire, JSONFragment]]:
        """Iterate over generators that generate files or file parts."""
        yield from self.entire.get(device, [])
        yield from self.json_fragment.get(device, [])

    def update(self, other: "DeviceGenerators") -> None:
        """Merge *other*'s per-device mappings into this one (dict.update semantics)."""
        for field_name in ("partial", "ref", "entire", "json_fragment"):
            getattr(self, field_name).update(getattr(other, field_name))
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@dataclasses.dataclass
class OldNewDeviceContext:
    """Shared, read-only context for one old/new generation run.

    Built once per run (see old_new / old_raw) and passed to
    _old_new_per_device for every device.
    """

    # config source selector ("running", "empty", a directory path, ...)
    config: str
    # parsed CLI options for the generation run
    args: GenOptions
    # device -> files already downloaded from it, split by generator type
    downloaded_files: Dict[Device, DeviceDownloadedFiles]
    # device -> exception raised while downloading its files
    failed_files: Dict[Device, Exception]
    # device -> its running config text(s)
    running: Dict[Device, Dict[str, str]]
    # device -> exception raised while fetching its running config
    failed_running: Dict[Device, Exception]
    # when True, skip producing the "new" config entirely
    no_new: bool
    # data read from stdin (e.g. the filter-acl text), keyed by kind
    stdin: Optional[Dict[str, Optional[str]]]
    # annotate generated lines with their generator origin
    add_annotations: bool
    # merge implicit (device-default) config lines into old/new trees
    add_implicit: bool
    # whether per-file (entire/json-fragment) configs were downloaded
    do_files_download: bool
    # generators resolved for the devices of this run
    gens: DeviceGenerators
    # device -> set of packages fetched from it (for required-packages check)
    fetched_packages: Dict[Device, FrozenSet[str]]
    # device -> exception raised while fetching packages
    failed_packages: Dict[Device, Exception]
    # total number of devices in the run
    device_count: int
    # print per-generator timing tables when profiling is on
    do_print_perf: bool
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@tracing.function
def _old_new_per_device(ctx: OldNewDeviceContext, device: Device, filterer: Filterer) -> OldNewResult:
    """Build the "old" (existing) and "new" (generated) configs for one device.

    For network devices (non-PC) the CLI config is parsed into a tree and the
    partial generators are run; for PC ("whitebox") devices per-file
    entire/json-fragment generators are used instead.  Failures are reported
    by returning an OldNewResult with ``err`` set rather than raising.
    """
    tracing_connector.get().set_device_attributes(tracing_connector.get().get_current_span(), device)

    start = time.monotonic()
    acl_rules = None
    acl_safe_rules = None
    old = odict()
    safe_old = odict()
    old_files = DeviceDownloadedFiles()
    new = odict()
    safe_new = odict()
    combined_perf = {}
    partial_results = []
    entire_results = []
    implicit_rules: Optional[Dict[str, Any]] = None
    filter_acl_rules: Optional[Dict[str, Any]] = None
    new_json_fragment_files: Dict[str, Dict[str, Any]] = {}
    json_fragment_results: Dict[str, generators.GeneratorJSONFragmentResult] = {}

    if not device.is_pc():
        # --- CLI-config path: fetch and parse the device's existing config ---
        try:
            text = _old_new_get_config_cli(ctx, device)
        except Exception as exc:
            return OldNewResult(device=device, err=exc)

        if not text and ctx.args.fail_on_empty_config:
            return OldNewResult(
                device=device,
                err=Exception("no existing config retrieved (method: %s)" % ctx.config),
            )

        old = odict()
        if ctx.config != "empty":
            old = tabparser.parse_to_tree(
                text=text,
                splitter=tabparser.make_formatter(device.hw).split,
            )
        if not old:
            # No existing config: seed "old" from the initial partial generators.
            res = generators.run_partial_initial(device)
            old = res.config_tree()
            perf = res.perf_mesures()
            if ctx.args.profile and ctx.do_print_perf:
                _print_perf("INITIAL", perf)
        run_args = generators.GeneratorPartialRunArgs(
            device=device,
            use_acl=not ctx.args.no_acl,
            use_acl_safe=ctx.args.acl_safe,
            annotate=ctx.add_annotations,
            generators_context=ctx.args.generators_context,
            no_new=ctx.no_new,
        )
        res = generators.run_partial_generators(
            ctx.gens.partial[device],
            ctx.gens.ref[device],
            run_args,
        )
        partial_results = res.partial_results
        perf = res.perf_mesures()
        if ctx.no_new:
            new = odict()
            safe_new = odict()
        elif partial_results:
            # skip one gen with not supported device
            new = res.config_tree()
            safe_new = res.config_tree(safe=True)

        if ctx.args.profile:
            if ctx.do_print_perf:
                _print_perf("PARTIAL", perf)
            combined_perf.update(perf)

        # Implicit rules describe config lines the device implies by default.
        implicit_rules = implicit.compile_rules(device)
        if ctx.add_implicit:
            old = merge_dicts(old, implicit.config(old, implicit_rules))
            new = merge_dicts(new, implicit.config(new, implicit_rules))
            safe_new = merge_dicts(safe_new, implicit.config(safe_new, implicit_rules))

        if not ctx.args.no_acl:
            # Restrict both trees to what the generators' ACL covers.
            acl_rules = generators.compile_acl_text(res.acl_text(), device.hw.vendor)
            old = (old and patching.apply_acl(old, acl_rules))

            new = patching.apply_acl(
                new,
                acl_rules,
                exclusive=not ctx.args.no_acl_exclusive,
                with_annotations=ctx.add_annotations,
            )
            if ctx.args.acl_safe:
                acl_safe_rules = generators.compile_acl_text(res.acl_safe_text(), device.hw.vendor)
                safe_old = (old and patching.apply_acl(old, acl_safe_rules))
                safe_new = patching.apply_acl(
                    safe_new,
                    acl_safe_rules,
                    exclusive=not ctx.args.no_acl_exclusive,
                    with_annotations=ctx.add_annotations,
                )

        # User-supplied filters (--filter-acl/--filter-ifaces/...) narrow both trees.
        filter_acl_rules = build_filter_acl(filterer, device, ctx.stdin, ctx.args, ctx.config)
        if filter_acl_rules is not None:
            old = (old and patching.apply_acl(old, filter_acl_rules, fatal_acl=False))
            new = patching.apply_acl(
                new,
                filter_acl_rules,
                fatal_acl=False,
                with_annotations=ctx.add_annotations,
            )
    else:  # vendor == pc
        try:
            old_files = _old_new_get_config_files(ctx, device)
        except Exception as exc:
            return OldNewResult(device=device, err=exc)

    new_files = {}
    safe_new_files = {}
    safe_new_json_fragment_files = {}
    if not ctx.no_new:
        if device in ctx.fetched_packages:
            if ctx.args.required_packages_check:
                errors = generators.check_entire_generators_required_packages(ctx.gens.entire[device.fqdn],
                                                                              ctx.fetched_packages[device])
                if errors:
                    error_msg = "; ".join(errors)
                    get_logger(host=device.hostname).error(error_msg)
                    return OldNewResult(device=device, err=Exception(error_msg))
        res = generators.run_file_generators(
            ctx.gens.file_gens(device),
            device,
        )

        entire_results = res.entire_results
        json_fragment_results = res.json_fragment_results
        # NOTE(review): old_json_fragment_files is only bound inside this
        # "if not ctx.no_new" block yet is referenced in the return below —
        # confirm the function is never reached with no_new set, or it would
        # raise NameError at the return.
        old_json_fragment_files = old_files.json_fragment_files

        new_files = res.new_files()
        new_json_fragment_files = res.new_json_fragment_files(old_json_fragment_files)

        filters: List[str] = []
        filters_text = build_filter_text(filterer, device, ctx.stdin, ctx.args, ctx.config)
        if filters_text:
            filters = filters_text.split("\n")

        # Apply the user filters to every generated json fragment...
        for file_name in new_json_fragment_files:
            if new_json_fragment_files.get(file_name) is not None:
                new_json_fragment_files = _update_json_config(
                    new_json_fragment_files,
                    file_name,
                    jsontools.apply_acl_filters(new_json_fragment_files[file_name][0], filters)
                )
        # ...and to the fragments downloaded from the device, so they diff cleanly.
        for file_name in old_json_fragment_files:
            if old_json_fragment_files.get(file_name) is not None:
                old_json_fragment_files[file_name] = jsontools.apply_acl_filters(old_json_fragment_files[file_name], filters)

        if ctx.args.acl_safe:
            safe_new_files = res.new_files(safe=True)
            safe_new_json_fragment_files = res.new_json_fragment_files(old_json_fragment_files, safe=True)
            if filters:
                for file_name in safe_new_json_fragment_files:
                    if safe_new_json_fragment_files.get(file_name):
                        safe_new_json_fragment_files = _update_json_config(
                            safe_new_json_fragment_files,
                            file_name,
                            jsontools.apply_acl_filters(safe_new_json_fragment_files[file_name][0], filters)
                        )

        if ctx.args.profile:
            perf = res.perf_mesures()
            combined_perf[ALL_GENS] = {"total": time.monotonic() - start}
            combined_perf.update(perf)
            if ctx.do_print_perf:
                _print_perf("ENTIRE", perf)

    return OldNewResult(
        device=device,
        old=old,
        new=new,
        acl_rules=acl_rules,
        old_files=old_files.entire_files,
        new_files=new_files,
        partial_result=partial_results,
        entire_result=entire_results,
        old_json_fragment_files=old_json_fragment_files,
        new_json_fragment_files=new_json_fragment_files,
        json_fragment_result=json_fragment_results,
        implicit_rules=implicit_rules,
        perf=combined_perf,
        acl_safe_rules=acl_safe_rules,
        safe_old=safe_old,
        safe_new=safe_new,
        safe_new_files=safe_new_files,
        safe_new_json_fragment_files=safe_new_json_fragment_files,
        filter_acl_rules=filter_acl_rules,
    )
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def _update_json_config(json_files, file_name, new_config):
|
|
312
|
+
file = list(json_files[file_name])
|
|
313
|
+
file[0] = new_config
|
|
314
|
+
json_files[file_name] = tuple(file)
|
|
315
|
+
return json_files
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
@dataclasses.dataclass
class DeviceDownloadedFiles:
    """Files downloaded from a device, bucketed by generator type."""

    # file path -> raw file content, for entire generators
    entire_files: Dict[str, str] = dataclasses.field(default_factory=dict)

    # file path -> parsed JSON document, for json fragment generators
    json_fragment_files: Dict[str, Dict[str, Any]] = dataclasses.field(default_factory=dict)

    def is_empty(self) -> bool:
        """Return True when no file of either kind was downloaded."""
        return not (self.entire_files or self.json_fragment_files)
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
def split_downloaded_files(
    device_flat_files: Dict[str, Optional[str]],
    gens: DeviceGenerators,
    device: Device,
) -> DeviceDownloadedFiles:
    """Split downloaded files per generator type: entire/json_fragment."""
    result = DeviceDownloadedFiles()

    for generator in gens.file_gens(device):
        path = generator.path(device)
        if path not in device_flat_files:
            continue
        content = device_flat_files[path]
        if isinstance(generator, Entire):
            result.entire_files[path] = content
        elif isinstance(generator, JSONFragment):
            # content is None when the file does not exist on the device;
            # keep the None so callers can tell "missing" from "empty".
            result.json_fragment_files[path] = None if content is None else json.loads(content)

    return result
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
def split_downloaded_files_multi_device(
    flat_downloaded_files: Dict[Device, Dict[str, Optional[str]]],
    gens: DeviceGenerators,
    devices: List[Device],
) -> Dict[Device, DeviceDownloadedFiles]:
    """Split downloaded files per generator type: entire/json_fragment.

    Returns a mapping keyed by Device (the annotation previously claimed
    ``Dict[str, ...]``, but the comprehension below clearly keys by the
    Device objects themselves).  Devices absent from *flat_downloaded_files*
    are silently skipped.
    """
    return {
        device: split_downloaded_files(flat_downloaded_files[device], gens, device)
        for device in devices
        if device in flat_downloaded_files
    }
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
# ====
|
|
366
|
+
@tracing.function
def old_new(
    args: GenOptions,
    config: str,
    loader: "Loader",
    filterer: Filterer,
    add_implicit=True,
    add_annotations=False,
    stdin=None,
    device_ids: List[int] = None,  # NOTE(review): accepted but unused here — presumably kept for worker-API symmetry, confirm
    no_new=False,
    do_files_download=False,
    do_print_perf=True,
):
    """Yield an OldNewResult for every device known to *loader*.

    Assembles the shared OldNewDeviceContext (running configs, downloaded
    files, fetched packages) once, then runs _old_new_per_device per device.
    ACL-exclusivity violations are escalated as GeneratorError.
    """
    devices = loader.devices
    gens = loader.resolve_gens(devices)
    running, failed_running = _old_resolve_running(config, devices)
    downloaded_files, failed_files = _old_resolve_files(config, devices, gens, do_files_download)

    if stdin is None:
        stdin = args.stdin(filter_acl=args.filter_acl, config=config)

    # Packages are only fetched when we both download files and diff against
    # the running config.
    fetched_packages, failed_packages = {}, {}
    if do_files_download and config == "running":
        files_to_download = _get_files_to_download(devices, gens)
        devices_with_files = [device for device in devices if device in files_to_download]
        fetcher = fetcher_connector.get()
        fetched_packages, failed_packages = fetcher.fetch_packages(devices_with_files)

    ctx = OldNewDeviceContext(
        config=config,
        args=args,
        downloaded_files=split_downloaded_files_multi_device(downloaded_files, gens, devices),
        failed_files=failed_files,
        running=running,
        failed_running=failed_running,
        no_new=no_new,
        stdin=stdin,
        add_annotations=add_annotations,
        add_implicit=add_implicit,
        do_files_download=do_files_download,
        gens=gens,
        fetched_packages=fetched_packages,
        failed_packages=failed_packages,
        device_count=len(devices),
        do_print_perf=do_print_perf,
    )
    for device in devices:
        logger = get_logger(host=device.hostname)
        try:
            result = _old_new_per_device(ctx, device, filterer)
        except patching.AclNotExclusiveError as err:
            logger.error("ACL error: more than one acl rules matches to this command: %s", err)
            raise GeneratorError from err
        if result is not None:
            yield result
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
@tracing.function
def old_raw(
    args: GenOptions, loader: Loader, config, stdin=None,
    do_files_download=False, use_mesh=True,  # NOTE(review): use_mesh is never read in this body — confirm it is intentional API surface
) -> Iterable[Tuple[Device, Union[str, Dict[str, str]]]]:
    """Yield each device's raw existing config without running generators.

    For CLI devices yields (device, scrubbed config text); for PC devices
    yields (device, {path: content}) — possibly twice, once for entire files
    and once for formatted json-fragment files.
    """
    device_gens = loader.resolve_gens(loader.devices)
    running, failed_running = _old_resolve_running(config, loader.devices)
    downloaded_files, failed_files = _old_resolve_files(config, loader.devices, device_gens, do_files_download)
    if stdin is None:
        stdin = args.stdin(filter_acl=args.filter_acl, config=config)
    # no_new=True: we only want the "old" side, so generators are disabled.
    ctx = OldNewDeviceContext(
        config=config,
        args=args,
        downloaded_files=split_downloaded_files_multi_device(downloaded_files, device_gens, loader.devices),
        failed_files=failed_files,
        running=running,
        failed_running=failed_running,
        stdin=stdin,
        do_files_download=do_files_download,
        device_count=len(loader.devices),
        no_new=True,
        add_annotations=False,
        add_implicit=False,
        gens=DeviceGenerators(),
        fetched_packages={},
        failed_packages={},
        do_print_perf=True,
    )
    for device in loader.devices:
        if not device.is_pc():
            # NOTE: rebinds the `config` parameter with the device's config text.
            config = _old_new_get_config_cli(ctx, device)
            config = scrub_config(config, device.breed)
            yield device, config
        else:
            files = _old_new_get_config_files(ctx, device)
            if files.entire_files:
                yield device, files.entire_files
            if files.json_fragment_files:
                yield device, {
                    path: jsontools.format_json(data)
                    for path, data in files.json_fragment_files.items()
                }
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
@tracing.function
def worker(device_id, args: ShowGenOptions, stdin, loader: "Loader", filterer: Filterer) -> Generator[Tuple[str, str, bool], None, None]:
    """Generate configs for one device and yield (dest_path, text, is_old) triples.

    Used by the "show gen" code path: config="/dev/null" means there is no
    existing config to diff against, only the freshly generated one.
    """
    span = tracing_connector.get().get_current_span()
    if span:
        span.set_attribute("device.id", device_id)

    for res in old_new(
        args,
        config="/dev/null",
        loader=loader,
        filterer=filterer,
        add_implicit=False,
        add_annotations=args.annotate,
        stdin=stdin,
        device_ids=[device_id],
    ):
        new = res.get_new(args.acl_safe)
        new_files = res.get_new_files(args.acl_safe)
        new_file_fragments = res.get_new_file_fragments(args.acl_safe)
        output_driver = output_driver_connector.get()
        device = res.device
        if new is None:
            continue
        # Entire-file generator output, sorted by path for stable display.
        for (entire_path, (entire_data, _)) in sorted(new_files.items(), key=itemgetter(0)):
            yield (output_driver.entire_config_dest_path(device, entire_path), entire_data, False)

        # JSON-fragment generator output, pretty-printed.
        for (path, (data, _)) in sorted(new_file_fragments.items(), key=itemgetter(0)):
            dumped_data = json.dumps(data, indent=4, sort_keys=True, ensure_ascii=False)
            yield (output_driver.entire_config_dest_path(device, path), dumped_data, False)

        # Emit the CLI config block when there is partial output (or nothing else).
        has_file_result = new_files or new_file_fragments
        has_partial_result = new or not has_file_result
        if device.hw.vendor in platform.VENDOR_REVERSES and has_partial_result:
            orderer = patching.Orderer.from_hw(device.hw)
            yield (output_driver.cfg_file_names(device)[0],
                   format_config_blocks(
                       orderer.order_config(new),
                       device.hw,
                       args.indent
                   ),
                   False)
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
def old_new_worker(device_id, args: DeployOptions, config, stdin, loader: "Loader", filterer: Filterer):
    """Parallel-worker entry point: yield OldNewResult items for one device.

    Thin wrapper over old_new that fixes the deploy-specific options:
    files are always downloaded, and --clear maps to no_new.
    """
    results = old_new(
        args,
        config=config,
        loader=loader,
        filterer=filterer,
        stdin=stdin,
        device_ids=[device_id],
        no_new=args.clear,  # --clear: generate an empty "new" config
        do_files_download=True,
    )
    for result in results:
        yield result
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
class OldNewParallel(Parallel):
    """Parallel runner that fans old_new_worker out over devices."""

    def __init__(self, args: DeployOptions, loader: "Loader", filterer: Filterer):
        stdin = args.stdin(filter_acl=args.filter_acl, config=args.config)
        super().__init__(
            old_new_worker,
            args,
            config=args.config,
            stdin=stdin,
            loader=loader,
            filterer=filterer,
        )
        # tune_args comes from the Parallel base — presumably applies
        # parallelism-related CLI options; confirm against annet.parallel.
        self.tune_args(args)

    def generated_configs(self, devices: List[Device]) -> Generator[OldNewResult, None, None]:
        """Run workers for *devices* and yield one OldNewResult per device.

        Worker exceptions become error results; devices that produced neither
        a result nor an exception are reported with a synthetic error at the end.
        """
        devices_by_id = {device.id: device for device in devices}
        device_ids = list(devices_by_id)

        for task_result in self.irun(device_ids):
            if task_result.exc is not None:
                # pop: mark the device as handled so it is not re-reported below
                device = devices_by_id.pop(task_result.device_id)
                yield OldNewResult(device=device, err=task_result.exc)
            elif task_result.result is not None:
                yield from task_result.result
                devices_by_id.pop(task_result.device_id)

        # Whatever is left produced no result at all.
        for device in devices_by_id.values():
            yield OldNewResult(device=device, err=Exception(f"No config returned for {device.hostname}"))
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
@dataclasses.dataclass
class DeviceFilesToDownload:
    """Paths to fetch from a device, bucketed by generator type."""

    # paths produced by entire generators
    entire: List[str] = dataclasses.field(default_factory=list)
    # paths produced by json fragment generators
    json_fragment: List[str] = dataclasses.field(default_factory=list)
|
|
557
|
+
|
|
558
|
+
|
|
559
|
+
@tracing.function
def _get_files_to_download(devices: List[Device], gens: DeviceGenerators) -> Dict[Device, Any]:
    """Collect, per device, the sorted file paths its file generators need.

    The returned mapping holds a sorted list of paths for each device — or,
    if resolving a device's generators blew up, the exception itself, so the
    caller can report it per device instead of aborting the whole run.
    """
    files_to_download = {}
    for device in devices:
        paths = set()
        try:
            for generator in gens.file_gens(device):
                try:
                    path = generator.path(device)
                except NotSupportedDevice:
                    # This generator does not apply to the device: skip it.
                    continue
                if path:
                    paths.add(path)
        except Exception as exc:
            # Record the failure under the device and move on.
            files_to_download[device] = exc
            continue
        if paths:
            files_to_download[device] = sorted(paths)
    return files_to_download
|
|
578
|
+
|
|
579
|
+
|
|
580
|
+
def _print_perf(gen_type, perf):
    """Print a per-generator timing table (to stderr) for profiling runs.

    *perf* maps generator name -> {"total": seconds, "rt": {method: [stat, ...]}}
    where each stat is a dict with at least "time" and "op" keys.  Generators
    are sorted by total time; within a generator, a synthetic "total" row
    (method=None) is listed alongside per-method rows, sorted by summed time.
    """
    print(file=sys.stderr)
    print(
        tabulate.tabulate([
            (
                # generator name only on its summary row (method is None)
                (gen if not method else None),
                (method or "." * 30),
                sum(map(itemgetter("time"), stat)),
                (min(map(itemgetter("time"), stat)) if method else None),
                (percentile(stat, 0.95, itemgetter("time")) if method else None),
                (max(map(itemgetter("time"), stat)) if method else None),
                (len(stat) if method else None),
                # "Direct" column: count of direct call/disk_write operations
                (len(list(filter(
                    lambda item: item in ["call", "disk_write"],
                    map(itemgetter("op"), stat)))) if method else None),
            )

            for (gen, gen_perf) in sorted(
                perf.items(),
                key=(lambda item: item[1]["total"]),
                reverse=True,
            )

            for (method, stat) in sorted(
                [(None, [{"time": gen_perf["total"], "op": None}])] + list(gen_perf["rt"].items()),
                key=(lambda item: sum(map(itemgetter("time"), item[1]))),
                reverse=True,
            )
        ],
        [gen_type + "-Generator", "RT", "Total", "Min", "95%", "Max", "Calls", "Direct"],
        tablefmt="orgtbl", floatfmt=".4f",
        ),
        file=sys.stderr,
    )
    print(file=sys.stderr)
|
|
615
|
+
|
|
616
|
+
|
|
617
|
+
def build_filter_text(filterer, device, stdin, args, config):
    """Assemble the filter-ACL text for *device* from all requested sources.

    Sources, concatenated in order: the --filter-acl text (from stdin, a file,
    or a per-host <hostname>.acl inside a directory), then interface, peer and
    policy filters produced by *filterer*.  Returns None when no filter was
    requested at all.
    """
    text = None
    if args.filter_acl:
        text = stdin["filter_acl"]
        if not text:
            # Nothing came via stdin: read from disk instead.
            if os.path.isdir(args.filter_acl):
                filename = os.path.join(config, "%s.acl" % device.hostname)
            else:
                filename = args.filter_acl
            with open(filename) as fh:
                text = fh.read()

    if args.filter_ifaces:
        text = (text + "\n") if text else ""
        text += filterer.for_ifaces(device, args.filter_ifaces)

    if args.filter_peers:
        text = (text + "\n") if text else ""
        text += filterer.for_peers(device, args.filter_peers)

    if args.filter_policies:
        text = (text + "\n") if text else ""
        text += filterer.for_policies(device, args.filter_policies)

    return text
|
|
641
|
+
|
|
642
|
+
|
|
643
|
+
def build_filter_acl(filterer, device, stdin, args, config):
    """Build and compile the filter ACL for *device*.

    Returns the compiled ACL, or None when no filter text was produced.
    """
    text = build_filter_text(filterer, device, stdin, args, config)
    if text is None:
        return None
    return compile_acl_text(
        textwrap.dedent(text),
        device.hw.vendor,
        allow_ignore=True,
    )
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
def _existing_cfg_file_name(config_dir: str, device) -> Optional[str]:
    """Return the first existing candidate config path for *device* in *config_dir*.

    When no candidate exists on disk, falls back to the last candidate name
    (so callers still get a deterministic path); None only when the output
    driver yields no candidates at all.
    """
    candidates = [
        os.path.join(config_dir, name)
        for name in output_driver_connector.get().cfg_file_names(device)
    ]
    for path in candidates:
        if os.path.exists(path):
            return path
    return candidates[-1] if candidates else None
|
|
662
|
+
|
|
663
|
+
|
|
664
|
+
def format_config_blocks(config, hw, indent, _level=0):
    """Render *config* blocks to text with the hardware-specific formatter.

    *_level* is unused here and kept only for caller compatibility.
    """
    return tabparser.make_formatter(hw, indent=indent).join(config)
|
|
667
|
+
|
|
668
|
+
|
|
669
|
+
def format_files(files):
    """Concatenate file bodies, each preceded by a '# <path>' header line.

    Files are emitted in sorted path order; sections are newline-joined.
    """
    return "\n".join(
        line
        for path in sorted(files)
        for line in ("# %s" % path, files[path])
    )
|
|
677
|
+
|
|
678
|
+
|
|
679
|
+
def find_files_relative(path: str) -> Generator[str, None, None]:
    """Recursively find files under *path*, yielding paths relative to it."""
    base = os.path.abspath(path)
    for dirpath, _subdirs, names in os.walk(path):
        for name in names:
            yield os.path.relpath(os.path.join(dirpath, name), base)
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
def load_pc_config(path: str, set_root=False) -> Dict[str, str]:
    """Pick up locally saved config files for whitebox devices.

    Maps each file's path relative to *path* (re-rooted at "/" when
    *set_root* is true) to the file's text content.
    """
    configs: Dict[str, str] = {}
    for rel_path in find_files_relative(path):
        with open(os.path.join(path, rel_path)) as fh:
            content = fh.read()
        key = os.path.join("/", rel_path) if set_root else rel_path
        configs[key] = content
    return configs
|
|
698
|
+
|
|
699
|
+
|
|
700
|
+
@tracing.function
def _old_new_get_config_cli(ctx: OldNewDeviceContext, device: Device) -> Optional[str]:
    """Resolve the 'old' config text for *device* according to ctx.config.

    ctx.config selects the source:
      * "empty"   -> empty string;
      * "running" -> previously fetched running config (raises the recorded
                     fetch error when missing);
      * "-"       -> config passed on stdin (single-device runs only);
      * otherwise -> a file path, or a directory searched for the device's
                     config file.

    Returns the config text, or None when reading the file failed during a
    multi-device run (the error is logged instead of raised).
    """
    if ctx.config == "empty":
        text = ""
    elif ctx.config == "running":
        text = ctx.running.get(device)
        if text is None:
            # No config was fetched: surface the failure recorded for this
            # device (keyed by fqdn or hostname), or a generic error.
            exc = (ctx.failed_running.get(device.fqdn) or
                   ctx.failed_running.get(device.hostname) or
                   Exception("I can't get device config and I don't know why"))
            get_logger(host=device.hostname).error("config error %s", exc)
            raise exc
    elif ctx.config == "-":
        text = ctx.stdin["config"]
        if ctx.device_count > 1:
            raise ValueError("stdin config can not be used with multiple devices")
    else:
        if os.path.isdir(ctx.config):
            filename = _existing_cfg_file_name(ctx.config, device)
        else:
            filename = ctx.config
        try:
            with open(filename) as fh:
                text = fh.read()
        except Exception as exc_info:
            # A missing file is tolerated (treated as empty config) unless the
            # caller explicitly asked to fail on it.
            if not ctx.args.fail_on_empty_config and isinstance(exc_info, FileNotFoundError):
                return ""
            if ctx.device_count > 1:
                # With many devices, log and skip this one instead of aborting.
                get_logger(host=device.hostname).error(str(exc_info))
                return None
            raise
    return text
|
|
732
|
+
|
|
733
|
+
|
|
734
|
+
@tracing.function
def _old_new_get_config_files(ctx: OldNewDeviceContext, device: Device) -> DeviceDownloadedFiles:
    """Resolve the 'old' file set for a file-based (PC/whitebox) device.

    Sources, by ctx.config: "empty" -> nothing; "running" -> files already
    downloaded from the device; an existing path -> files loaded from disk.
    Raises the first fetch error recorded for this device.
    """
    old_files = DeviceDownloadedFiles()

    # Fail early if any fetch stage recorded an error for this device.
    # NOTE(review): lookups here are keyed by the device object, while the CLI
    # variant keys failed_running by fqdn/hostname — confirm both mappings use
    # the same key type.
    for attr in ("failed_files", "failed_running", "failed_packages"):
        if device in getattr(ctx, attr):
            exc = getattr(ctx, attr).get(device)
            exc = exc or Exception(f"I can't get device {attr[len('failed_'):]} and I don't know why")
            get_logger(host=device.hostname).error(str(exc))
            raise exc

    if ctx.do_files_download:
        if ctx.config == "empty":
            return old_files
        if ctx.config == "running":
            old_files_running = ctx.downloaded_files.get(device)
            if not old_files_running:
                return old_files
            old_files = old_files_running
        elif os.path.exists(ctx.config):
            # try to find config in subdirectory: <ctx.config>/<device_name>.cfg/
            config_path = _existing_cfg_file_name(ctx.config, device)
            if config_path is None:
                # if subdir does not exist, assume the whole dir is our config
                config_path = ctx.config
            if not os.path.isdir(config_path):
                get_logger(host=device.hostname).error("I can't find device files in %s", config_path)
                return old_files
            old_files = split_downloaded_files(load_pc_config(config_path, True), ctx.gens, device)
        else:
            raise NotImplementedError("pc and not running or path")
    return old_files
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
@tracing.function
def _old_resolve_gens(args: GenOptions, storage: Storage, devices: Iterable[Device]) -> DeviceGenerators:
    """Build the per-device generator tables for *devices* from *storage*.

    When *devices* is empty, resolves the full generator set once under the
    None key.
    """
    result = DeviceGenerators()
    for device in (devices or [None]):
        built = generators.build_generators(storage, gens=args, device=device)
        # Copy each generator category into the per-device tables.
        for category in ("partial", "entire", "json_fragment", "ref"):
            getattr(result, category)[device] = getattr(built, category)
    return result
|
|
779
|
+
|
|
780
|
+
|
|
781
|
+
@tracing.function
def _old_resolve_running(config: str, devices: List[Device]) -> Tuple[Dict[Device, str], Dict[Device, Exception]]:
    """Fetch running configs for *devices*.

    Returns (running, failed) maps; both are empty unless config == "running".
    Reuses the module-level live_configs cache when it is populated.
    """
    global live_configs  # pylint: disable=global-statement
    if config != "running":
        return {}, {}
    if live_configs is not None:
        # Configs were already fetched earlier in this run — reuse them.
        running, failed_running = live_configs  # pylint: disable=unpacking-non-sequence
        return running, failed_running
    # Read every config up front, directly over ssh.
    fetcher = fetcher_connector.get()
    running, failed_running = fetcher.fetch(devices)
    return running, failed_running
|
|
793
|
+
|
|
794
|
+
|
|
795
|
+
@tracing.function
def _old_resolve_files(config: str,
                       devices: List[Device],
                       gens: DeviceGenerators,
                       do_files_download: bool,
                       ) -> Tuple[Dict[Device, Dict[str, Optional[str]]], Dict[Device, Exception]]:
    """Download the generator-declared files from devices.

    Only acts when downloading is enabled and config == "running"; otherwise
    returns two empty maps. Returns (downloaded, failed) keyed by device.
    """
    if not (do_files_download and config == "running"):
        return {}, {}
    wanted = _get_files_to_download(devices, gens)
    targets = [dev for dev in devices if dev in wanted]
    if not targets:
        return {}, {}
    downloaded, failed = fetcher_connector.get().fetch(targets, files_to_download=wanted)
    return downloaded, failed
|
|
810
|
+
|
|
811
|
+
|
|
812
|
+
class Loader:
|
|
813
|
+
def __init__(
    self, *storages: Storage,
    args: GenOptions,
    no_empty_warning: bool = False,
) -> None:
    """Load devices and their generators from *storages* for args.query.

    :param storages: storages queried for devices.
    :param args: generator options, including the device query.
    :param no_empty_warning: suppress the error log when no devices match.
    """
    self._args = args
    self._storages = storages
    self._no_empty_warning = no_empty_warning
    # Sequential ids -> resolved devices, in discovery order.
    self._devices_map: Dict[int, Device] = {}
    # Per-device generators, filled by _preload().
    self._gens: DeviceGenerators = DeviceGenerators()
    # Source of the sequential device ids.
    self._counter = itertools.count()

    self._preload()
|
|
826
|
+
|
|
827
|
+
def _preload(self) -> None:
    """Resolve devices matching the query in each storage and gather their generators."""
    with tracing_connector.get().start_as_current_span("Resolve devices"):
        for storage in self._storages:
            devices = storage.make_devices(
                self._args.query,
                preload_neighbors=True,
                use_mesh=not self._args.no_mesh,
                preload_extra_fields=True,
            )
            for device in devices:
                # Assign each discovered device a unique sequential id.
                self._devices_map[next(self._counter)] = device
            self._gens.update(_old_resolve_gens(self._args, storage, devices))
            if not devices and not self._no_empty_warning:
                # NOTE(review): logs and returns on the first storage that
                # yields no devices, skipping any remaining storages —
                # confirm the early return is intended.
                get_logger().error("No devices found for %s", self._args.query)
                return
|
|
842
|
+
|
|
843
|
+
@property
def device_fqdns(self):
    """Map each loaded device id to that device's FQDN."""
    fqdns = {}
    for dev_id, dev in self._devices_map.items():
        fqdns[dev_id] = dev.fqdn
    return fqdns
|
|
849
|
+
|
|
850
|
+
@property
def device_ids(self):
    """Ids of all loaded devices, in load order."""
    return list(self._devices_map.keys())
|
|
853
|
+
|
|
854
|
+
@property
def devices(self) -> List[Device]:
    """All loaded devices, in load order (empty list when none)."""
    # list() of an empty values() view is already [], so no guard is needed.
    return list(self._devices_map.values())
|
|
859
|
+
|
|
860
|
+
def resolve_gens(self, devices: Iterable[Device]) -> DeviceGenerators:
|
|
861
|
+
if self._gens is not None:
|
|
862
|
+
return self._gens
|
|
863
|
+
|
|
864
|
+
with tracing_connector.get().start_as_current_span("Resolve gens"):
|
|
865
|
+
return _old_resolve_gens(self._args, self._storage, devices)
|