annet 0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of annet might be problematic. Click here for more details.

Files changed (113)
  1. annet/__init__.py +61 -0
  2. annet/annet.py +25 -0
  3. annet/annlib/__init__.py +7 -0
  4. annet/annlib/command.py +49 -0
  5. annet/annlib/diff.py +158 -0
  6. annet/annlib/errors.py +8 -0
  7. annet/annlib/filter_acl.py +196 -0
  8. annet/annlib/jsontools.py +89 -0
  9. annet/annlib/lib.py +495 -0
  10. annet/annlib/netdev/__init__.py +0 -0
  11. annet/annlib/netdev/db.py +62 -0
  12. annet/annlib/netdev/devdb/__init__.py +28 -0
  13. annet/annlib/netdev/devdb/data/devdb.json +137 -0
  14. annet/annlib/netdev/views/__init__.py +0 -0
  15. annet/annlib/netdev/views/dump.py +121 -0
  16. annet/annlib/netdev/views/hardware.py +112 -0
  17. annet/annlib/output.py +246 -0
  18. annet/annlib/patching.py +533 -0
  19. annet/annlib/rbparser/__init__.py +0 -0
  20. annet/annlib/rbparser/acl.py +120 -0
  21. annet/annlib/rbparser/deploying.py +55 -0
  22. annet/annlib/rbparser/ordering.py +52 -0
  23. annet/annlib/rbparser/platform.py +51 -0
  24. annet/annlib/rbparser/syntax.py +115 -0
  25. annet/annlib/rulebook/__init__.py +0 -0
  26. annet/annlib/rulebook/common.py +350 -0
  27. annet/annlib/tabparser.py +648 -0
  28. annet/annlib/types.py +35 -0
  29. annet/api/__init__.py +807 -0
  30. annet/argparse.py +415 -0
  31. annet/cli.py +192 -0
  32. annet/cli_args.py +493 -0
  33. annet/configs/context.yml +18 -0
  34. annet/configs/logging.yaml +39 -0
  35. annet/connectors.py +64 -0
  36. annet/deploy.py +441 -0
  37. annet/diff.py +85 -0
  38. annet/executor.py +551 -0
  39. annet/filtering.py +40 -0
  40. annet/gen.py +828 -0
  41. annet/generators/__init__.py +987 -0
  42. annet/generators/common/__init__.py +0 -0
  43. annet/generators/common/initial.py +33 -0
  44. annet/hardware.py +45 -0
  45. annet/implicit.py +139 -0
  46. annet/lib.py +128 -0
  47. annet/output.py +170 -0
  48. annet/parallel.py +448 -0
  49. annet/patching.py +25 -0
  50. annet/reference.py +148 -0
  51. annet/rulebook/__init__.py +114 -0
  52. annet/rulebook/arista/__init__.py +0 -0
  53. annet/rulebook/arista/iface.py +16 -0
  54. annet/rulebook/aruba/__init__.py +16 -0
  55. annet/rulebook/aruba/ap_env.py +146 -0
  56. annet/rulebook/aruba/misc.py +8 -0
  57. annet/rulebook/cisco/__init__.py +0 -0
  58. annet/rulebook/cisco/iface.py +68 -0
  59. annet/rulebook/cisco/misc.py +57 -0
  60. annet/rulebook/cisco/vlandb.py +90 -0
  61. annet/rulebook/common.py +19 -0
  62. annet/rulebook/deploying.py +87 -0
  63. annet/rulebook/huawei/__init__.py +0 -0
  64. annet/rulebook/huawei/aaa.py +75 -0
  65. annet/rulebook/huawei/bgp.py +97 -0
  66. annet/rulebook/huawei/iface.py +33 -0
  67. annet/rulebook/huawei/misc.py +337 -0
  68. annet/rulebook/huawei/vlandb.py +115 -0
  69. annet/rulebook/juniper/__init__.py +107 -0
  70. annet/rulebook/nexus/__init__.py +0 -0
  71. annet/rulebook/nexus/iface.py +92 -0
  72. annet/rulebook/patching.py +143 -0
  73. annet/rulebook/ribbon/__init__.py +12 -0
  74. annet/rulebook/texts/arista.deploy +20 -0
  75. annet/rulebook/texts/arista.order +125 -0
  76. annet/rulebook/texts/arista.rul +59 -0
  77. annet/rulebook/texts/aruba.deploy +20 -0
  78. annet/rulebook/texts/aruba.order +83 -0
  79. annet/rulebook/texts/aruba.rul +87 -0
  80. annet/rulebook/texts/cisco.deploy +27 -0
  81. annet/rulebook/texts/cisco.order +82 -0
  82. annet/rulebook/texts/cisco.rul +105 -0
  83. annet/rulebook/texts/huawei.deploy +188 -0
  84. annet/rulebook/texts/huawei.order +388 -0
  85. annet/rulebook/texts/huawei.rul +471 -0
  86. annet/rulebook/texts/juniper.rul +120 -0
  87. annet/rulebook/texts/nexus.deploy +24 -0
  88. annet/rulebook/texts/nexus.order +85 -0
  89. annet/rulebook/texts/nexus.rul +83 -0
  90. annet/rulebook/texts/nokia.rul +31 -0
  91. annet/rulebook/texts/pc.order +5 -0
  92. annet/rulebook/texts/pc.rul +9 -0
  93. annet/rulebook/texts/ribbon.deploy +22 -0
  94. annet/rulebook/texts/ribbon.rul +77 -0
  95. annet/rulebook/texts/routeros.order +38 -0
  96. annet/rulebook/texts/routeros.rul +45 -0
  97. annet/storage.py +121 -0
  98. annet/tabparser.py +36 -0
  99. annet/text_term_format.py +95 -0
  100. annet/tracing.py +170 -0
  101. annet/types.py +223 -0
  102. annet-0.1.dist-info/AUTHORS +21 -0
  103. annet-0.1.dist-info/LICENSE +21 -0
  104. annet-0.1.dist-info/METADATA +24 -0
  105. annet-0.1.dist-info/RECORD +113 -0
  106. annet-0.1.dist-info/WHEEL +5 -0
  107. annet-0.1.dist-info/entry_points.txt +6 -0
  108. annet-0.1.dist-info/top_level.txt +3 -0
  109. annet_generators/__init__.py +0 -0
  110. annet_generators/example/__init__.py +12 -0
  111. annet_generators/example/lldp.py +52 -0
  112. annet_nbexport/__init__.py +220 -0
  113. annet_nbexport/main.py +46 -0
annet/gen.py ADDED
@@ -0,0 +1,828 @@
1
+ from __future__ import annotations
2
+
3
+ import dataclasses
4
+ import itertools
5
+ import json
6
+ import os
7
+ import sys
8
+ import textwrap
9
+ import time
10
+ from collections import OrderedDict as odict
11
+ from operator import itemgetter
12
+ from typing import (
13
+ Any,
14
+ Dict,
15
+ FrozenSet,
16
+ Generator,
17
+ Iterable,
18
+ Iterator,
19
+ List,
20
+ Optional,
21
+ Tuple,
22
+ Union,
23
+ )
24
+
25
+ import tabulate
26
+ from contextlog import get_logger
27
+
28
+ from annet import generators, implicit, patching, tabparser, tracing
29
+ from annet.annlib import jsontools
30
+ from annet.annlib.rbparser import platform
31
+ from annet.annlib.rbparser.acl import compile_acl_text
32
+ from annet.cli_args import DeployOptions, GenOptions, ShowGenOptions
33
+ from annet.deploy import fetcher_connector, scrub_config
34
+ from annet.filtering import Filterer
35
+ from annet.generators import (
36
+ BaseGenerator,
37
+ Entire,
38
+ GeneratorError,
39
+ JSONFragment,
40
+ NotSupportedDevice,
41
+ PartialGenerator,
42
+ )
43
+ from annet.lib import merge_dicts, percentile
44
+ from annet.output import output_driver_connector
45
+ from annet.parallel import Parallel
46
+ from annet.storage import Device, Storage, storage_connector
47
+ from annet.tracing import tracing_connector
48
+ from annet.types import OldNewResult
49
+
50
+
51
# Key under which the combined output/timing of all generators is reported.
# The value matches the analogous constant in ЧК (an external tool) — kept in sync.
ALL_GENS = "_all_gens"

# Optional pre-fetched (running, failed_running) pair; when set,
# _old_resolve_running() uses it instead of fetching configs over ssh.
live_configs = None
56
+
57
+
58
@dataclasses.dataclass
class DeviceGenerators:
    """Generators discovered for a set of devices, grouped by generator kind."""

    # device fqdn -> partial generators found for that device
    partial: Dict[str, List[PartialGenerator]] = dataclasses.field(default_factory=dict)

    # device fqdn -> entire-file generators found for that device
    entire: Dict[str, List[Entire]] = dataclasses.field(default_factory=dict)

    # device fqdn -> json-fragment generators found for that device
    json_fragment: Dict[str, List[JSONFragment]] = dataclasses.field(default_factory=dict)

    def iter_gens(self) -> Iterator[BaseGenerator]:
        """Yield every known generator of every kind."""
        for mapping in (self.partial, self.entire, self.json_fragment):
            yield from itertools.chain.from_iterable(mapping.values())

    def file_gens(self, device_fqdn: str) -> Iterator[Union[Entire, JSONFragment]]:
        """Yield the generators that produce files or file parts for one device."""
        yield from self.entire.get(device_fqdn, [])
        yield from self.json_fragment.get(device_fqdn, [])
84
+
85
+
86
@dataclasses.dataclass
class OldNewDeviceContext:
    """Shared, per-run inputs for _old_new_per_device().

    Bundles the CLI options, the storage handle and everything pre-fetched
    for the whole device set, so the per-device pass does no extra resolution.
    """

    # Config source selector: "running", "empty", "-" (stdin) or a filesystem path.
    config: str
    # Parsed CLI options controlling generation.
    args: GenOptions
    # Inventory/storage backend the generators read from.
    storage: Storage
    # Per-device downloaded files, split by generator type.
    downloaded_files: Dict[Device, DeviceDownloadedFiles]
    # Per-device file download errors.
    failed_files: Dict[Device, Exception]
    # Pre-fetched running configs (see _old_resolve_running).
    running: Dict[str, Dict[str, str]]
    # Errors hit while fetching running configs, keyed by fqdn/hostname.
    failed_running: Dict[str, Exception]
    # When True, do not produce the "new" side at all.
    no_new: bool
    # Data read from stdin ("config", "filter_acl", ...), if any.
    stdin: Optional[Dict[str, Optional[str]]]
    # Annotate generated lines with their origin.
    add_annotations: bool
    # Merge implicit (always-present) rules into the config trees.
    add_implicit: bool
    # Whether device files should be downloaded for file generators.
    do_files_download: bool
    # Generators resolved per device fqdn.
    gens: DeviceGenerators
    # Packages fetched from devices, for required-packages checks.
    fetched_packages: Dict[Device, FrozenSet[str]]
    # Per-device package fetch errors.
    failed_packages: Dict[Device, Exception]
    # Total number of devices in this run (affects error handling: log vs raise).
    device_count: int
    # Print per-generator timing tables when profiling.
    do_print_perf: bool
105
+
106
+
107
@tracing.function
def _old_new_per_device(ctx: OldNewDeviceContext, device: Device, filterer: Filterer) -> OldNewResult:
    """Build the old/new config pair for a single device.

    For CLI devices: parse the existing config ("old"), run partial generators
    ("new"), then narrow both trees with the generators' ACL and the optional
    user filter ACL. For PC (whitebox) devices: load the downloaded files as
    "old". File generators (entire/json-fragment) then run for the device.

    Per-device failures are returned inside OldNewResult.err instead of being
    raised; ACL-exclusivity errors propagate to the caller.
    """
    tracing_connector.get().set_device_attributes(tracing_connector.get().get_current_span(), device)

    start = time.monotonic()
    acl_rules = None
    acl_safe_rules = None
    old = odict()
    safe_old = odict()
    old_files = DeviceDownloadedFiles()
    new = odict()
    safe_new = odict()
    combined_perf = {}
    partial_results = []
    entire_results = []
    implicit_rules: Optional[Dict[str, Any]] = None
    filter_acl_rules: Optional[Dict[str, Any]] = None
    new_json_fragment_files: Dict[str, Dict[str, Any]] = {}
    json_fragment_results: Dict[str, generators.GeneratorJSONFragmentResult] = {}

    if not device.is_pc():
        try:
            text = _old_new_get_config_cli(ctx, device)
        except Exception as exc:
            return OldNewResult(device=device, err=exc)

        if not text and ctx.args.fail_on_empty_config:
            return OldNewResult(
                device=device,
                err=Exception("no existing config retrieved (method: %s)" % ctx.config),
            )

        old = odict()
        if ctx.config != "empty":
            old = tabparser.parse_to_tree(
                text=text,
                splitter=tabparser.make_formatter(device.hw).split,
            )
        if not old:
            # No usable old config: synthesize a baseline with initial generators.
            res = generators.run_partial_initial(device, ctx.storage)
            old = res.config_tree()
            perf = res.perf_mesures()
            if ctx.args.profile and ctx.do_print_perf:
                _print_perf("INITIAL", perf)
        run_args = generators.GeneratorPartialRunArgs(
            device=device,
            storage=ctx.storage,
            use_acl=not ctx.args.no_acl,
            use_acl_safe=ctx.args.acl_safe,
            annotate=ctx.add_annotations,
            generators_context=ctx.args.generators_context,
            no_new=ctx.no_new,
        )
        res = generators.run_partial_generators(ctx.gens.partial[device.fqdn], run_args)
        partial_results = res.partial_results
        perf = res.perf_mesures()
        if ctx.no_new:
            new = odict()
            safe_new = odict()
        elif partial_results:
            # skip one gen with not supported device
            new = res.config_tree()
            safe_new = res.config_tree(safe=True)

        if ctx.args.profile:
            if ctx.do_print_perf:
                _print_perf("PARTIAL", perf)
            combined_perf.update(perf)

        implicit_rules = implicit.compile_rules(device)
        if ctx.add_implicit:
            # Implicit rules describe config lines assumed to always exist on the box.
            old = merge_dicts(old, implicit.config(old, implicit_rules))
            new = merge_dicts(new, implicit.config(new, implicit_rules))
            safe_new = merge_dicts(safe_new, implicit.config(safe_new, implicit_rules))

        if not ctx.args.no_acl:
            # Restrict both trees to the lines covered by the generators' ACL.
            acl_rules = generators.compile_acl_text(res.acl_text(), device.hw.vendor)
            old = (old and patching.apply_acl(old, acl_rules))
            new = patching.apply_acl(
                new,
                acl_rules,
                exclusive=not ctx.args.no_acl_exclusive,
                with_annotations=ctx.add_annotations,
            )
            if ctx.args.acl_safe:
                acl_safe_rules = generators.compile_acl_text(res.acl_safe_text(), device.hw.vendor)
                safe_old = (old and patching.apply_acl(old, acl_safe_rules))
                safe_new = patching.apply_acl(
                    safe_new,
                    acl_safe_rules,
                    exclusive=not ctx.args.no_acl_exclusive,
                    with_annotations=ctx.add_annotations,
                )

        filter_acl_rules = build_filter_acl(filterer, device, ctx.stdin, ctx.args, ctx.config)
        if filter_acl_rules is not None:
            # User-supplied filter: non-fatal, only narrows what is compared.
            old = (old and patching.apply_acl(old, filter_acl_rules, fatal_acl=False))
            new = patching.apply_acl(
                new,
                filter_acl_rules,
                fatal_acl=False,
                with_annotations=ctx.add_annotations,
            )
    else:  # vendor == pc
        try:
            old_files = _old_new_get_config_files(ctx, device)
        except Exception as exc:
            return OldNewResult(device=device, err=exc)

    new_files = {}
    safe_new_files = {}
    if not ctx.no_new:
        if device in ctx.fetched_packages:
            if ctx.args.required_packages_check:
                errors = generators.check_entire_generators_required_packages(ctx.gens.entire[device.fqdn],
                                                                              ctx.fetched_packages[device])
                if errors:
                    error_msg = "; ".join(errors)
                    get_logger(host=device.hostname).error(error_msg)
                    return OldNewResult(device=device, err=Exception(error_msg))
        res = generators.run_file_generators(
            ctx.gens.file_gens(device.fqdn),
            device,
            ctx.storage,
        )

        entire_results = res.entire_results
        json_fragment_results = res.json_fragment_results
        new_files = res.new_files()
        new_json_fragment_files = res.new_json_fragment_files(old_files.json_fragment_files)
        if ctx.args.acl_safe:
            safe_new_files = res.new_files(safe=True)

        if ctx.args.profile:
            perf = res.perf_mesures()
            combined_perf[ALL_GENS] = {"total": time.monotonic() - start}
            combined_perf.update(perf)
            if ctx.do_print_perf:
                _print_perf("ENTIRE", perf)

    return OldNewResult(
        device=device,
        old=old,
        new=new,
        acl_rules=acl_rules,
        old_files=old_files.entire_files,
        new_files=new_files,
        partial_result=partial_results,
        entire_result=entire_results,
        old_json_fragment_files=old_files.json_fragment_files,
        new_json_fragment_files=new_json_fragment_files,
        json_fragment_result=json_fragment_results,
        implicit_rules=implicit_rules,
        perf=combined_perf,
        acl_safe_rules=acl_safe_rules,
        safe_old=safe_old,
        safe_new=safe_new,
        safe_new_files=safe_new_files,
        filter_acl_rules=filter_acl_rules,
    )
267
+
268
+
269
@dataclasses.dataclass
class DeviceDownloadedFiles:
    """Files downloaded from one device, split by the generator type that owns them."""

    # file path -> raw text content, for Entire generators
    entire_files: Dict[str, str] = dataclasses.field(default_factory=dict)

    # file path -> parsed JSON content, for JSONFragment generators
    json_fragment_files: Dict[str, Dict[str, Any]] = dataclasses.field(default_factory=dict)

    def is_empty(self) -> bool:
        """True when no file of either kind is present."""
        return not (self.entire_files or self.json_fragment_files)
279
+
280
+
281
def split_downloaded_files(
    device_flat_files: Dict[str, Optional[str]],
    gens: DeviceGenerators,
    device: Device,
) -> DeviceDownloadedFiles:
    """Split downloaded files per generator type: entire/json_fragment."""
    result = DeviceDownloadedFiles()

    for gen in gens.file_gens(device.fqdn):
        filepath = gen.path(device)
        if filepath not in device_flat_files:
            continue
        content = device_flat_files[filepath]
        if isinstance(gen, Entire):
            result.entire_files[filepath] = content
        elif isinstance(gen, JSONFragment):
            # A missing file is recorded as None so callers can distinguish
            # "absent on the device" from "present but empty".
            result.json_fragment_files[filepath] = json.loads(content) if content is not None else None

    return result
301
+
302
+
303
def split_downloaded_files_multi_device(
    flat_downloaded_files: Dict[Device, Dict[str, Optional[str]]],
    gens: DeviceGenerators,
    devices: List[Device],
) -> Dict[Device, DeviceDownloadedFiles]:
    """Split downloaded files per generator type: entire/json_fragment.

    Args:
        flat_downloaded_files: per-device mapping of file path to raw content.
        gens: generators resolved for the devices.
        devices: devices to process; devices with no downloaded files are skipped.

    Returns:
        Mapping of device to its files split by generator type.
    """
    # Fix: the return annotation previously said Dict[str, ...] although the keys
    # are Device objects (consumed as such by OldNewDeviceContext.downloaded_files).
    return {
        device: split_downloaded_files(flat_downloaded_files[device], gens, device)
        for device in devices
        if device in flat_downloaded_files
    }
314
+
315
+
316
# ====
@tracing.function
def old_new(
    args: GenOptions,
    storage: Storage,
    config: str,
    loader: "Loader",
    filterer: Filterer,
    add_implicit=True,
    add_annotations=False,
    stdin=None,
    device_ids: List[int] = None,
    no_new=False,
    do_files_download=False,
    do_print_perf=True,
):
    """Yield an OldNewResult for every device in device_ids.

    Resolves devices and generators via the loader, pre-fetches running
    configs, device files and packages as required, then runs
    _old_new_per_device() for each device. AclNotExclusiveError is escalated
    to GeneratorError; other per-device failures are embedded into the
    yielded OldNewResult.
    """
    devices = loader.resolve_devices(storage, device_ids)
    gens = loader.resolve_gens(storage, devices)
    running, failed_running = _old_resolve_running(config, devices)
    downloaded_files, failed_files = _old_resolve_files(config, devices, gens, do_files_download)

    if stdin is None:
        stdin = args.stdin(storage=storage, filter_acl=args.filter_acl, config=config)

    fetched_packages, failed_packages = {}, {}
    if do_files_download and config == "running":
        # Packages are fetched to validate entire generators' required packages.
        files_to_download = _get_files_to_download(devices, gens)
        devices_with_files = [device for device in devices if device in files_to_download]
        fetcher = fetcher_connector.get()
        fetched_packages, failed_packages = fetcher.fetch_packages(devices_with_files)

    ctx = OldNewDeviceContext(
        config=config,
        args=args,
        storage=storage,
        downloaded_files=split_downloaded_files_multi_device(downloaded_files, gens, devices),
        failed_files=failed_files,
        running=running,
        failed_running=failed_running,
        no_new=no_new,
        stdin=stdin,
        add_annotations=add_annotations,
        add_implicit=add_implicit,
        do_files_download=do_files_download,
        gens=gens,
        fetched_packages=fetched_packages,
        failed_packages=failed_packages,
        device_count=len(devices),
        do_print_perf=do_print_perf,
    )
    for device in devices:
        logger = get_logger(host=device.hostname)
        try:
            result = _old_new_per_device(ctx, device, filterer)
        except patching.AclNotExclusiveError as err:
            logger.error("ACL error: more than one acl rules matches to this command: %s", err)
            raise GeneratorError from err
        if result is not None:
            yield result
375
+
376
+
377
@tracing.function
def old_raw(args: GenOptions, storage, config, stdin=None,
            do_files_download=False, use_mesh=True,
            ) -> Iterable[Tuple[Device, Union[str, Dict[str, str]]]]:
    """Yield each device's raw existing config, without generating a "new" side.

    CLI devices yield one scrubbed config text each; PC devices yield their
    entire files and/or formatted json-fragment files as path->text dicts
    (possibly two yields for the same device).
    """
    devices = storage.make_devices(args.query, preload_neighbors=True, use_mesh=use_mesh)
    device_gens = _old_resolve_gens(args, storage, devices)
    running, failed_running = _old_resolve_running(config, devices)
    downloaded_files, failed_files = _old_resolve_files(config, devices, device_gens, do_files_download)
    if stdin is None:
        stdin = args.stdin(storage=storage, filter_acl=args.filter_acl, config=config)
    ctx = OldNewDeviceContext(
        config=config,
        args=args,
        storage=storage,
        downloaded_files=split_downloaded_files_multi_device(downloaded_files, device_gens, devices),
        failed_files=failed_files,
        running=running,
        failed_running=failed_running,
        stdin=stdin,
        do_files_download=do_files_download,
        device_count=len(devices),
        # only the "old" side is wanted here
        no_new=True,
        add_annotations=False,
        add_implicit=False,
        gens=DeviceGenerators(),
        fetched_packages={},
        failed_packages={},
        do_print_perf=True,
    )
    for device in devices:
        if not device.is_pc():
            # NOTE(review): rebinding `config` shadows the function argument;
            # harmless here because ctx was already built, but fragile.
            config = _old_new_get_config_cli(ctx, device)
            config = scrub_config(config, device.breed)
            yield device, config
        else:
            files = _old_new_get_config_files(ctx, device)
            if files.entire_files:
                yield device, files.entire_files
            if files.json_fragment_files:
                yield device, {
                    path: jsontools.format_json(data)
                    for path, data in files.json_fragment_files.items()
                }
420
+
421
+
422
@tracing.function
def worker(device_id, args: ShowGenOptions, stdin, loader: "Loader", filterer: Filterer) -> Generator[Tuple[str, str, bool], None, None]:
    """Generate configs for one device, yielding (destination, text, flag) triples.

    Runs old_new() against an empty old config ("/dev/null"), then emits the
    generated entire files, json-fragment files and — for vendors listed in
    platform.VENDOR_REVERSES — the ordered partial config text. The third
    tuple element is always False here.
    """
    span = tracing_connector.get().get_current_span()
    if span:
        span.set_attribute("device.id", device_id)

    with storage_connector.get().storage()(args) as storage:
        for res in old_new(
            args,
            storage,
            config="/dev/null",
            loader=loader,
            filterer=filterer,
            add_implicit=False,
            add_annotations=args.annotate,
            stdin=stdin,
            device_ids=[device_id],
        ):
            new = res.get_new(args.acl_safe)
            new_files = res.get_new_files(args.acl_safe)
            new_file_fragments = res.get_new_file_fragments(args.acl_safe)
            output_driver = output_driver_connector.get()
            device = res.device
            if new is None:
                continue
            for (entire_path, (entire_data, _)) in sorted(new_files.items(), key=itemgetter(0)):
                yield (output_driver.entire_config_dest_path(device, entire_path), entire_data, False)

            for (path, (data, _)) in sorted(new_file_fragments.items(), key=itemgetter(0)):
                dumped_data = json.dumps(data, indent=4, sort_keys=True, ensure_ascii=False)
                yield (output_driver.entire_config_dest_path(device, path), dumped_data, False)

            has_file_result = new_files or new_file_fragments
            # show the partial config when present, or when nothing else was produced
            has_partial_result = new or not has_file_result
            if device.hw.vendor in platform.VENDOR_REVERSES and has_partial_result:
                orderer = patching.Orderer.from_hw(device.hw)
                yield (output_driver.cfg_file_names(device)[0],
                       format_config_blocks(
                           orderer.order_config(new),
                           device.hw,
                           args.indent
                       ),
                       False)
465
+
466
+
467
def old_new_worker(device_id, args: DeployOptions, config, stdin, loader: "Loader", filterer: Filterer):
    """Per-device worker: open a storage context and stream old_new() results."""
    storage_factory = storage_connector.get().storage()
    with storage_factory(args) as storage:
        results = old_new(
            args,
            storage,
            config=config,
            loader=loader,
            filterer=filterer,
            stdin=stdin,
            device_ids=[device_id],
            no_new=args.clear,
            do_files_download=True,
        )
        yield from results
480
+
481
+
482
class OldNewParallel(Parallel):
    """Parallel runner producing old/new config pairs for many devices."""

    def __init__(self, storage: Storage, args: DeployOptions, loader: "Loader", filterer: Filterer):
        self.storage = storage
        # filter-acl text may come from stdin; read it once in the parent process
        stdin = args.stdin(storage=storage, filter_acl=args.filter_acl, config=args.config)
        super().__init__(
            old_new_worker,
            args,
            config=args.config,
            stdin=stdin,
            loader=loader,
            filterer=filterer,
        )
        self.tune_args(args)

    def generated_configs(self, device_ids: List[int]) -> Generator[OldNewResult, None, None]:
        """Run the workers and yield one OldNewResult per device.

        Devices whose worker returned neither a result nor an exception are
        reported with a synthetic "No config returned" error at the end.
        """
        skipped = set(device_ids)

        for task_result in self.irun(device_ids):
            if task_result.exc is not None:
                device = self.storage.get_device(task_result.device_id, use_mesh=False, preload_neighbors=False)
                yield OldNewResult(device=device, err=task_result.exc)
                skipped.discard(task_result.device_id)
            elif task_result.result is not None:
                yield from task_result.result
                skipped.discard(task_result.device_id)

        for device_id in skipped:
            device = self.storage.get_device(device_id, use_mesh=False, preload_neighbors=False)
            yield OldNewResult(device=device, err=Exception(f"No config returned for {device.hostname}"))
511
+
512
+
513
@dataclasses.dataclass
class DeviceFilesToDownload:
    """File paths to fetch from a device, grouped by generator kind.

    NOTE(review): not referenced in this module's visible code;
    _get_files_to_download() returns plain sorted lists instead.
    """

    # paths requested by Entire generators
    entire: List[str] = dataclasses.field(default_factory=list)
    # paths requested by JSONFragment generators
    json_fragment: List[str] = dataclasses.field(default_factory=list)
517
+
518
+
519
@tracing.function
def _get_files_to_download(devices: List[Device], gens: DeviceGenerators) -> Dict[Device, Any]:
    """Collect, per device, the sorted file paths its file generators need.

    On a per-device failure the Exception object is stored in place of the
    path list so the caller can report it later; devices needing no files
    are omitted entirely.
    """
    result: Dict[Device, Any] = {}
    for device in devices:
        wanted = set()
        try:
            for gen in gens.file_gens(device.fqdn):
                try:
                    gen_path = gen.path(device)
                except NotSupportedDevice:
                    continue
                if gen_path:
                    wanted.add(gen_path)
        except Exception as exc:
            result[device] = exc
            continue
        if wanted:
            result[device] = sorted(wanted)
    return result
538
+
539
+
540
def _print_perf(gen_type, perf):
    """Print a per-generator timing table (org-mode format) to stderr.

    `perf` maps generator name -> {"total": seconds, "rt": {method: stats}},
    where each stat entry is a dict with at least "time" and "op" keys.
    Each generator gets one summary row (method is None) followed by one row
    per runtime method; both levels are sorted by total time, descending.
    """
    print(file=sys.stderr)
    print(
        tabulate.tabulate([
            (
                # generator name appears only on its summary row
                (gen if not method else None),
                (method or "." * 30),
                sum(map(itemgetter("time"), stat)),
                # min/95%/max/call counts are omitted on the synthetic summary row
                (min(map(itemgetter("time"), stat)) if method else None),
                (percentile(stat, 0.95, itemgetter("time")) if method else None),
                (max(map(itemgetter("time"), stat)) if method else None),
                (len(stat) if method else None),
                (len(list(filter(
                    lambda item: item in ["call", "disk_write"],
                    map(itemgetter("op"), stat)))) if method else None),
            )

            for (gen, gen_perf) in sorted(
                perf.items(),
                key=(lambda item: item[1]["total"]),
                reverse=True,
            )

            # a synthetic (None, total) entry makes the summary sort with the methods
            for (method, stat) in sorted(
                [(None, [{"time": gen_perf["total"], "op": None}])] + list(gen_perf["rt"].items()),
                key=(lambda item: sum(map(itemgetter("time"), item[1]))),
                reverse=True,
            )
        ],
        [gen_type + "-Generator", "RT", "Total", "Min", "95%", "Max", "Calls", "Direct"],
        tablefmt="orgtbl", floatfmt=".4f",
        ),
        file=sys.stderr,
    )
    print(file=sys.stderr)
575
+
576
+
577
def build_filter_text(filterer, device, stdin, args, config):
    """Assemble the filter-ACL text for a device from CLI options and stdin.

    Sources are concatenated in order: explicit --filter-acl (stdin or file),
    then interface/peer/policy filters produced by the filterer. Returns None
    when no filter option is set at all.
    """
    text = None
    if args.filter_acl:
        text = stdin["filter_acl"]
        if not text:
            # NOTE(review): for a directory --filter-acl the per-host file is
            # looked up under `config`, not under args.filter_acl — confirm.
            if os.path.isdir(args.filter_acl):
                filename = os.path.join(config, "%s.acl" % device.hostname)
            else:
                filename = args.filter_acl
            with open(filename) as fh:
                text = fh.read()

    for selector, method_name in (
        (args.filter_ifaces, "for_ifaces"),
        (args.filter_peers, "for_peers"),
        (args.filter_policies, "for_policies"),
    ):
        if selector:
            text = text + "\n" if text else ""
            text += getattr(filterer, method_name)(device, selector)
    return text
601
+
602
+
603
def build_filter_acl(filterer, device, stdin, args, config):
    """Compile the user filter ACL for a device; None when no filter is configured."""
    filter_text = build_filter_text(filterer, device, stdin, args, config)
    if filter_text is None:
        return None
    return compile_acl_text(
        textwrap.dedent(filter_text),
        device.hw.vendor,
        allow_ignore=True,
    )
611
+
612
+
613
def _existing_cfg_file_name(config_dir: str, device) -> Optional[str]:
    """Return the first existing candidate config file for a device.

    Falls back to the last candidate path (which may not exist), or None when
    the output driver offers no candidate names at all.
    """
    candidate: Optional[str] = None
    for cfg_name in output_driver_connector.get().cfg_file_names(device):
        candidate = os.path.join(config_dir, cfg_name)
        if os.path.exists(candidate):
            return candidate
    return candidate
622
+
623
+
624
def format_config_blocks(config, hw, indent, _level=0):
    """Render a parsed config tree to text with the vendor formatter for `hw`.

    `_level` is unused here; it is kept for caller compatibility.
    """
    formatter = tabparser.make_formatter(hw, indent=indent)
    return formatter.join(config)
627
+
628
+
629
def format_files(files):
    """Render a path->content mapping as one text blob.

    Each file's content is preceded by a `# <path>` header line;
    files are ordered by path.
    """
    chunks = []
    for path in sorted(files):
        chunks.append("# %s" % path)
        chunks.append(files[path])
    return "\n".join(chunks)
637
+
638
+
639
def find_files_relative(path: str) -> Generator[str, None, None]:
    """Recursively find files under *path*, yielding paths relative to it."""
    base = os.path.abspath(path)
    for current_dir, _subdirs, names in os.walk(path):
        for name in names:
            yield os.path.relpath(os.path.join(current_dir, name), base)
646
+
647
+
648
def load_pc_config(path: str, set_root=False) -> Dict[str, str]:
    """Load locally saved whitebox config files into a path->text mapping.

    With set_root=True the keys become absolute paths rooted at "/".
    """
    configs: Dict[str, str] = {}
    for rel_path in find_files_relative(path):
        with open(os.path.join(path, rel_path)) as cfg_file:
            content = cfg_file.read()
        key = os.path.join("/", rel_path) if set_root else rel_path
        configs[key] = content
    return configs
658
+
659
+
660
@tracing.function
def _old_new_get_config_cli(ctx: OldNewDeviceContext, device: Device) -> str:
    """Fetch the device's existing ("old") CLI config text per ctx.config.

    ctx.config selects the source: "empty" -> "", "running" -> pre-fetched
    configs, "-" -> stdin, anything else -> a file (or a per-device file
    inside a directory). May return None on a multi-device file error —
    callers must treat that as "no config".
    """
    if ctx.config == "empty":
        text = ""
    elif ctx.config == "running":
        # NOTE(review): lookup is keyed by the Device object while
        # failed_running is keyed by fqdn/hostname — confirm the fetcher contract.
        text = ctx.running.get(device)
        if text is None:
            exc = (ctx.failed_running.get(device.fqdn) or
                   ctx.failed_running.get(device.hostname) or
                   Exception("I can't get device config and I don't know why"))
            get_logger(host=device.hostname).error("config error %s", exc)
            raise exc
    elif ctx.config == "-":
        text = ctx.stdin["config"]
    else:
        if os.path.isdir(ctx.config):
            filename = _existing_cfg_file_name(ctx.config, device)
        else:
            filename = ctx.config
        try:
            with open(filename) as fh:
                text = fh.read()
        except Exception as exc_info:
            # a missing file counts as an empty config unless the user asked to fail
            if not ctx.args.fail_on_empty_config and isinstance(exc_info, FileNotFoundError):
                return ""
            if ctx.device_count > 1:
                # with many devices: log and skip this one instead of aborting the run
                get_logger(host=device.hostname).error(str(exc_info))
                return None
            raise
    return text
690
+
691
+
692
@tracing.function
def _old_new_get_config_files(ctx: OldNewDeviceContext, device: Device) -> DeviceDownloadedFiles:
    """Fetch the device's existing ("old") files for file-based generators.

    Sources, by ctx.config: "empty" -> nothing; "running" -> files already
    downloaded from the device; a filesystem path -> locally saved whitebox
    configs. Raises on fetch failures; raises NotImplementedError for any
    other config source.
    """
    old_files = DeviceDownloadedFiles()
    if device in ctx.failed_packages:
        exc = (
            ctx.failed_packages.get(device) or
            Exception("I can't get device packages and I don't know why")
        )
        get_logger(host=device.hostname).error(str(exc))
        raise exc
    if ctx.do_files_download:
        if ctx.config == "empty":
            return old_files
        if ctx.config == "running":
            old_files_running = ctx.downloaded_files.get(device)
            if old_files_running and old_files_running.is_empty():
                # download ran but produced nothing: surface the recorded error
                exc = (ctx.failed_files.get(device) or
                       Exception("I can't get device files and I don't know why"))
                get_logger(host=device.hostname).error(str(exc))
                raise exc
            old_files = old_files_running
        elif os.path.exists(ctx.config):
            # try to find config in subdirectory: <ctx.config>/<device_name>.cfg/
            config_path = _existing_cfg_file_name(ctx.config, device)
            if config_path is None:
                # if subdir does not exist, assume the whole dir is our config
                config_path = ctx.config
            if not os.path.isdir(config_path):
                get_logger(host=device.hostname).error("I can't find device files in %s", config_path)
                return old_files
            old_files = split_downloaded_files(load_pc_config(config_path, True), ctx.gens, device)
        else:
            raise NotImplementedError("pc and not running or path")
    return old_files
726
+
727
+
728
@tracing.function
def _old_resolve_gens(args: GenOptions, storage: Storage, devices: List[Device]) -> DeviceGenerators:
    """Build generators for every device and group them by device fqdn."""
    result = DeviceGenerators()
    for device in devices:
        built = generators.build_generators(storage, gens=args, device=device)
        fqdn = device.fqdn
        result.partial[fqdn] = built.partial
        result.entire[fqdn] = built.entire
        result.json_fragment[fqdn] = built.json_fragment
    return result
737
+
738
+
739
@tracing.function
def _old_resolve_running(config: str, devices: List[Device]) -> Tuple[Dict[int, str], Dict[int, Exception]]:
    """Fetch running configs for the devices when ctx.config is "running".

    Uses the module-level `live_configs` pair when it was pre-populated,
    otherwise fetches over the configured fetcher. Returns empty dicts for
    any other config source.
    """
    running, failed_running = {}, {}
    if config == "running":
        global live_configs  # pylint: disable=global-statement
        if live_configs is None:
            # pre-read all configs directly over ssh
            fetcher = fetcher_connector.get()
            running, failed_running = fetcher.fetch(devices)
        else:
            running, failed_running = live_configs  # pylint: disable=unpacking-non-sequence
    return running, failed_running
751
+
752
+
753
@tracing.function
def _old_resolve_files(config: str,
                       devices: List[Device],
                       gens: DeviceGenerators,
                       do_files_download: bool,
                       ) -> Tuple[Dict[Device, Dict[str, Optional[str]]], Dict[Device, Exception]]:
    """Download generator-declared files from devices when diffing "running".

    Returns (downloaded, failed); both empty unless file download was
    requested and the config source is "running".
    """
    fetched: Dict[Device, Dict[str, Optional[str]]] = {}
    failed: Dict[Device, Exception] = {}
    if not (do_files_download and config == "running"):
        return fetched, failed

    files_to_download = _get_files_to_download(devices, gens)
    targets = [device for device in devices if device in files_to_download]
    if targets:
        fetcher = fetcher_connector.get()
        fetched, failed = fetcher.fetch(targets, files_to_download=files_to_download)
    return fetched, failed
768
+
769
+
770
class Loader:
    """Resolves and caches devices and their generators for one annet run.

    Built once in the parent process; worker processes re-attach their own
    storage to the cached objects via resolve_devices()/resolve_gens().
    """

    def __init__(self, storage: Storage, args: GenOptions, no_empty_warning: bool = False) -> None:
        self._args = args
        self._storage = storage
        # suppress the "No devices found" log message when True
        self._no_empty_warning = no_empty_warning
        self._devices_map: Optional[Dict[int, Device]] = None
        self._gens: Optional[DeviceGenerators] = None

        self._preload()

    def _preload(self) -> None:
        """Resolve devices from the query and eagerly build their generators."""
        with tracing_connector.get().start_as_current_span("Resolve devices"):
            devices = self._storage.make_devices(
                self._args.query,
                preload_neighbors=True,
                use_mesh=not self._args.no_mesh,
                preload_extra_fields=True,
            )
            if not devices and not self._no_empty_warning:
                get_logger().error("No devices found for %s", self._args.query)
                return

        self._devices_map = {d.id: d for d in devices}
        self._gens = _old_resolve_gens(self._args, self._storage, devices)

    @property
    def device_fqdns(self) -> Dict[int, str]:
        # device id -> fqdn for every resolved device ({} when none resolved)
        return {d.id: d.fqdn for d in self._devices_map.values()} if self._devices_map else {}

    @property
    def device_ids(self) -> List[int]:
        # ids of all resolved devices
        return list(self.device_fqdns)

    @property
    def devices(self) -> List[Device]:
        # all resolved devices, or [] when resolution found nothing
        if self._devices_map:
            return list(self._devices_map.values())
        return []

    def resolve_devices(self, storage: Storage, device_ids: Iterable[int]) -> List[Device]:
        """Return the cached devices for the ids, re-bound to the given storage."""
        devices = []
        for device_id in device_ids:
            device = self._devices_map[device_id]

            # can not use self._storage here, we can be in another process
            device.storage = storage

            devices.append(device)
        return devices

    def resolve_gens(self, storage: Storage, devices: Iterable[Device]) -> DeviceGenerators:
        """Return cached generators re-bound to the given storage, or rebuild them."""
        if self._gens is not None:
            for gen in self._gens.iter_gens():
                # can not use self._storage here, we can be in another process
                gen.storage = storage
            return self._gens

        with tracing_connector.get().start_as_current_span("Resolve gens"):
            return _old_resolve_gens(self._args, storage, devices)