annet 1.1.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of annet might be problematic.

annet/annet.py CHANGED
@@ -2,7 +2,7 @@
  import sys

  import annet
- from annet import argparse, cli, generators, hardware, lib, rulebook
+ from annet import argparse, cli, generators, hardware, lib, rulebook, diff


  # =====
@@ -13,6 +13,7 @@ def main():
  cli.fill_base_args(parser, annet.__name__, "configs/logging.yaml")
  rulebook.rulebook_provider_connector.set(rulebook.DefaultRulebookProvider)
  hardware.hardware_connector.set(hardware.AnnetHardwareProvider)
+ diff.file_differ_connector.set(diff.UnifiedFileDiffer)

  parser.add_commands(parser.find_subcommands(cli.list_subcommands()))
  try:
annet/api/__init__.py CHANGED
@@ -1,5 +1,4 @@
  import abc
- import difflib
  import os
  import re
  import sys
@@ -37,6 +36,7 @@ from annet import diff as ann_diff
  from annet import filtering
  from annet import gen as ann_gen
  from annet import patching, rulebook, tabparser, tracing
+ from annet.diff import file_differ_connector
  from annet.rulebook import deploying
  from annet.filtering import Filterer
  from annet.hardware import hardware_connector
@@ -52,8 +52,6 @@ from annet.storage import Device, get_storage
  from annet.types import Diff, ExitCode, OldNewResult, Op, PCDiff, PCDiffFile


- live_configs = ann_gen.live_configs
-
  DEFAULT_INDENT = " "


@@ -242,29 +240,22 @@ def gen(args: cli_args.ShowGenOptions, loader: ann_gen.Loader):


  # =====
- def _diff_file(old_text: Optional[str], new_text: Optional[str], context=3):
- old_lines = old_text.splitlines() if old_text else []
- new_lines = new_text.splitlines() if new_text else []
- context = max(len(old_lines), len(new_lines)) if context is None else context
- return list(difflib.unified_diff(old_lines, new_lines, n=context, lineterm=""))
-
-
- def _diff_files(old_files, new_files, context=3):
+ def _diff_files(hw, old_files, new_files):
  ret = {}
+ differ = file_differ_connector.get()
  for (path, (new_text, reload_data)) in new_files.items():
  old_text = old_files.get(path)
  is_new = old_text is None
- diff_lines = _diff_file(old_text, new_text, context=context)
+ diff_lines = differ.diff_file(hw, path, old_text, new_text)
  ret[path] = (diff_lines, reload_data, is_new)
  return ret


  def patch(args: cli_args.ShowPatchOptions, loader: ann_gen.Loader):
  """ Сгенерировать патч для устройств """
- global live_configs # pylint: disable=global-statement
  if args.config == "running":
  fetcher = annet.deploy.get_fetcher()
- live_configs = annet.lib.do_async(fetcher.fetch(loader.devices, processes=args.parallel))
+ ann_gen.live_configs = annet.lib.do_async(fetcher.fetch(loader.devices, processes=args.parallel))
  stdin = args.stdin(filter_acl=args.filter_acl, config=args.config)

  filterer = filtering.filterer_connector.get()
@@ -355,9 +346,9 @@ def diff(

  pc_diff_files = []
  if res.old_files or new_files:
- pc_diff_files.extend(_pc_diff(device.hostname, res.old_files, new_files))
+ pc_diff_files.extend(_pc_diff(res.device.hw, device.hostname, res.old_files, new_files))
  if res.old_json_fragment_files or new_json_fragment_files:
- pc_diff_files.extend(_json_fragment_diff(device.hostname, res.old_json_fragment_files, new_json_fragment_files))
+ pc_diff_files.extend(_json_fragment_diff(res.device.hw, device.hostname, res.old_json_fragment_files, new_json_fragment_files))

  if pc_diff_files:
  pc_diff_files.sort(key=lambda f: f.label)
@@ -478,18 +469,19 @@ class PCDeployerJob(DeployerJob):
  upload_files: Dict[str, bytes] = {}
  reload_cmds: Dict[str, bytes] = {}
  generator_types: Dict[str, GeneratorType] = {}
+ differ = file_differ_connector.get()
  for generator_type, pc_files in [(GeneratorType.ENTIRE, new_files), (GeneratorType.JSON_FRAGMENT, new_json_fragment_files)]:
  for file, (file_content_or_json_cfg, cmds) in pc_files.items():
  if generator_type == GeneratorType.ENTIRE:
  file_content: str = file_content_or_json_cfg
- diff_content = "\n".join(_diff_file(old_files.get(file), file_content))
+ diff_content = "\n".join(differ.diff_file(res.device.hw, file, old_files.get(file), file_content))
  else: # generator_type == GeneratorType.JSON_FRAGMENT
  old_json_cfg = old_json_fragment_files[file]
  json_patch = jsontools.make_patch(old_json_cfg, file_content_or_json_cfg)
  file_content = jsontools.format_json(json_patch)
  old_text = jsontools.format_json(old_json_cfg)
  new_text = jsontools.format_json(file_content_or_json_cfg)
- diff_content = "\n".join(_diff_file(old_text, new_text))
+ diff_content = "\n".join(differ.diff_file(res.device.hw, file, old_text, new_text))

  if diff_content or force_reload:
  self._has_diff |= True
@@ -624,7 +616,6 @@ class Deployer:
  return ans

  def check_diff(self, result: annet.deploy.DeployResult, loader: ann_gen.Loader):
- global live_configs # pylint: disable=global-statement
  success_device_ids = []
  for host, hres in result.results.items():
  device = self.fqdn_to_device[host]
@@ -639,7 +630,7 @@ class Deployer:
  config="running",
  )
  if diff_args.query:
- live_configs = None
+ ann_gen.live_configs = None
  diffs = diff(diff_args, loader, success_device_ids, self._filterer)
  non_pc_diffs = {dev: diff for dev, diff in diffs.items() if not isinstance(diff, PCDiff)}
  devices_to_diff = ann_diff.collapse_diffs(non_pc_diffs)
@@ -682,13 +673,23 @@ async def adeploy(
  ) -> ExitCode:
  """ Сгенерировать конфиг для устройств и задеплоить его """
  ret: ExitCode = 0
- global live_configs # pylint: disable=global-statement
- live_configs = await fetcher.fetch(devices=loader.devices, processes=args.parallel)
- pool = ann_gen.OldNewParallel(args, loader, filterer)
+ ann_gen.live_configs = await fetcher.fetch(devices=loader.devices, processes=args.parallel)

- for res in pool.generated_configs(loader.devices):
+ device_ids = [d.id for d in loader.devices]
+ for res in ann_gen.old_new(
+ args,
+ config=args.config,
+ loader=loader,
+ no_new=args.clear,
+ stdin=args.stdin(filter_acl=args.filter_acl, config=args.config),
+ do_files_download=True,
+ device_ids=device_ids,
+ filterer=filterer,
+ ):
  # Меняем exit code если хоть один device ловил exception
  if res.err is not None:
+ if not args.tolerate_fails:
+ raise res.err
  get_logger(res.device.hostname).error("error generating configs", exc_info=res.err)
  ret |= 2 ** 3
  job = DeployerJob.from_device(res.device, args)
@@ -746,12 +747,16 @@ def file_diff(args: cli_args.FileDiffOptions):
  def file_diff_worker(old_new: Tuple[str, str], args: cli_args.FileDiffOptions) -> Generator[
  Tuple[str, str, bool], None, None]:
  old_path, new_path = old_new
+ hw = args.hw
+ if isinstance(args.hw, str):
+ hw = HardwareView(args.hw, "")
+
  if os.path.isdir(old_path) and os.path.isdir(new_path):
  hostname = os.path.basename(new_path)
  new_files = {relative_cfg_path: (cfg_text, "") for relative_cfg_path, cfg_text in
  ann_gen.load_pc_config(new_path).items()}
  old_files = ann_gen.load_pc_config(old_path)
- for diff_file in _pc_diff(hostname, old_files, new_files):
+ for diff_file in _pc_diff(hw, hostname, old_files, new_files):
  diff_text = (
  "\n".join(diff_file.diff_lines)
  if args.no_color
@@ -791,8 +796,8 @@ def file_patch_worker(old_new: Tuple[str, str], args: cli_args.FileDiffOptions)
  yield dest_name, patch_text, False


- def _pc_diff(hostname: str, old_files: Dict[str, str], new_files: Dict[str, str]) -> Generator[PCDiffFile, None, None]:
- sorted_lines = sorted(_diff_files(old_files, new_files).items())
+ def _pc_diff(hw, hostname: str, old_files: Dict[str, str], new_files: Dict[str, str]) -> Generator[PCDiffFile, None, None]:
+ sorted_lines = sorted(_diff_files(hw, old_files, new_files).items())
  for (path, (diff_lines, _reload_data, is_new)) in sorted_lines:
  if not diff_lines:
  continue
@@ -803,6 +808,7 @@ def _pc_diff(hostname: str, old_files: Dict[str, str], new_files: Dict[str, str]


  def _json_fragment_diff(
+ hw,
  hostname: str,
  old_files: Dict[str, Any],
  new_files: Dict[str, Tuple[Any, Optional[str]]],
@@ -820,7 +826,7 @@ def _json_fragment_diff(
  ret[path] = (jsontools.format_json(cfg), reload_cmd)
  return ret
  jold, jnew = jsonify_multi(old_files), jsonify_multi_with_cmd(new_files)
- return _pc_diff(hostname, jold, jnew)
+ return _pc_diff(hw, hostname, jold, jnew)


  def guess_hw(config_text: str):
annet/diff.py CHANGED
@@ -1,6 +1,9 @@
+ import abc
+ import difflib
  import re
  from itertools import groupby
- from typing import Generator, List, Mapping, Tuple, Union
+ from pathlib import Path
+ from typing import Generator, List, Mapping, Tuple, Union, Protocol

  from annet.annlib.diff import ( # pylint: disable=unused-import
  colorize_line,
@@ -9,10 +12,12 @@ from annet.annlib.diff import ( # pylint: disable=unused-import
  gen_pre_as_diff,
  resort_diff,
  )
+ from annet.annlib.netdev.views.hardware import HardwareView
  from annet.annlib.output import format_file_diff

- from annet import patching
+ from annet import patching, rulebook, tabparser, hardware
  from annet.cli_args import ShowDiffOptions
+ from annet.connectors import CachedConnector
  from annet.output import output_driver_connector
  from annet.storage import Device
  from annet.tabparser import make_formatter
@@ -82,3 +87,67 @@ def collapse_diffs(diffs: Mapping[Device, Diff]) -> Mapping[Tuple[Device, ...],
  res[tuple(x[0] for x in collapsed_diff)] = collapsed_diff[0][1][0]

  return res
+
+
+ class FileDiffer(Protocol):
+ @abc.abstractmethod
+ def diff_file(self, hw: HardwareView, path: str | Path, old: str, new: str) -> list[str]:
+ raise NotImplementedError
+
+
+ class UnifiedFileDiffer(FileDiffer):
+ def __init__(self):
+ self.context: int = 3
+
+ def diff_file(self, hw: HardwareView, path: str | Path, old: str, new: str) -> list[str]:
+ """Calculate the differences for config files.
+
+ Args:
+ hw: device hardware info
+ path: path to file on a device
+ old (Optional[str]): The old file content.
+ new (Optional[str]): The new file content.
+
+ Returns:
+ List[str]: List of difference lines.
+ """
+ return self._diff_text_file(old, new)
+
+ def _diff_text_file(self, old, new):
+ """Calculate the differences for plaintext files."""
+ context = self.context
+ old_lines = old.splitlines() if old else []
+ new_lines = new.splitlines() if new else []
+ context = max(len(old_lines), len(new_lines)) if context is None else context
+ return list(difflib.unified_diff(old_lines, new_lines, n=context, lineterm=""))
+
+
+ class FrrFileDiffer(UnifiedFileDiffer):
+ def diff_file(self, hw: HardwareView, path: str | Path, old: str, new: str) -> list[str]:
+ if (hw.PC.Mellanox or hw.PC.NVIDIA) and (path == "/etc/frr/frr.conf"):
+ return self._diff_frr_conf(hw, old, new)
+ return super().diff_file(hw, path, old, new)
+
+ def _diff_frr_conf(self, hw: HardwareView, old_text: str | None, new_text: str | None) -> list[str]:
+ """Calculate the differences for frr.conf files."""
+ indent = " "
+ rb = rulebook.rulebook_provider_connector.get()
+ rulebook_data = rb.get_rulebook(hw)
+ formatter = tabparser.make_formatter(hw, indent=indent)
+
+ old_tree = tabparser.parse_to_tree(old_text or "", splitter=formatter.split)
+ new_tree = tabparser.parse_to_tree(new_text or "", splitter=formatter.split)
+
+ diff_tree = patching.make_diff(old_tree, new_tree, rulebook_data, [])
+ pre_diff = patching.make_pre(diff_tree)
+ diff_iterator = gen_pre_as_diff(pre_diff, show_rules=False, indent=indent, no_color=True)
+
+ return [line.rstrip() for line in diff_iterator if "frr version" not in line]
+
+
+ class _FileDifferConnector(CachedConnector[FileDiffer]):
+ name = "Device file diff processor"
+ ep_name = "file_differ"
+
+
+ file_differ_connector = _FileDifferConnector()
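The connector added above mirrors annet's other connectors: the CLI entry point in annet/annet.py now calls diff.file_differ_connector.set(diff.UnifiedFileDiffer), and annet/api/__init__.py fetches the active differ with file_differ_connector.get(). A minimal sketch of exercising that API directly, assuming annet 2.0.0 is installed; the standalone-script framing and sample texts are illustrative and not part of the package:

    from annet import diff

    # Register the default differ, exactly as annet/annet.py does at startup.
    diff.file_differ_connector.set(diff.UnifiedFileDiffer)

    differ = diff.file_differ_connector.get()
    old_text = "hostname spine1\ninterface swp1\n"
    new_text = "hostname spine1\ninterface swp1\ninterface swp2\n"
    # UnifiedFileDiffer ignores hw and path and falls back to difflib.unified_diff.
    for line in differ.diff_file(None, "/etc/network/interfaces", old_text, new_text):
        print(line)

A deployment that wants rulebook-driven diffs for /etc/frr/frr.conf on Mellanox/NVIDIA whitebox devices could register diff.FrrFileDiffer instead, or plug in its own FileDiffer implementation through the file_differ entry point declared by _FileDifferConnector.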
annet/gen.py CHANGED
@@ -526,52 +526,6 @@ def worker(device_id, args: ShowGenOptions, stdin, loader: "Loader", filterer: F
  False)


- def old_new_worker(device_id, args: DeployOptions, config, stdin, loader: "Loader", filterer: Filterer):
- for res in old_new(
- args,
- config=config,
- loader=loader,
- filterer=filterer,
- stdin=stdin,
- device_ids=[device_id],
- no_new=args.clear,
- do_files_download=True,
- ):
- if res.err is not None and not args.tolerate_fails:
- raise res.err
- yield res
-
-
- class OldNewParallel(Parallel):
- def __init__(self, args: DeployOptions, loader: "Loader", filterer: Filterer):
- stdin = args.stdin(filter_acl=args.filter_acl, config=args.config)
- super().__init__(
- old_new_worker,
- args,
- config=args.config,
- stdin=stdin,
- loader=loader,
- filterer=filterer,
- )
- self.tune_args(args)
- self.tolerate_fails = args.tolerate_fails
-
- def generated_configs(self, devices: List[Device]) -> Generator[OldNewResult, None, None]:
- devices_by_id = {device.id: device for device in devices}
- device_ids = list(devices_by_id)
-
- for task_result in self.irun(device_ids, self.tolerate_fails):
- if task_result.exc is not None:
- device = devices_by_id.pop(task_result.device_id)
- yield OldNewResult(device=device, err=task_result.exc)
- elif task_result.result is not None:
- yield from task_result.result
- devices_by_id.pop(task_result.device_id)
-
- for device in devices_by_id.values():
- yield OldNewResult(device=device, err=Exception(f"No config returned for {device.hostname}"))
-
-
  @dataclasses.dataclass
  class DeviceFilesToDownload:
  entire: List[str] = dataclasses.field(default_factory=list)
annet/output.py CHANGED
@@ -155,8 +155,15 @@ class OutputDriverBasic(OutputDriver):
  ret.append((label, getattr(exc, "formatted_output", f"{repr(exc)} (formatted_output is absent)"), True))
  return ret

- def cfg_file_names(self, device: Device) -> List[str]:
- return [f"{device.hostname}.cfg"]
+ def cfg_file_names(self, device: Device) -> list[str]:
+ res = []
+ if device.hostname:
+ res.append(f"{device.hostname}.cfg")
+ if device.id is not None and device.id != "":
+ res.append(f"_id_{device.id}.cfg")
+ if not res:
+ raise RuntimeError("Neither hostname nor id is known for device")
+ return res

  def entire_config_dest_path(self, device, config_path: str) -> str:
  """Формирует путь к конфигу в директории destname.
annet/rpl/__init__.py CHANGED
@@ -16,11 +16,12 @@ __all__ = [
  "RoutingPolicy",
  "CommunityActionValue",
  "PrefixMatchValue",
+ "OrLonger",
  ]

  from .action import Action, ActionType, SingleAction
  from .condition import AndCondition, Condition, ConditionOperator, SingleCondition
- from .match_builder import R, MatchField, PrefixMatchValue
+ from .match_builder import R, MatchField, PrefixMatchValue, OrLonger
  from .policy import RoutingPolicyStatement, RoutingPolicy
  from .result import ResultType
  from .routemap import RouteMap, Route
annet/rpl/match_builder.py CHANGED
@@ -63,11 +63,15 @@ class SetConditionFactory(Generic[ValueT]):
  return SingleCondition(self.field, ConditionOperator.HAS_ANY, values)


+ # OrLonger represents a pair of (le, ge)
+ # for prefix mask length match in prefix-lists
+ OrLonger = tuple[Optional[int], Optional[int]]
+
+
  @dataclass(frozen=True)
  class PrefixMatchValue:
  names: tuple[str, ...]
- greater_equal: Optional[int]
- less_equal: Optional[int]
+ or_longer: OrLonger = (None, None)


  class Checkable:
@@ -91,23 +95,23 @@ class Checkable:
  def match_v6(
  self,
  *names: str,
- or_longer: tuple[Optional[int], Optional[int]] = (None, None),
+ or_longer: OrLonger = (None, None),
  ) -> SingleCondition[PrefixMatchValue]:
  return SingleCondition(
  MatchField.ipv6_prefix,
  ConditionOperator.CUSTOM,
- PrefixMatchValue(names, greater_equal=or_longer[0], less_equal=or_longer[1]),
+ PrefixMatchValue(names, or_longer),
  )

  def match_v4(
  self,
  *names: str,
- or_longer: tuple[Optional[int], Optional[int]] = (None, None),
+ or_longer: OrLonger = (None, None),
  ) -> SingleCondition[PrefixMatchValue]:
  return SingleCondition(
  MatchField.ip_prefix,
  ConditionOperator.CUSTOM,
- PrefixMatchValue(names, greater_equal=or_longer[0], less_equal=or_longer[1]),
+ PrefixMatchValue(names, or_longer),
  )


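A short sketch of the OrLonger-based API introduced above; the prefix-list name is illustrative. One caveat: the inline comment added to match_builder.py describes OrLonger as a pair of (le, ge), while the generator code later in this diff unpacks it as ge, le and builds names as <name>_<ge>_<le>, so the sketch follows the generator usage.

    from annet.rpl import OrLonger, PrefixMatchValue

    or_longer: OrLonger = (24, 32)  # treated as (ge, le) by the generators in this diff
    match = PrefixMatchValue(("IPV4_LIST_EXAMPLE",), or_longer)
    print(match.or_longer)  # (24, 32)

    # 1.x equivalent, removed in this release:
    #   PrefixMatchValue(("IPV4_LIST_EXAMPLE",), greater_equal=24, less_equal=32)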
annet/rpl_generators/__init__.py CHANGED
@@ -10,14 +10,16 @@ __all__ = [
  "RDFilterFilterGenerator",
  "RDFilter",
  "IpPrefixList",
+ "IpPrefixListMember",
  "PrefixListFilterGenerator",
  "get_policies",
+ "ip_prefix_list",
  ]

  from .aspath import AsPathFilterGenerator
  from .community import CommunityListGenerator
  from .cumulus_frr import CumulusPolicyGenerator
- from .entities import CommunityList, AsPathFilter, CommunityType, CommunityLogic, RDFilter, IpPrefixList
+ from .entities import CommunityList, AsPathFilter, CommunityType, CommunityLogic, RDFilter, IpPrefixList, IpPrefixListMember, ip_prefix_list
  from .execute import get_policies
  from .policy import RoutingPolicyGenerator
  from .prefix_lists import PrefixListFilterGenerator
annet/rpl_generators/cumulus_frr.py CHANGED
@@ -14,7 +14,7 @@ from .entities import (
  AsPathFilter, IpPrefixList, CommunityList, CommunityLogic, CommunityType,
  mangle_united_community_list_name, PrefixListNameGenerator,
  )
- from .prefix_lists import get_used_prefix_lists, new_prefix_list_name_generator
+

  FRR_RESULT_MAP = {
  ResultType.ALLOW: "permit",
@@ -51,12 +51,6 @@ class CumulusPolicyGenerator(ABC):
  def get_prefix_lists(self, device: Any) -> Sequence[IpPrefixList]:
  raise NotImplementedError()

- def get_used_prefix_lists(self, device: Any, name_generator: PrefixListNameGenerator) -> Sequence[IpPrefixList]:
- return get_used_prefix_lists(
- prefix_lists=self.get_prefix_lists(device),
- name_generator=name_generator,
- )
-
  @abstractmethod
  def get_community_lists(self, device: Any) -> list[CommunityList]:
  raise NotImplementedError()
@@ -66,8 +60,9 @@ class CumulusPolicyGenerator(ABC):
  raise NotImplementedError

  def generate_cumulus_rpl(self, device: Any) -> Iterator[Sequence[str]]:
+ prefix_lists = self.get_prefix_lists(device)
  policies = self.get_policies(device)
- prefix_list_name_generator = new_prefix_list_name_generator(policies)
+ prefix_list_name_generator = PrefixListNameGenerator(prefix_lists, policies)

  communities = {c.name: c for c in self.get_community_lists(device)}
  yield from self._cumulus_as_path_filters(device, policies)
@@ -89,60 +84,46 @@ class CumulusPolicyGenerator(ABC):

  def _cumulus_prefix_list(
  self,
- name: str,
  ip_type: Literal["ipv6", "ip"],
- match: PrefixMatchValue,
  plist: IpPrefixList,
  ) -> Iterable[Sequence[str]]:
- for i, prefix in enumerate(plist.members):
- addr_mask = ip_interface(prefix)
+ for i, m in enumerate(plist.members):
+ ge, le = m.or_longer
  yield (
  ip_type,
  "prefix-list",
- name,
+ plist.name,
  f"seq {i * 5 + 5}",
- "permit", f"{addr_mask.ip}/{addr_mask.network.prefixlen}",
+ "permit", str(m.prefix),
  ) + (
- ("ge", str(match.greater_equal)) if match.greater_equal is not None else ()
+ ("ge", str(ge)) if ge is not None else ()
  ) + (
- ("le", str(match.less_equal)) if match.less_equal is not None else ()
+ ("le", str(le)) if le is not None else ()
  )

  def _cumulus_prefix_lists(
  self, device: Any,
  policies: list[RoutingPolicy],
- prefix_list_name_generator: PrefixListNameGenerator,
+ name_generator: PrefixListNameGenerator,
  ) -> Iterable[Sequence[str]]:
- plists = {p.name: p for p in self.get_used_prefix_lists(device, prefix_list_name_generator)}
- if not plists.values():
- return
-
- precessed_names = set()
+ processed_names = set()
  for policy in policies:
  for statement in policy.statements:
  cond: SingleCondition[PrefixMatchValue]
  for cond in statement.match.find_all(MatchField.ip_prefix):
  for name in cond.value.names:
- mangled_name = prefix_list_name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._cumulus_prefix_list(mangled_name, "ip", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._cumulus_prefix_list("ip", plist)
+ processed_names.add(plist.name)
  for cond in statement.match.find_all(MatchField.ipv6_prefix):
  for name in cond.value.names:
- mangled_name = prefix_list_name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._cumulus_prefix_list(mangled_name, "ipv6", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._cumulus_prefix_list("ipv6", plist)
+ processed_names.add(plist.name)
  yield "!"

  def get_used_united_community_lists(
@@ -231,7 +212,7 @@ class CumulusPolicyGenerator(ABC):
  self,
  device: Any,
  condition: SingleCondition[Any],
- prefix_list_name_generator: PrefixListNameGenerator,
+ name_generator: PrefixListNameGenerator,
  ) -> Iterator[Sequence[str]]:
  if condition.field == MatchField.community:
  for comm_name in self._get_match_community_names(condition):
@@ -251,21 +232,13 @@ class CumulusPolicyGenerator(ABC):
  return
  if condition.field == MatchField.ip_prefix:
  for name in condition.value.names:
- mangled_name = prefix_list_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "match", "ip address prefix-list", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "match", "ip address prefix-list", plist.name
  return
  if condition.field == MatchField.ipv6_prefix:
  for name in condition.value.names:
- mangled_name = prefix_list_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "match", "ipv6 address prefix-list", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "match", "ipv6 address prefix-list", plist.name
  return
  if condition.operator is not ConditionOperator.EQ:
  raise NotImplementedError(
annet/rpl_generators/entities.py CHANGED
@@ -1,8 +1,10 @@
- from collections import defaultdict
+ from ipaddress import IPv4Network, IPv6Network, ip_network
  from collections.abc import Sequence
  from dataclasses import dataclass
  from enum import Enum
- from typing import Optional
+ from typing import Optional, List
+
+ from annet.rpl import RoutingPolicy, PrefixMatchValue, OrLonger


  class CommunityLogic(Enum):
@@ -40,10 +42,48 @@ class AsPathFilter:
  filters: Sequence[str]


- @dataclass(frozen=True)
+ @dataclass
+ class IpPrefixListMember:
+ prefix: IPv4Network | IPv6Network
+ or_longer: OrLonger = (None, None)
+
+ def __post_init__(self):
+ self.prefix = ip_network(self.prefix)
+
+
+ @dataclass
  class IpPrefixList:
  name: str
- members: Sequence[str]
+ members: list[IpPrefixListMember]
+
+ def __post_init__(self):
+ for (i, m) in enumerate(self.members):
+ if isinstance(m, str):
+ self.members[i] = IpPrefixListMember(m)
+
+
+ def ip_prefix_list(
+ name: str,
+ members_or_str: Sequence[IpPrefixListMember | str],
+ or_longer: OrLonger = (None, None),
+ ) -> IpPrefixList:
+ members: List[IpPrefixListMember] = []
+ for m in members_or_str:
+ if isinstance(m, str):
+ m = IpPrefixListMember(
+ prefix=ip_network(m),
+ or_longer=or_longer,
+ )
+ elif m.or_longer == (None, None):
+ m = IpPrefixListMember(
+ prefix=m.prefix,
+ or_longer=or_longer,
+ )
+ members.append(m)
+ return IpPrefixList(
+ name=name,
+ members=members,
+ )


  def arista_well_known_community(community: str) -> str:
@@ -58,26 +98,29 @@ def mangle_united_community_list_name(values: Sequence[str]) -> str:


  class PrefixListNameGenerator:
- def __init__(self):
- self._prefix_lists = defaultdict(set)
-
- def add_prefix(self, name: str, greater_equal: Optional[int], less_equal: Optional[int]) -> None:
- self._prefix_lists[name].add((greater_equal, less_equal))
-
- def is_used(self, name: str):
- return name in self._prefix_lists
-
- def get_prefix_name(self, name: str, greater_equal: Optional[int], less_equal: Optional[int]) -> str:
- if len(self._prefix_lists[name]) == 1:
- return name
- if greater_equal is less_equal is None:
- return name
- if greater_equal is None:
- ge_str = "unset"
- else:
- ge_str = str(greater_equal)
- if less_equal is None:
- le_str = "unset"
- else:
- le_str = str(less_equal)
- return f"{name}_{ge_str}_{le_str}"
+ def __init__(self, prefix_lists: Sequence[IpPrefixList], policies: Sequence[RoutingPolicy]):
+ self._prefix_lists = {x.name: x for x in prefix_lists}
+ self._policies = {x.name: x for x in policies} # this is here for a later use ~azryve@
+
+ def get_prefix(self, name: str, match: PrefixMatchValue) -> IpPrefixList:
+ orig_prefix = self._prefix_lists[name]
+ override_name: Optional[str] = None
+ override_orlonger: Optional[OrLonger] = None
+
+ if any(match.or_longer):
+ ge, le = match.or_longer
+ ge_str = "unset" if ge is None else str(ge)
+ le_str = "unset" if le is None else str(le)
+ override_name = f"{orig_prefix.name}_{ge_str}_{le_str}"
+ override_orlonger = match.or_longer
+
+ return IpPrefixList(
+ name=override_name or name,
+ members=[
+ IpPrefixListMember(
+ x.prefix,
+ or_longer=override_orlonger or x.or_longer,
+ )
+ for x in orig_prefix.members
+ ],
+ )
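A hedged sketch of the new prefix-list helpers defined above; the list name and prefixes are illustrative, and the empty policies argument relies on the in-code note that _policies is only stored for later use.

    from annet.rpl import PrefixMatchValue
    from annet.rpl_generators import IpPrefixListMember, ip_prefix_list
    from annet.rpl_generators.entities import PrefixListNameGenerator

    # Plain strings become IpPrefixListMember entries; the shared or_longer is
    # applied to members that do not carry their own (ge, le) pair.
    plist = ip_prefix_list(
        "IPV4_LIST_EXAMPLE",
        ["10.0.0.0/8", IpPrefixListMember("192.168.0.0/16", or_longer=(24, 28))],
        or_longer=(8, 32),
    )

    gen = PrefixListNameGenerator([plist], policies=[])
    renamed = gen.get_prefix("IPV4_LIST_EXAMPLE", PrefixMatchValue(("IPV4_LIST_EXAMPLE",), (24, 32)))
    print(renamed.name)                  # IPV4_LIST_EXAMPLE_24_32
    print(renamed.members[0].or_longer)  # (24, 32) -- the match override wins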
annet/rpl_generators/policy.py CHANGED
@@ -12,8 +12,9 @@ from annet.rpl.statement_builder import AsPathActionValue, NextHopActionValue, T
  from annet.rpl_generators.entities import (
  arista_well_known_community,
  CommunityList, RDFilter, PrefixListNameGenerator, CommunityLogic, mangle_united_community_list_name,
+ IpPrefixList,
  )
- from annet.rpl_generators.prefix_lists import new_prefix_list_name_generator
+

  HUAWEI_MATCH_COMMAND_MAP: dict[str, str] = {
  MatchField.as_path_filter: "as-path-filter {option_value}",
@@ -62,6 +63,10 @@ ARISTA_THEN_COMMAND_MAP: dict[str, str] = {
  class RoutingPolicyGenerator(PartialGenerator, ABC):
  TAGS = ["policy", "rpl", "routing"]

+ @abstractmethod
+ def get_prefix_lists(self, device: Any) -> list[IpPrefixList]:
+ raise NotImplementedError()
+
  @abstractmethod
  def get_policies(self, device: Any) -> list[RoutingPolicy]:
  raise NotImplementedError()
@@ -87,7 +92,7 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  condition: SingleCondition[Any],
  communities: dict[str, CommunityList],
  rd_filters: dict[str, RDFilter],
- prefix_name_generator: PrefixListNameGenerator,
+ name_generator: PrefixListNameGenerator,
  ) -> Iterator[Sequence[str]]:
  if condition.field == MatchField.community:
  if condition.operator is ConditionOperator.HAS:
@@ -136,21 +141,13 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  return
  if condition.field == MatchField.ip_prefix:
  for name in condition.value.names:
- mangled_name = prefix_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "if-match", "ip-prefix", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "if-match", "ip-prefix", plist.name
  return
  if condition.field == MatchField.ipv6_prefix:
  for name in condition.value.names:
- mangled_name = prefix_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "if-match", "ipv6 address prefix-list", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "if-match", "ipv6 address prefix-list", plist.name
  return
  if condition.field == MatchField.as_path_length:
  if condition.operator is ConditionOperator.EQ:
@@ -353,10 +350,11 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  yield "goto next-node"

  def run_huawei(self, device):
+ prefix_lists = self.get_prefix_lists(device)
  policies = self.get_policies(device)
  communities = {c.name: c for c in self.get_community_lists(device)}
  rd_filters = {f.name: f for f in self.get_rd_filters(device)}
- prefix_name_generator = new_prefix_list_name_generator(policies)
+ prefix_name_generator = PrefixListNameGenerator(prefix_lists, policies)

  for policy in self.get_policies(device):
  for statement in policy.statements:
@@ -383,7 +381,7 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  condition: SingleCondition[Any],
  communities: dict[str, CommunityList],
  rd_filters: dict[str, RDFilter],
- prefix_name_generator: PrefixListNameGenerator,
+ name_generator: PrefixListNameGenerator,
  ) -> Iterator[Sequence[str]]:
  if condition.field == MatchField.community:
  if condition.operator is ConditionOperator.HAS_ANY:
@@ -436,21 +434,13 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  return
  if condition.field == MatchField.ip_prefix:
  for name in condition.value.names:
- mangled_name = prefix_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "match", "ip address prefix-list", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "match", "ip address prefix-list", plist.name
  return
  if condition.field == MatchField.ipv6_prefix:
  for name in condition.value.names:
- mangled_name = prefix_name_generator.get_prefix_name(
- name=name,
- greater_equal=condition.value.greater_equal,
- less_equal=condition.value.less_equal,
- )
- yield "match", "ipv6 address prefix-list", mangled_name
+ plist = name_generator.get_prefix(name, condition.value)
+ yield "match", "ipv6 address prefix-list", plist.name
  return
  if condition.field == MatchField.as_path_length:
  if condition.operator is ConditionOperator.EQ:
@@ -492,12 +482,15 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  yield "set", "community community-list", *action.value.replaced
  else:
  yield "set", "community", "none"
- for community_name in action.value.added:
- yield "set", "community community-list", community_name, "additive"
- for community_name in action.value.removed:
- community = communities[community_name]
- for comm_value in community.members:
- yield "set community", arista_well_known_community(comm_value), "delete"
+ if action.value.added:
+ yield "set", "community community-list", *action.value.added, "additive"
+ if action.value.removed:
+ members = [
+ arista_well_known_community(member)
+ for community_name in action.value.removed
+ for member in communities[community_name].members
+ ]
+ yield "set community", *members, "delete"

  def _arista_then_large_community(
  self,
@@ -520,10 +513,10 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  first = False
  else:
  yield "set", "large-community large-community-list", community_name, "additive"
- for community_name in action.value.added:
- yield "set", "large-community large-community-list", community_name, "additive"
- for community_name in action.value.removed:
- yield "set large-community large-community-list", community_name, "delete"
+ if action.value.added:
+ yield "set", "large-community large-community-list", *action.value.added, "additive"
+ if action.value.removed:
+ yield "set large-community large-community-list", *action.value.removed, "delete"

  def _arista_then_extcommunity_rt(
  self,
@@ -533,14 +526,20 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  ) -> Iterator[Sequence[str]]:
  if action.value.replaced is not None:
  raise NotImplementedError("Extcommunity_rt replace is not supported for arista")
- for community_name in action.value.added:
- community = communities[community_name]
- for comm_value in community.members:
- yield "set", "extcommunity rt", comm_value, "additive"
- for community_name in action.value.removed:
- community = communities[community_name]
- for comm_value in community.members:
- yield "set extcommunity rt", comm_value, "delete"
+ if action.value.added:
+ members = [
+ f"rt {member}"
+ for community_name in action.value.removed
+ for member in communities[community_name].members
+ ]
+ yield "set", "extcommunity", *members, "additive"
+ if action.value.removed:
+ members = [
+ f"rt {member}"
+ for community_name in action.value.removed
+ for member in communities[community_name].members
+ ]
+ yield "set extcommunity", *members, "delete"

  def _arista_then_extcommunity_soo(
  self,
@@ -550,14 +549,20 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  ) -> Iterator[Sequence[str]]:
  if action.value.replaced is not None:
  raise NotImplementedError("Extcommunity_soo replace is not supported for arista")
- for community_name in action.value.added:
- community = communities[community_name]
- for comm_value in community.members:
- yield "set", "extcommunity soo", comm_value, "additive"
- for community_name in action.value.removed:
- community = communities[community_name]
- for comm_value in community.members:
- yield "set", "extcommunity soo", comm_value, "delete"
+ if action.value.added:
+ members = [
+ f"soo {member}"
+ for community_name in action.value.removed
+ for member in communities[community_name].members
+ ]
+ yield "set", "extcommunity", *members, "additive"
+ if action.value.removed:
+ members = [
+ f"soo {member}"
+ for community_name in action.value.removed
+ for member in communities[community_name].members
+ ]
+ yield "set", "extcommunity", *members, "delete"

  def _arista_then_as_path(
  self,
@@ -675,8 +680,9 @@ class RoutingPolicyGenerator(PartialGenerator, ABC):
  yield "continue"

  def run_arista(self, device):
+ prefix_lists = self.get_prefix_lists(device)
  policies = self.get_policies(device)
- prefix_name_generator = new_prefix_list_name_generator(policies)
+ prefix_name_generator = PrefixListNameGenerator(prefix_lists, policies)
  communities = {c.name: c for c in self.get_community_lists(device)}
  rd_filters = {f.name: f for f in self.get_rd_filters(device)}

annet/rpl_generators/prefix_lists.py CHANGED
@@ -8,26 +8,6 @@ from annet.rpl import PrefixMatchValue, MatchField, SingleCondition, RoutingPoli
  from .entities import IpPrefixList, PrefixListNameGenerator


- def get_used_prefix_lists(
- prefix_lists: Sequence[IpPrefixList], name_generator: PrefixListNameGenerator,
- ) -> list[IpPrefixList]:
- return [c for c in prefix_lists if name_generator.is_used(c.name)]
-
-
- def new_prefix_list_name_generator(policies: list[RoutingPolicy]) -> PrefixListNameGenerator:
- name_gen = PrefixListNameGenerator()
- for policy in policies:
- for statement in policy.statements:
- condition: SingleCondition[PrefixMatchValue]
- for condition in statement.match.find_all(MatchField.ipv6_prefix):
- for name in condition.value.names:
- name_gen.add_prefix(name, condition.value.greater_equal, condition.value.less_equal)
- for condition in statement.match.find_all(MatchField.ip_prefix):
- for name in condition.value.names:
- name_gen.add_prefix(name, condition.value.greater_equal, condition.value.less_equal)
- return name_gen
-
-
  class PrefixListFilterGenerator(PartialGenerator, ABC):
  TAGS = ["policy", "rpl", "routing"]

@@ -39,12 +19,6 @@ class PrefixListFilterGenerator(PartialGenerator, ABC):
  def get_prefix_lists(self, device: Any) -> Sequence[IpPrefixList]:
  raise NotImplementedError()

- def get_used_prefix_lists(self, device: Any, name_generator: PrefixListNameGenerator) -> Sequence[IpPrefixList]:
- return get_used_prefix_lists(
- prefix_lists=self.get_prefix_lists(device),
- name_generator=name_generator,
- )
-
  # huawei
  def acl_huawei(self, _):
  return r"""
@@ -54,57 +28,48 @@ class PrefixListFilterGenerator(PartialGenerator, ABC):

  def _huawei_prefix_list(
  self,
- name: str,
  prefix_type: Literal["ipv6-prefix", "ip-prefix"],
- match: PrefixMatchValue,
  plist: IpPrefixList,
  ) -> Iterable[Sequence[str]]:
- for i, prefix in enumerate(plist.members):
- addr_mask = ip_interface(prefix)
+ for i, m in enumerate(plist.members):
+ ge, le = m.or_longer
  yield (
  "ip",
  prefix_type,
- name,
+ plist.name,
  f"index {i * 5 + 5}",
  "permit",
- str(addr_mask.ip).upper(),
- str(addr_mask.network.prefixlen),
+ str(m.prefix.network_address).upper(),
+ str(m.prefix.prefixlen),
  ) + (
- ("greater-equal", str(match.greater_equal)) if match.greater_equal is not None else ()
+ ("greater-equal", str(ge)) if ge is not None else ()
  ) + (
- ("less-equal", str(match.less_equal)) if match.less_equal is not None else ()
+ ("less-equal", str(le)) if le is not None else ()
  )

  def run_huawei(self, device: Any):
+ prefix_lists = self.get_prefix_lists(device)
  policies = self.get_policies(device)
- name_generator = new_prefix_list_name_generator(policies)
- plists = {p.name: p for p in self.get_used_prefix_lists(device, name_generator)}
- precessed_names = set()
+
+ name_generator = PrefixListNameGenerator(prefix_lists, policies)
+ processed_names = set()
  for policy in policies:
  for statement in policy.statements:
  cond: SingleCondition[PrefixMatchValue]
  for cond in statement.match.find_all(MatchField.ip_prefix):
  for name in cond.value.names:
- mangled_name = name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._huawei_prefix_list(mangled_name, "ip-prefix", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._huawei_prefix_list("ip-prefix", plist)
+ processed_names.add(plist.name)
  for cond in statement.match.find_all(MatchField.ipv6_prefix):
  for name in cond.value.names:
- mangled_name = name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._huawei_prefix_list(mangled_name, "ipv6-prefix", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._huawei_prefix_list("ipv6-prefix", plist)
+ processed_names.add(plist.name)

  # arista
  def acl_arista(self, _):
@@ -117,51 +82,41 @@ class PrefixListFilterGenerator(PartialGenerator, ABC):

  def _arista_prefix_list(
  self,
- name: str,
  prefix_type: Literal["ipv6", "ip"],
- match: PrefixMatchValue,
  plist: IpPrefixList,
  ) -> Iterable[Sequence[str]]:
- with self.block(prefix_type, "prefix-list", name):
- for i, prefix in enumerate(plist.members):
- addr_mask = ip_interface(prefix)
+ with self.block(prefix_type, "prefix-list", plist.name):
+ for i, m in enumerate(plist.members):
+ ge, le = m.or_longer
  yield (
  f"seq {i * 10 + 10}",
  "permit",
- addr_mask.with_prefixlen,
+ str(m.prefix),
  ) + (
- ("ge", str(match.greater_equal)) if match.greater_equal is not None else ()
+ ("ge", str(ge)) if ge is not None else ()
  ) + (
- ("le", str(match.less_equal)) if match.less_equal is not None else ()
+ ("le", str(le)) if le is not None else ()
  )

  def run_arista(self, device: Any):
+ prefix_lists = self.get_prefix_lists(device)
  policies = self.get_policies(device)
- name_generator = new_prefix_list_name_generator(policies)
- plists = {p.name: p for p in self.get_used_prefix_lists(device, name_generator)}
- precessed_names = set()
+ name_generator = PrefixListNameGenerator(prefix_lists, policies)
+ processed_names = set()
  for policy in policies:
  for statement in policy.statements:
  cond: SingleCondition[PrefixMatchValue]
  for cond in statement.match.find_all(MatchField.ip_prefix):
  for name in cond.value.names:
- mangled_name = name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._arista_prefix_list(mangled_name, "ip", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._arista_prefix_list("ip", plist)
+ processed_names.add(plist.name)
  for cond in statement.match.find_all(MatchField.ipv6_prefix):
  for name in cond.value.names:
- mangled_name = name_generator.get_prefix_name(
- name=name,
- greater_equal=cond.value.greater_equal,
- less_equal=cond.value.less_equal,
- )
- if mangled_name in precessed_names:
+ plist = name_generator.get_prefix(name, cond.value)
+ if plist.name in processed_names:
  continue
- yield from self._arista_prefix_list(mangled_name, "ipv6", cond.value, plists[name])
- precessed_names.add(mangled_name)
+ yield from self._arista_prefix_list("ipv6", plist)
+ processed_names.add(plist.name)
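To make the refactor concrete, the sketch below replays by hand what _huawei_prefix_list now yields for a single member; how PartialGenerator joins the yielded tuple into a configuration line is an assumption, not something this diff shows.

    from annet.rpl_generators import IpPrefixList, IpPrefixListMember

    plist = IpPrefixList(
        name="IPV4_LIST_EXAMPLE_24_32",
        members=[IpPrefixListMember("10.0.0.0/8", or_longer=(24, 32))],
    )
    m = plist.members[0]
    ge, le = m.or_longer
    row = ("ip", "ip-prefix", plist.name, "index 5", "permit",
           str(m.prefix.network_address).upper(), str(m.prefix.prefixlen)) \
          + (("greater-equal", str(ge)) if ge is not None else ()) \
          + (("less-equal", str(le)) if le is not None else ())
    print(" ".join(row))
    # ip ip-prefix IPV4_LIST_EXAMPLE_24_32 index 5 permit 10.0.0.0 8 greater-equal 24 less-equal 32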
annet-1.1.2.dist-info/METADATA → annet-2.0.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: annet
- Version: 1.1.2
+ Version: 2.0.0
  Summary: annet
  Home-page: https://github.com/annetutil/annet
  License: MIT
annet-1.1.2.dist-info/RECORD → annet-2.0.0.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  annet/__init__.py,sha256=0OKkFkqog8As7B6ApdpKQkrEAcEELUREWp82D8WvGA8,1846
- annet/annet.py,sha256=TMdEuM7GJQ4TjRVmuK3bCTZN-21lxjQ9sXqEdILUuBk,725
+ annet/annet.py,sha256=8Hc-n0S6d-YhcMpMiMET_SsYKfHFJeCA_E2gKgzK-2Y,790
  annet/argparse.py,sha256=v1MfhjR0B8qahza0WinmXClpR8UiDFhmwDDWtNroJPA,12855
  annet/bgp_models.py,sha256=oibTSdipNkGL4t8Xn94bhyKHMOtwbPBqmYaAy4FmTxQ,12361
  annet/cli.py,sha256=hDpjIr3w47lgQ_CvCQS1SXFDK-SJrf5slbT__5u6GIA,12342
@@ -7,14 +7,14 @@ annet/cli_args.py,sha256=KQlihxSl-Phhq1-9oJDdNSbIllEX55LlPfH6viEKOuw,13483
  annet/connectors.py,sha256=aoiDVLPizx8CW2p8SAwGCzyO_WW8H9xc2aujbGC4bDg,4882
  annet/deploy.py,sha256=3O96k17FbVt8KCvxF4gujXAB81U2-XRJyHLpbc9ekSQ,7529
  annet/deploy_ui.py,sha256=SDTJ-CF6puW0KHQ0g_NDp61Tqh6xkTBMxv8PrBhGyNI,27977
- annet/diff.py,sha256=zLcaCnb4lZRUb7frpH1CstQ3kacRcCblZs1uLG8J5lk,3391
+ annet/diff.py,sha256=SOhl706lpXdbrAwElgZqIzsTZEJRyDxOQ51fi910H0M,6088
  annet/executor.py,sha256=lcKI-EbYqeCiBNpL729kSltduzxbAzOkQ1L_QK7tNv8,5112
  annet/filtering.py,sha256=ZtqxPsKdV9reZoRxtQyBg22BqyMqd-2SotYcxZ-68AQ,903
- annet/gen.py,sha256=A718tYqIcxAa8tQEdjR6PjQ2ovWBnwPH7STKh38lmFY,33567
+ annet/gen.py,sha256=m76bL6rVbusNV_uVHrHnA3D7TUvvLtckXx7hlr5HGA0,31897
  annet/hardware.py,sha256=_iR28dWiPtt6ZYdk-qg1sxazkSRJE3ukqKB-fFFfQak,1141
  annet/implicit.py,sha256=G6EwZbrtUp089qRAwh96hminp236-1pJbeKAedoEafg,6056
  annet/lib.py,sha256=4N4X6jCCrig5rk7Ua4AofrV9zK9jhzkBq57fLsfBJjw,4812
- annet/output.py,sha256=FYMcWCc43-b51KsCiKnXPZHawhgWNoVtY9gRqw__Ce0,7473
+ annet/output.py,sha256=se8EpyNS9f9kPOlOaAV0ht4DjzDoBr8F2UafiezLPYw,7743
  annet/parallel.py,sha256=hLkzEht0KhzmzUWDdO4QFYQHzhxs3wPlTA8DxbB2ziw,17160
  annet/patching.py,sha256=nILbY5oJajN0b1j3f0HEJm05H3HVThnWvB7vDVh7UQw,559
  annet/reference.py,sha256=B8mH8VUMcecPnzULiTVb_kTQ7jQrCL7zp4pfIZQa5fk,4035
@@ -68,7 +68,7 @@ annet/annlib/rbparser/platform.py,sha256=65-r9mboRA3gaz9DRkSwPCdCRQneItqxppdMB6z
  annet/annlib/rbparser/syntax.py,sha256=iZ7Y-4QQBw4L3UtjEh54qisiRDhobl7HZxFNdP8mi54,3577
  annet/annlib/rulebook/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  annet/annlib/rulebook/common.py,sha256=hqwmmNofm5q2f-hV2usMY-IPMeiANLth28tZcRBYJTw,16640
- annet/api/__init__.py,sha256=oj6n0hwdxEbv7EoqAQUA56413BddSlIXNR_pUDTb8fY,35054
+ annet/api/__init__.py,sha256=YHhYDXAJVUijhjFaha_WxwpOMuc39zuy2e3PWt0yMCE,35106
  annet/configs/context.yml,sha256=RVLrKLIHpCty7AGwOnmqf7Uu0iZQCn-AjYhophDJer8,259
  annet/configs/logging.yaml,sha256=EUagfir99QqA73Scc3k7sfQccbU3E1SvEQdyhLFtCl4,997
  annet/generators/__init__.py,sha256=rVHHDTPKHPZsml1eNEAj3o-8RweFTN8J7LX3tKMXdIY,16402
@@ -91,22 +91,22 @@ annet/mesh/models_converter.py,sha256=itfrxDd5zdTQpFNmo-YXIFQDpYyBuQ6g7xpcjxvK6u
  annet/mesh/peer_models.py,sha256=Div4o1t6Z0TWvy-8WKg4-n9WOd2PKCmIpfbkILDlDtk,2791
  annet/mesh/port_processor.py,sha256=RHiMS5W8qoDkTKiarQ748bcr8bNx4g_R4Y4vZg2k4TU,478
  annet/mesh/registry.py,sha256=xmWF7yxWXmwqX2_jyMAKrbGd2G9sjb4rYDx4Xk61QKc,9607
- annet/rpl/__init__.py,sha256=0kcIktE3AmS0rlm9xzVDf53xk08OeZXgD-6ZLCt_KCs,731
+ annet/rpl/__init__.py,sha256=8nSiFpXH4OhzRGKr-013nHwwKk5Y50uh2gL7d_IoV8U,757
  annet/rpl/action.py,sha256=PY6W66j908RuqQ1_ioxayqVN-70rxDk5Z59EGHtxI98,1246
  annet/rpl/condition.py,sha256=MJri4MbWtPkLHIsLMAtsIEF7e8IAS9dIImjmJs5vS5U,3418
- annet/rpl/match_builder.py,sha256=vK0faYyg3vJGOp6LSKscLCno3Fc2PfznOmVCEUEhZfk,4774
+ annet/rpl/match_builder.py,sha256=8hvkNWF29yIAt8cpS7P797ePm3ikZBZwrVQj7QcCsP8,4749
  annet/rpl/policy.py,sha256=P1Kt-8fHFxEczeP-RwkK_wrGN0p7IR-hOApEd2vC55E,448
  annet/rpl/result.py,sha256=PHFn1zhDeqLBn07nkYw5vsoXew4nTwkklOwqvFWzBLg,141
  annet/rpl/routemap.py,sha256=SIyk73OzPp2oH_XwrDv2xczuY2Zt1VsJmB0TT5r7F5g,2593
  annet/rpl/statement_builder.py,sha256=sVGOYsCV0s_SFQUy2WtUyQqKy5H4MOfmRCJWGj-UOJ4,9403
- annet/rpl_generators/__init__.py,sha256=ZLWs-flcpyIbdhxSDfNt-ORDrLe8ins25sWdXTWeUoA,748
+ annet/rpl_generators/__init__.py,sha256=V4rAZlBaOUSjeQ5eCNmWeD7BSJLIwy0lKU_01grebpc,832
  annet/rpl_generators/aspath.py,sha256=kZakwPLfGGiXu9fC6I1z-pvy7Fe-4dy93_-lYcx39_4,2038
  annet/rpl_generators/community.py,sha256=SWpaOvoQUNISuRm41-IvGPFmntvgFv9ee4zegsMyeo0,11496
- annet/rpl_generators/cumulus_frr.py,sha256=F-LRT-UPnztUqinp3KYKqh796L9dqkzPKBUxMkiFjF8,21111
- annet/rpl_generators/entities.py,sha256=DIpgAQ8Tslo2hq6iFBaYkJX12BFBiccN8GOaRVxR1Uk,1985
+ annet/rpl_generators/cumulus_frr.py,sha256=OabE9Me0O0iYCl20JOq9khTNhV2tXTrB3hrx9ohI3xE,19651
+ annet/rpl_generators/entities.py,sha256=gL_Ap7dz2ioTCbA-4DIZceeE-7-_Var5hItVz--bVcs,3343
  annet/rpl_generators/execute.py,sha256=wS6e6fwcPWywsHB0gBMqZ17eF0s4YOBgDgwPB_cr5Rw,431
- annet/rpl_generators/policy.py,sha256=NeqB0reRN_KuY8LYkeGT3dRPe2HFDT9RfmVy5fcA3zw,32570
- annet/rpl_generators/prefix_lists.py,sha256=D4WXeISevf62EmDu1GYdldjz2gmONFFlmgMyGAOa_m8,7248
+ annet/rpl_generators/policy.py,sha256=t0n4kRYT_cQIfPUXvQBfGiKwMF6GrpiHqs93KMq075s,32335
+ annet/rpl_generators/prefix_lists.py,sha256=5jM5Xj0dtx5xF9ap-TgExs0ofRAzm0LN2j3aL5Ub_yc,4778
  annet/rpl_generators/rd.py,sha256=YGXgx1D2D0-pixgspXJzA6NvW8lx3AmHMxIY2l5rraI,1457
  annet/rulebook/__init__.py,sha256=oafL5HC8QHdkO9CH2q_fxohPMxOgjn-dNQa5kPjuqsA,3942
  annet/rulebook/common.py,sha256=zK1s2c5lc5HQbIlMUQ4HARQudXSgOYiZ_Sxc2I_tHqg,721
@@ -175,13 +175,13 @@ annet_generators/mesh_example/bgp.py,sha256=jzyDndSSGYyYBquDnLlR-7P5lzmUKcSyYCml
  annet_generators/mesh_example/mesh_logic.py,sha256=DJS5JMCTs0rs0LN__0LulNgo2ekUcWiOMe02BlOeFas,1454
  annet_generators/rpl_example/__init__.py,sha256=A7DTn-SVSlUpeO7mKT_obqimp29p9zWfVRPBSxmENQY,209
  annet_generators/rpl_example/generator.py,sha256=zndIGfV4ZlTxPgAGYs7bMQvTc_tYScODqJz3fuyermY,3871
- annet_generators/rpl_example/items.py,sha256=Ez1RF5YhcXNCusBmeApIjRL3rBlMazNZd29Gpw1_IsA,766
+ annet_generators/rpl_example/items.py,sha256=d99HSXDHFjZq511EvGhIqRTWK3F4ZsCWfdUqFYQcyhE,772
  annet_generators/rpl_example/mesh.py,sha256=z_WgfDZZ4xnyh3cSf75igyH09hGvtexEVwy1gCD_DzA,288
  annet_generators/rpl_example/route_policy.py,sha256=z6nPb0VDeQtKD1NIg9sFvmUxBD5tVs2frfNIuKdM-5c,2318
- annet-1.1.2.dist-info/AUTHORS,sha256=rh3w5P6gEgqmuC-bw-HB68vBCr-yIBFhVL0PG4hguLs,878
- annet-1.1.2.dist-info/LICENSE,sha256=yPxl7dno02Pw7gAcFPIFONzx_gapwDoPXsIsh6Y7lC0,1079
- annet-1.1.2.dist-info/METADATA,sha256=Ngyf7Hi193Mgaifi37xaep8v7fjSM9-KWlIveKaXiEg,793
- annet-1.1.2.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- annet-1.1.2.dist-info/entry_points.txt,sha256=5lIaDGlGi3l6QQ2ry2jZaqViP5Lvt8AmsegdD0Uznck,192
- annet-1.1.2.dist-info/top_level.txt,sha256=QsoTZBsUtwp_FEcmRwuN8QITBmLOZFqjssRfKilGbP8,23
- annet-1.1.2.dist-info/RECORD,,
+ annet-2.0.0.dist-info/AUTHORS,sha256=rh3w5P6gEgqmuC-bw-HB68vBCr-yIBFhVL0PG4hguLs,878
+ annet-2.0.0.dist-info/LICENSE,sha256=yPxl7dno02Pw7gAcFPIFONzx_gapwDoPXsIsh6Y7lC0,1079
+ annet-2.0.0.dist-info/METADATA,sha256=9UifSXsrbaDmr4yZByRC-grykjjq-HGfyT32G1aDE6w,793
+ annet-2.0.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ annet-2.0.0.dist-info/entry_points.txt,sha256=5lIaDGlGi3l6QQ2ry2jZaqViP5Lvt8AmsegdD0Uznck,192
+ annet-2.0.0.dist-info/top_level.txt,sha256=QsoTZBsUtwp_FEcmRwuN8QITBmLOZFqjssRfKilGbP8,23
+ annet-2.0.0.dist-info/RECORD,,
annet_generators/rpl_example/items.py CHANGED
@@ -1,4 +1,4 @@
- from annet.rpl_generators import AsPathFilter, CommunityList, CommunityType, RDFilter, IpPrefixList
+ from annet.rpl_generators import AsPathFilter, CommunityList, CommunityType, RDFilter, ip_prefix_list

  AS_PATH_FILTERS = [
  AsPathFilter("ASP_EXAMPLE", [".*123456.*"]),
@@ -16,6 +16,6 @@ RD_FILTERS = [
  ]

  PREFIX_LISTS = [
- IpPrefixList("IPV6_LIST_EXAMPLE", ["2a13:5941::/32"]),
- IpPrefixList("IPV4_LIST_EXAMPLE", ["0.0.0.0/8", "10.0.0.0/8"]),
+ ip_prefix_list("IPV6_LIST_EXAMPLE", ["2a13:5941::/32"]),
+ ip_prefix_list("IPV4_LIST_EXAMPLE", ["0.0.0.0/8", "10.0.0.0/8"]),
  ]