annet-0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- annet/__init__.py +61 -0
- annet/annet.py +25 -0
- annet/annlib/__init__.py +7 -0
- annet/annlib/command.py +49 -0
- annet/annlib/diff.py +158 -0
- annet/annlib/errors.py +8 -0
- annet/annlib/filter_acl.py +196 -0
- annet/annlib/jsontools.py +89 -0
- annet/annlib/lib.py +495 -0
- annet/annlib/netdev/__init__.py +0 -0
- annet/annlib/netdev/db.py +62 -0
- annet/annlib/netdev/devdb/__init__.py +28 -0
- annet/annlib/netdev/devdb/data/devdb.json +137 -0
- annet/annlib/netdev/views/__init__.py +0 -0
- annet/annlib/netdev/views/dump.py +121 -0
- annet/annlib/netdev/views/hardware.py +112 -0
- annet/annlib/output.py +246 -0
- annet/annlib/patching.py +533 -0
- annet/annlib/rbparser/__init__.py +0 -0
- annet/annlib/rbparser/acl.py +120 -0
- annet/annlib/rbparser/deploying.py +55 -0
- annet/annlib/rbparser/ordering.py +52 -0
- annet/annlib/rbparser/platform.py +51 -0
- annet/annlib/rbparser/syntax.py +115 -0
- annet/annlib/rulebook/__init__.py +0 -0
- annet/annlib/rulebook/common.py +350 -0
- annet/annlib/tabparser.py +648 -0
- annet/annlib/types.py +35 -0
- annet/api/__init__.py +807 -0
- annet/argparse.py +415 -0
- annet/cli.py +192 -0
- annet/cli_args.py +493 -0
- annet/configs/context.yml +18 -0
- annet/configs/logging.yaml +39 -0
- annet/connectors.py +64 -0
- annet/deploy.py +441 -0
- annet/diff.py +85 -0
- annet/executor.py +551 -0
- annet/filtering.py +40 -0
- annet/gen.py +828 -0
- annet/generators/__init__.py +987 -0
- annet/generators/common/__init__.py +0 -0
- annet/generators/common/initial.py +33 -0
- annet/hardware.py +45 -0
- annet/implicit.py +139 -0
- annet/lib.py +128 -0
- annet/output.py +170 -0
- annet/parallel.py +448 -0
- annet/patching.py +25 -0
- annet/reference.py +148 -0
- annet/rulebook/__init__.py +114 -0
- annet/rulebook/arista/__init__.py +0 -0
- annet/rulebook/arista/iface.py +16 -0
- annet/rulebook/aruba/__init__.py +16 -0
- annet/rulebook/aruba/ap_env.py +146 -0
- annet/rulebook/aruba/misc.py +8 -0
- annet/rulebook/cisco/__init__.py +0 -0
- annet/rulebook/cisco/iface.py +68 -0
- annet/rulebook/cisco/misc.py +57 -0
- annet/rulebook/cisco/vlandb.py +90 -0
- annet/rulebook/common.py +19 -0
- annet/rulebook/deploying.py +87 -0
- annet/rulebook/huawei/__init__.py +0 -0
- annet/rulebook/huawei/aaa.py +75 -0
- annet/rulebook/huawei/bgp.py +97 -0
- annet/rulebook/huawei/iface.py +33 -0
- annet/rulebook/huawei/misc.py +337 -0
- annet/rulebook/huawei/vlandb.py +115 -0
- annet/rulebook/juniper/__init__.py +107 -0
- annet/rulebook/nexus/__init__.py +0 -0
- annet/rulebook/nexus/iface.py +92 -0
- annet/rulebook/patching.py +143 -0
- annet/rulebook/ribbon/__init__.py +12 -0
- annet/rulebook/texts/arista.deploy +20 -0
- annet/rulebook/texts/arista.order +125 -0
- annet/rulebook/texts/arista.rul +59 -0
- annet/rulebook/texts/aruba.deploy +20 -0
- annet/rulebook/texts/aruba.order +83 -0
- annet/rulebook/texts/aruba.rul +87 -0
- annet/rulebook/texts/cisco.deploy +27 -0
- annet/rulebook/texts/cisco.order +82 -0
- annet/rulebook/texts/cisco.rul +105 -0
- annet/rulebook/texts/huawei.deploy +188 -0
- annet/rulebook/texts/huawei.order +388 -0
- annet/rulebook/texts/huawei.rul +471 -0
- annet/rulebook/texts/juniper.rul +120 -0
- annet/rulebook/texts/nexus.deploy +24 -0
- annet/rulebook/texts/nexus.order +85 -0
- annet/rulebook/texts/nexus.rul +83 -0
- annet/rulebook/texts/nokia.rul +31 -0
- annet/rulebook/texts/pc.order +5 -0
- annet/rulebook/texts/pc.rul +9 -0
- annet/rulebook/texts/ribbon.deploy +22 -0
- annet/rulebook/texts/ribbon.rul +77 -0
- annet/rulebook/texts/routeros.order +38 -0
- annet/rulebook/texts/routeros.rul +45 -0
- annet/storage.py +121 -0
- annet/tabparser.py +36 -0
- annet/text_term_format.py +95 -0
- annet/tracing.py +170 -0
- annet/types.py +223 -0
- annet-0.1.dist-info/AUTHORS +21 -0
- annet-0.1.dist-info/LICENSE +21 -0
- annet-0.1.dist-info/METADATA +24 -0
- annet-0.1.dist-info/RECORD +113 -0
- annet-0.1.dist-info/WHEEL +5 -0
- annet-0.1.dist-info/entry_points.txt +6 -0
- annet-0.1.dist-info/top_level.txt +3 -0
- annet_generators/__init__.py +0 -0
- annet_generators/example/__init__.py +12 -0
- annet_generators/example/lldp.py +52 -0
- annet_nbexport/__init__.py +220 -0
- annet_nbexport/main.py +46 -0
annet/annlib/patching.py
ADDED
@@ -0,0 +1,533 @@
import copy
import operator
import textwrap
from collections import OrderedDict as odict
from typing import (  # pylint: disable=unused-import
    Any,
    Dict,
    Iterator,
    List,
    Optional,
    Tuple,
    Union,
)

from .lib import jun_activate, merge_dicts, strip_annotation, uniq
from .rbparser import platform
from .rbparser.ordering import compile_ordering_text
from .rulebook.common import default as common_default
from .rulebook.common import call_diff_logic
from .tabparser import CommonFormatter
from .types import Diff, Op


# =====
class AclError(Exception):
    pass


class AclNotExclusiveError(AclError):
    pass


class PatchRow:
    row: str

    def __init__(self, row: str):
        self.row = row

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            return self.row == other
        if not isinstance(other, PatchRow):
            return NotImplemented
        return self.row == other.row

    def __hash__(self) -> int:
        return hash(self.row)

    def __str__(self) -> str:
        return self.row


class PatchItem:
    row: str
    child: "Union[PatchTree, None]"
    context: Dict[str, str]

    def __init__(self, row, child, context):
        self.row = row
        self.child = child
        self.context = context

    def __str__(self):
        return (
            f"PatchItem(\n"
            f'    row="{self.row}",\n'
            f"    child={textwrap.indent(str(self.child), '    ').strip()},\n"
            f"    context={self.context}\n"
            f")"
        )


class PatchTree:
    itms: List[PatchItem]

    def __init__(self, row: Optional[str] = None):
        self.itms = []
        if row:
            self.add(row, {})

    def add(self, row: str, context: Dict[str, str]) -> None:
        self.itms.append(PatchItem(row, None, context))

    def add_block(self, row: str, subtree: "Optional[PatchTree]" = None, context: Dict[str, str] = None) -> "PatchTree":
        if subtree is None:
            subtree = PatchTree()
        if context is None:
            context = {}
        self.itms.append(PatchItem(row, subtree, context))
        return subtree

    def items(self) -> "Iterator[Tuple[str, Union[PatchTree, None]]]":
        for item in self.itms:
            yield str(item.row), item.child

    def asdict(self) -> Dict:
        ret = odict()
        for row in uniq(i.row for i in self.itms):
            subtrees = []
            for i in self.itms:
                if i.row == row and i.child is not None:
                    subtrees.append(i.child.asdict())
            if subtrees:
                ret[str(row)] = merge_dicts(*subtrees)
            else:
                ret[str(row)] = None
        return ret

    def __bool__(self):
        return bool(self.itms)

    def __str__(self):
        n = ",\n"
        itms = map(lambda x: textwrap.indent(str(x), "    "), self.itms)
        return (
            f"PatchTree(\n"
            f"    itms=[\n"
            f"        {n.join(itms).strip()}\n"
            f"    ]\n"
            f")"
        )


class Orderer:
    def __init__(self, rb, vendor):
        self.rb = rb
        self.vendor = vendor

    def ref_insert(self, ref_tracker):
        for ref, _ in reversed(ref_tracker.configs()):
            self.insert(ref)
        for _, defs in reversed(ref_tracker.configs()):
            self.insert(defs)

    def insert(self, rules):
        if isinstance(rules, dict):
            fmtr = CommonFormatter()
            rules = fmtr.join(rules)
        rules = compile_ordering_text(rules, self.vendor)
        self.rb = merge_dicts(rules, self.rb)

    def rule_weight(self, row, rule, regexp_key):
        return len(set(row).intersection(set(rule["attrs"][regexp_key].pattern))) / len(row)

    def get_order(self, row, cmd_direct):
        f_order = None
        f_weight = 0
        f_rule = ""
        children = []
        ordering = self.rb
        block_exit = platform.VENDOR_EXIT[self.vendor]

        for (order, (raw_rule, rule)) in enumerate(ordering.items()):
            direct_matched = bool(rule["attrs"]["direct_regexp"].match(row))
            if not rule["attrs"]["order_reverse"] and (direct_matched or rule["attrs"]["reverse_regexp"].match(row)):
                # if order_reverse is not set, the rule is treated as direct
                regexp_key = ("direct_regexp" if direct_matched else "reverse_regexp")
                weight = self.rule_weight(row, rule, regexp_key)
                if f_order is None or f_weight < weight:
                    f_order = order
                    f_weight = weight
                    f_rule = (raw_rule, rule["attrs"][regexp_key])
                    children.extend(ordering[raw_rule]["children"].items())

            elif rule["attrs"]["order_reverse"] and not cmd_direct and direct_matched:
                weight = self.rule_weight(row, rule, "direct_regexp")
                if f_order is None or f_weight < weight or (f_weight == weight and not cmd_direct):
                    f_order = order
                    f_weight = weight
                    f_rule = (raw_rule, rule["attrs"]["direct_regexp"])
                    cmd_direct = True
                    children = []

            elif block_exit and block_exit == row:
                f_order = float("inf")
                f_rule = (raw_rule, block_exit)
                cmd_direct = True
                children = []

        return (f_order or 0), cmd_direct, odict(children), f_rule

    def order_config(self, config):
        ordered = []
        reverse_prefix = platform.VENDOR_REVERSES[self.vendor]
        if not config:
            return odict()
        for (row, children) in config.items():
            cmd_direct = not row.startswith(reverse_prefix)
            (order, direct, rb, _) = self.get_order(row, cmd_direct)
            child_orderer = Orderer(rb, self.vendor)
            children = child_orderer.order_config(children)
            ordered.append({
                "row": row,
                "children": children,
                "direct": direct,
                "order": order,
            })

        return odict(
            (item["row"], item["children"])
            for item in sorted(ordered, key=(lambda item: (
                (item["order"] if item["direct"] else -item["order"]),
                item["direct"],
            )))
        )


# =====
def apply_acl(config, rules, fatal_acl=False, exclusive=False, with_annotations=False, _path=()):
    passed = odict()
    for (row, children) in config.items():
        if with_annotations:
            # do not pass annotations through ACL
            test_row = strip_annotation(row)
        else:
            test_row = row
        try:
            (match, children_rules) = match_row_to_acl(test_row, rules, exclusive)
        except AclNotExclusiveError as err:
            raise AclNotExclusiveError("'%s', %s" % ("/ ".join(_path + (row,)), err))
        if match:
            if not (match["is_reverse"] and all(match["attrs"]["cant_delete"])):
                passed[row] = apply_acl(
                    config=children,
                    rules=children_rules,
                    fatal_acl=fatal_acl,
                    exclusive=exclusive,
                    with_annotations=with_annotations,
                    _path=_path + (row,)
                )
        elif fatal_acl:
            raise AclError(" / ".join(_path + (row,)))
    return passed


def apply_acl_diff(diff, rules):
    passed = []
    for (op, row, children, d_match) in diff:
        (match, children_rules) = match_row_to_acl(row, rules)
        if match:
            if op == Op.REMOVED and all(match["attrs"]["cant_delete"]):
                op = Op.AFFECTED
            children = apply_acl_diff(children, children_rules)
            passed.append((op, row, children, d_match))
    return passed


def mark_unchanged(diff):
    passed = []
    for (op, row, children, d_match) in diff:
        if op == Op.AFFECTED:
            children = mark_unchanged(children)
            if all(x[0] == Op.UNCHANGED for x in children):
                op = Op.UNCHANGED
        passed.append((op, row, children, d_match))
    return passed


def strip_unchanged(diff):
    passed = []
    for (op, row, children, d_match) in diff:
        if op == Op.UNCHANGED:
            continue
        children = strip_unchanged(children)
        passed.append((op, row, children, d_match))
    return passed


def make_diff(old, new, rb, acl_rules_list) -> Diff:
    # do not let the diff-logic code modify the configs
    old = copy.deepcopy(old)
    new = copy.deepcopy(new)
    diff_pre = apply_diff_rb(old, new, rb)
    diff = call_diff_logic(diff_pre, old, new)
    for acl_rules in acl_rules_list:
        if acl_rules is not None:
            diff = apply_acl_diff(diff, acl_rules)
    diff = mark_unchanged(diff)
    return diff


def apply_diff_rb(old, new, rb):
    """ Diff pre is an odict {(key, diff_logic): {}} """
    diff_pre = odict()
    for row in list(uniq(old, new)):
        (match, children_rules) = _match_row_to_rules(row, rb["patching"])
        if match:
            diff_pre[row] = {
                "match": match,
                "subtree": apply_diff_rb(
                    old.get(row, odict()),
                    new.get(row, odict()),
                    rb={"patching": children_rules},  # only the patching-rules part is needed here
                ),
            }
        else:
            old.pop(row, None)
            new.pop(row, None)
    return diff_pre


def make_pre(diff: Diff, _parent_match=None) -> Dict[str, Any]:
    pre = odict()
    for (op, row, children, match) in diff:
        if _parent_match and _parent_match["attrs"]["multiline"]:
            # If the parent rule is a multiline one, everything inside it becomes its content.
            # That means common.default() and the fake __MULTILINE_BODY__ rule are
            # forcibly applied to these rows.
            match = {
                "raw_rule": "__MULTILINE_BODY__",
                "key": row,
                "attrs": {
                    "comment": [],
                    "logic": common_default,  # works fine with multilines and the stripped-down rule
                    "multiline": True,
                    "context": _parent_match["attrs"]["context"],
                }
            }
        raw_rule = match["raw_rule"]
        key = match["key"]

        if raw_rule not in pre:
            pre[raw_rule] = {
                "attrs": match["attrs"],
                "items": odict(),
            }
        if key not in pre[raw_rule]["items"]:
            pre[raw_rule]["items"][key] = {
                Op.ADDED: [],
                Op.REMOVED: [],
                Op.MOVED: [],
                Op.AFFECTED: [],
                Op.UNCHANGED: [],
            }

        pre[raw_rule]["items"][key][op].append({
            "row": row,
            "children": make_pre(
                diff=children,
                _parent_match=match,
            ),
        })
    return pre


_comment_macros = {
    "!!HYES!!": "!!question!![Y/N]!!answer!!Y!! !!question!![y/n]!!answer!!Y!! !!question!![Yes/All/No/Cancel]!!answer!!Y!!"
}


def make_patch(pre, rb, hw, add_comments, orderer=None, _root_pre=None, do_commit=True):
    patch = []
    if not orderer:
        orderer = Orderer(rb["ordering"], hw.vendor)

    for (raw_rule, content) in pre.items():
        for (key, diff) in content["items"].items():
            # copy so that the logic callback cannot change the attributes
            rule_pre = content.copy()
            attrs = rule_pre["attrs"].copy()

            iterable = attrs["logic"](
                rule=attrs,
                key=key,
                diff=diff,
                hw=hw,
                rule_pre=rule_pre,
                root_pre=(_root_pre or pre),
            )
            for (direct, row, sub_pre) in iterable:
                if direct is not None:
                    patch_row = row
                    if add_comments:
                        comments = " ".join(attrs["comment"])
                        for (macro, m_value) in _comment_macros.items():
                            comments = comments.replace(macro, m_value)
                        if comments:
                            patch_row = "%s %s" % (row, comments)

                    # pylint: disable=unused-variable
                    (order, order_direct, ordering, order_rule) = orderer.get_order(row, direct)
                    fmt_row = patch_row
                    # fmt_row += " # %s" % str(order_rule)  # uncomment to debug ordering

                    if not do_commit and attrs.get("force_commit", False):
                        # if do_commit is False, skip patches that cannot be applied without a commit
                        continue

                    patch.append({
                        "row": fmt_row,
                        "children": (PatchTree() if not sub_pre else make_patch(
                            pre=sub_pre,
                            rb={"ordering": ordering},  # only the ordering-rules part is needed here
                            hw=hw,
                            add_comments=add_comments,
                            _root_pre=(_root_pre or pre),
                            do_commit=do_commit,
                        )),
                        "raw_rule": raw_rule,
                        "direct": direct,
                        "order": order,
                        "order_direct": order_direct,
                        "parent": attrs.get("parent", False),
                        "force_commit": attrs.get("force_commit", False),
                        "ignore_case": attrs.get("ignore_case", False),
                        "context": attrs["context"],
                    })
    tree = PatchTree()
    sorted_patch = sorted(patch, key=(lambda item: (
        (item["order"] if item["order_direct"] else -item["order"]),
        item["raw_rule"],
        item["order_direct"],
    )))
    for item in sorted_patch:
        if (not item["children"] and not item["parent"]) or not item["direct"]:
            tree.add(item["row"], item["context"])
        else:
            tree.add_block(item["row"], item["children"], item["context"])
        if item["force_commit"]:
            tree.add("commit", item["context"])
    return tree


def match_row_to_acl(row, rules, exclusive=False):
    matches = _find_acl_matches(row, rules)
    if matches:
        if exclusive:
            gen_cant_delete = {}
            for match in matches:
                names = match[0][0]["attrs"]["generator_names"]
                flags = match[0][0]["attrs"]["cant_delete"]
                for name, flag in zip(names, flags):
                    if name not in gen_cant_delete:
                        gen_cant_delete[name] = flag
                    else:
                        gen_cant_delete[name] &= flag
            can_delete = {name: flag for name, flag in gen_cant_delete.items() if not flag}
            if len(can_delete) > 1:
                generator_names = ", ".join(can_delete.keys())
                raise AclNotExclusiveError("generators: '%s'" % generator_names)
        return _select_match(matches, rules)
    return (None, None)  # (match, children_rules)


def _match_row_to_rules(row, rules):
    matches = _find_rules_matches(row, rules)
    if matches:
        return _select_match(matches, rules)
    return (None, None)


def _find_acl_matches(row, rules):
    res = []
    for regexp_key in ["direct_regexp", "reverse_regexp"]:
        for ((_, rule), is_global) in _rules_local_global(rules):
            row_to_match = _normalize_row_for_acl(row, rule)
            match = rule["attrs"][regexp_key].match(row_to_match)
            if match:
                rule["attrs"]["match"] = match.groupdict()
                # FIXME: the ignore type is not really used right now, but it does show up in ACLs sometimes.
                # The problem is that ACLs get merged and ignore rules break everything; we still need to decide what to do about that.
                # At the moment ignore ACLs only work in filter-acl, since it is self-contained and applied independently.
                # In that case ignore rules are matched as well and their specificity is computed alongside normal rules;
                # when an ignore rule is selected, the matched row is not passed through.
                metric = (
                    rule["attrs"]["prio"],
                    # Calculate how specific the matched regexp is for the row,
                    # based on how many symbols they share
                    len(set(row).intersection(set(rule["attrs"][regexp_key].pattern))) / len(row),
                )
                item = (
                    metric,
                    ((rule, (not is_global and regexp_key == "direct_regexp" and rule["type"] != "ignore")),
                     # ^^^ is_cr_allowed (cr == children rules)
                     {"is_reverse": (regexp_key == "reverse_regexp")}),
                    # ^^^ is_reverse ^^^
                )
                res.append(item)
    res.sort(key=operator.itemgetter(0), reverse=True)
    return [item[1] for item in res]


def _find_rules_matches(row, rules):
    matches = []
    for ((raw_rule, rule), is_global) in _rules_local_global(rules):
        match = rule["attrs"]["regexp"].match(row)
        if match:
            if rule["type"] == "ignore":
                return []
            matches.append(((rule, (not is_global)), {"raw_rule": raw_rule, "key": match.groups()}))
            # ^^^ is_cr_allowed
    return matches


def _select_match(matches, rules):
    ((f_rule, is_f_cr_allowed), f_other) = matches[0]  # f == first
    if f_rule["type"] == "ignore":
        # at the moment this branch is reachable only from filter-acl
        return (None, None)

    # merge all children rules that matched
    local_children = odict()
    if is_f_cr_allowed:
        for (rule, is_cr_allowed) in map(operator.itemgetter(0), matches):
            if is_cr_allowed:
                local_children = merge_dicts(local_children, rule["children"]["local"])
            # optional break on is_cr_allowed==False?

    children_rules = {
        "local": local_children,
        "global": odict(
            (list(f_rule["children"]["global"].items()) if is_f_cr_allowed else [])
            + list(rules["global"].items()),
        ),
    }

    match = {"attrs": f_rule["attrs"]}
    match.update(f_other)
    return (match, children_rules)


def _rules_local_global(rules):
    for (raw_rule, rule) in rules["local"].items():
        yield ((raw_rule, rule), False)
    for (raw_rule, rule) in rules["global"].items():
        yield ((raw_rule, rule), True)


def _normalize_row_for_acl(row, rule):
    # NOCDEV-5940: Juniper configs use the auxiliary "inactive:" marker
    if rule["attrs"]["vendor"] == "juniper":
        row = jun_activate(row)
    return row
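Note (illustration only, not part of the package diff): a minimal usage sketch of the PatchTree API above, assuming annet 0.1 is installed; the configuration rows are made up.

from annet.annlib.patching import PatchTree

# Build a two-level patch: a block command with a single child command.
tree = PatchTree()
iface = tree.add_block("interface Ethernet1", context={})  # add_block() returns the new subtree
iface.add("description uplink", {})

# asdict() collapses the tree into nested OrderedDicts, merging the subtrees
# of duplicate rows via merge_dicts().
print(tree.asdict())
# Expected shape:
# OrderedDict([('interface Ethernet1', OrderedDict([('description uplink', None)]))])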
annet/annlib/rbparser/acl.py
ADDED
@@ -0,0 +1,120 @@
import functools
from collections import OrderedDict as odict
from typing import Any, Callable, List, Optional

from valkit import add_validator_magic
from valkit.common import valid_bool, valid_number, valid_string_list

from . import platform, syntax


# =====
@functools.lru_cache()
def compile_acl_text(text, vendor, allow_ignore=False):
    return _compile_acl(
        trees=[syntax.parse_text(text, _PARAMS_SCHEME)],
        reverse_prefix=platform.VENDOR_REVERSES[vendor],
        allow_ignore=allow_ignore,
        vendor=vendor,
    )


@functools.lru_cache()
def compile_ref_acl_text(text):
    return _compile_acl(
        trees=[syntax.parse_text(text, _PARAMS_SCHEME)],
        reverse_prefix="",
        allow_ignore=False,
    )


@add_validator_magic
def valid_bool_list(
    arg: Any,
    delim: str = r"[,\t ]+",
    subval: Optional[Callable[[Any], Any]] = None,
    strip: bool = False,
) -> List[bool]:
    arg = valid_string_list(arg, delim, subval, strip)
    arg = [valid_bool(x, strip) for x in arg]
    return arg


# =====
_PARAMS_SCHEME = {
    "global": {
        "validator": valid_bool,
        "default": False,
        "uniter": (lambda a, b: a or b),
    },
    "cant_delete": {
        "validator": valid_bool_list,
        "default": (lambda raw_rule: [raw_rule.startswith("interface")]),  # FIXME: this is awful
        "uniter": (lambda a, b: a + b)
    },
    "prio": {
        "validator": (lambda s: valid_number(s, min=0, type=int)),
        "default": 0,
        "uniter": max,
    },
    "generator_names": {
        "validator": valid_string_list,
        "default": [],
        "uniter": (lambda a, b: a + b)
    }
}


# =====
def _compile_acl(trees, reverse_prefix, allow_ignore=False, vendor=""):
    rules = {"local": odict(), "global": odict()}
    for (rule_id, attrs) in _merge_toplevel(trees).items():
        if attrs["type"] == "ignore" and not allow_ignore:
            raise NotImplementedError("ACL does not support ignore-rules")
        rule = {
            "type": attrs["type"],
            "attrs": {
                "direct_regexp": syntax.compile_row_regexp(attrs["row"]),
                "reverse_regexp": syntax.compile_row_regexp(_make_reverse(attrs["row"], reverse_prefix)),
                "cant_delete": attrs["params"]["cant_delete"],
                "prio": attrs["params"]["prio"],
                "generator_names": attrs["params"]["generator_names"],
                "vendor": vendor,
                "context": attrs["context"],
            },
            "children": None,
        }
        if not attrs["params"]["global"] and not attrs["type"] == "ignore":
            rule["children"] = _compile_acl(attrs["children"], reverse_prefix, allow_ignore, vendor)
        rules["global" if attrs["params"]["global"] else "local"][rule_id] = rule
    return rules


def _merge_toplevel(trees):
    merged = odict()
    for tree in trees:
        for attrs in tree.values():
            rule_id = ("!" if attrs["type"] == "ignore" else "") + attrs["row"]
            if rule_id not in merged:
                merged[rule_id] = attrs
                merged[rule_id]["children"] = [attrs["children"]] if attrs["children"] else []
                continue

            for (key, value) in attrs["params"].items():
                if key in merged[rule_id]["params"]:
                    uniter = _PARAMS_SCHEME[key]["uniter"]
                    merged[rule_id]["params"][key] = uniter(merged[rule_id]["params"][key], value)
                else:
                    merged[rule_id]["params"][key] = value

            if attrs["children"]:
                merged[rule_id]["children"].append(attrs["children"])
    return merged


@functools.lru_cache()
def _make_reverse(row, reverse_prefix):
    if row.startswith(reverse_prefix + " "):
        return row[len(reverse_prefix + " "):]
    else:
        return "%s %s" % (reverse_prefix, row)
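Note (illustration only, not from the package): a self-contained sketch of how the "uniter" callables in _PARAMS_SCHEME combine parameters when _merge_toplevel sees the same rule more than once; the parameter values and generator names below are invented.

# The uniters mirror _PARAMS_SCHEME above: "global" is OR-ed, list parameters are
# concatenated, and "prio" takes the maximum.
uniters = {
    "global": lambda a, b: a or b,
    "cant_delete": lambda a, b: a + b,
    "prio": max,
    "generator_names": lambda a, b: a + b,
}

# Two hypothetical occurrences of the same ACL rule, e.g. emitted by two generators.
first = {"global": False, "cant_delete": [True], "prio": 0, "generator_names": ["lldp"]}
second = {"global": True, "cant_delete": [False], "prio": 10, "generator_names": ["mtu"]}

merged = {key: uniters[key](first[key], second[key]) for key in first}
print(merged)
# {'global': True, 'cant_delete': [True, False], 'prio': 10, 'generator_names': ['lldp', 'mtu']}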