illumio-pylo 0.3.12__py3-none-any.whl → 0.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- illumio_pylo/API/APIConnector.py +61 -14
- illumio_pylo/API/CredentialsManager.py +130 -3
- illumio_pylo/API/Explorer.py +619 -14
- illumio_pylo/API/JsonPayloadTypes.py +64 -4
- illumio_pylo/FilterQuery.py +892 -0
- illumio_pylo/LabelCommon.py +13 -3
- illumio_pylo/LabelDimension.py +109 -0
- illumio_pylo/LabelStore.py +97 -38
- illumio_pylo/WorkloadStore.py +58 -0
- illumio_pylo/__init__.py +9 -3
- illumio_pylo/cli/__init__.py +5 -2
- illumio_pylo/cli/commands/__init__.py +1 -0
- illumio_pylo/cli/commands/credential_manager.py +176 -0
- illumio_pylo/cli/commands/traffic_export.py +358 -0
- illumio_pylo/cli/commands/ui/credential_manager_ui/app.js +191 -2
- illumio_pylo/cli/commands/ui/credential_manager_ui/index.html +50 -1
- illumio_pylo/cli/commands/ui/credential_manager_ui/styles.css +179 -28
- illumio_pylo/cli/commands/update_pce_objects_cache.py +1 -2
- illumio_pylo/cli/commands/workload_export.py +29 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/METADATA +1 -1
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/RECORD +24 -22
- illumio_pylo/Query.py +0 -331
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/WHEEL +0 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/licenses/LICENSE +0 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,892 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Filter Query Engine for illumio_pylo
|
|
3
|
+
|
|
4
|
+
Provides a SQL-like query language to filter objects in the library.
|
|
5
|
+
|
|
6
|
+
Example queries:
|
|
7
|
+
"name == 'SRV158'"
|
|
8
|
+
"(name == 'SRV158' or name == 'SRV48889') and ip_address == '192.168.2.54'"
|
|
9
|
+
"last_heartbeat <= '2022-09-12' and online == true"
|
|
10
|
+
"hostname contains 'prod' and label.env == 'Production'"
|
|
11
|
+
"name matches 'SRV[0-9]+'"
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from abc import ABC, abstractmethod
|
|
15
|
+
from dataclasses import dataclass
|
|
16
|
+
from datetime import datetime, date
|
|
17
|
+
from enum import Enum, auto
|
|
18
|
+
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
|
|
19
|
+
import re
|
|
20
|
+
|
|
21
|
+
import illumio_pylo as pylo
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class TokenType(Enum):
    """Token types for the query lexer.

    NOTE: members use auto() so their order is significant; do not reorder.
    """
    LPAREN = auto()      # (
    RPAREN = auto()      # )
    AND = auto()         # and
    OR = auto()          # or
    NOT = auto()         # not
    EQ = auto()          # ==
    NEQ = auto()         # !=
    LT = auto()          # <
    GT = auto()          # >
    LTE = auto()         # <=
    GTE = auto()         # >=
    CONTAINS = auto()    # contains (substring test)
    MATCHES = auto()     # matches (regex)
    IDENTIFIER = auto()  # field names
    STRING = auto()      # 'value' or "value"
    NUMBER = auto()      # 123 or 123.45
    BOOLEAN = auto()     # true or false
    DATE = auto()        # date value parsed from string
    EOF = auto()         # end of input
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@dataclass
class Token:
    """A single token from the query string produced by QueryLexer."""
    type: TokenType  # category of the token (operator, keyword, literal, ...)
    value: Any       # parsed payload: str/int/float/bool, or None for EOF
    position: int    # 0-based character offset of the token in the query string
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class QueryLexer:
    """Tokenizes a query string into a list of Token objects.

    Keywords are matched case-insensitively; string literals support
    backslash escapes for the quote character and the backslash itself.

    Raises:
        pylo.PyloEx: on an unterminated string or an unexpected character.
    """

    # Reserved words, matched case-insensitively against identifiers.
    KEYWORDS = {
        'and': TokenType.AND,
        'or': TokenType.OR,
        'not': TokenType.NOT,
        'contains': TokenType.CONTAINS,
        'matches': TokenType.MATCHES,
        'true': TokenType.BOOLEAN,
        'false': TokenType.BOOLEAN,
    }

    # Comparison operators. Two-character entries must be tried before their
    # single-character prefixes ('<=' before '<', '>=' before '>').
    OPERATORS = {
        '==': TokenType.EQ,
        '!=': TokenType.NEQ,
        '<=': TokenType.LTE,
        '>=': TokenType.GTE,
        '<': TokenType.LT,
        '>': TokenType.GT,
    }

    def __init__(self, query: str):
        self.query = query
        self.pos = 0
        self.length = len(query)

    def _skip_whitespace(self):
        """Advance self.pos past any run of whitespace."""
        while self.pos < self.length and self.query[self.pos].isspace():
            self.pos += 1

    def _read_string(self, quote_char: str) -> str:
        """Read a quoted string, handling escape sequences.

        Only ``\\<quote>`` and ``\\\\`` are treated as escapes; any other
        backslash is kept verbatim.

        Raises:
            pylo.PyloEx: if the closing quote is never found.
        """
        start = self.pos  # remember where the literal began for error reporting
        result = []
        self.pos += 1  # skip opening quote

        while self.pos < self.length:
            char = self.query[self.pos]
            if char == '\\' and self.pos + 1 < self.length:
                # Handle escape sequences
                next_char = self.query[self.pos + 1]
                if next_char in (quote_char, '\\'):
                    result.append(next_char)
                    self.pos += 2
                    continue
            elif char == quote_char:
                self.pos += 1  # skip closing quote
                return ''.join(result)
            result.append(char)
            self.pos += 1

        # BUGFIX: report where the string STARTED, not where scanning stopped
        # (self.pos is always self.length here, which is useless to the user).
        raise pylo.PyloEx(f"Unterminated string starting at position {start}")

    def _read_identifier(self) -> str:
        """Read an identifier (field name, including dots for nested fields)."""
        start = self.pos
        while self.pos < self.length:
            char = self.query[self.pos]
            if char.isalnum() or char in ('_', '.'):
                self.pos += 1
            else:
                break
        return self.query[start:self.pos]

    def _read_number(self) -> Union[int, float]:
        """Read a numeric literal; returns float if it contains a dot, else int."""
        start = self.pos
        has_dot = False

        while self.pos < self.length:
            char = self.query[self.pos]
            if char.isdigit():
                self.pos += 1
            elif char == '.' and not has_dot:
                has_dot = True
                self.pos += 1
            else:
                break

        value_str = self.query[start:self.pos]
        return float(value_str) if has_dot else int(value_str)

    def tokenize(self) -> List[Token]:
        """Convert the query string into a list of tokens (EOF-terminated).

        REFACTOR: the original duplicated the single-char operator,
        identifier/keyword, and error branches both inside and outside a
        "two characters remain" guard; the branches were identical, so they
        are merged here into one fall-through chain.
        """
        tokens = []

        while self.pos < self.length:
            self._skip_whitespace()
            if self.pos >= self.length:
                break

            start_pos = self.pos
            char = self.query[self.pos]

            if char == '(':
                tokens.append(Token(TokenType.LPAREN, '(', start_pos))
                self.pos += 1
            elif char == ')':
                tokens.append(Token(TokenType.RPAREN, ')', start_pos))
                self.pos += 1
            elif char in ('"', "'"):
                # Quoted string literal
                value = self._read_string(char)
                tokens.append(Token(TokenType.STRING, value, start_pos))
            elif char.isdigit():
                value = self._read_number()
                tokens.append(Token(TokenType.NUMBER, value, start_pos))
            else:
                # Operators, identifiers and keywords. Try the longest
                # (two-character) operator first so '<=' beats '<'.
                two_char = self.query[self.pos:self.pos + 2]
                if len(two_char) == 2 and two_char in self.OPERATORS:
                    tokens.append(Token(self.OPERATORS[two_char], two_char, start_pos))
                    self.pos += 2
                elif char in '<>':
                    tokens.append(Token(self.OPERATORS[char], char, start_pos))
                    self.pos += 1
                elif char.isalpha() or char == '_':
                    # Identifier or keyword
                    identifier = self._read_identifier()
                    lower_id = identifier.lower()
                    if lower_id in self.KEYWORDS:
                        token_type = self.KEYWORDS[lower_id]
                        # Booleans carry their Python value; other keywords
                        # carry their lowercase spelling.
                        value = True if lower_id == 'true' else (False if lower_id == 'false' else lower_id)
                        tokens.append(Token(token_type, value, start_pos))
                    else:
                        tokens.append(Token(TokenType.IDENTIFIER, identifier, start_pos))
                else:
                    raise pylo.PyloEx(f"Unexpected character '{char}' at position {self.pos}")

        tokens.append(Token(TokenType.EOF, None, self.pos))
        return tokens
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
class QueryNode(ABC):
    """Base class for all AST nodes produced by QueryParser."""

    @abstractmethod
    def evaluate(self, obj: Any, registry: 'FilterRegistry') -> bool:
        """Evaluate this node against an object.

        The registry resolves field names and performs the actual
        comparisons, so nodes stay agnostic of the object type.
        """
        pass
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
class AndNode(QueryNode):
    """AST node: logical conjunction of two sub-expressions."""

    def __init__(self, left: QueryNode, right: QueryNode):
        self.left = left
        self.right = right

    def evaluate(self, obj: Any, registry: 'FilterRegistry') -> bool:
        """True only when both operands match; short-circuits on the left."""
        if not self.left.evaluate(obj, registry):
            return False
        return self.right.evaluate(obj, registry)

    def __repr__(self):
        return f"AndNode({self.left}, {self.right})"
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
class OrNode(QueryNode):
    """AST node: logical disjunction of two sub-expressions."""

    def __init__(self, left: QueryNode, right: QueryNode):
        self.left = left
        self.right = right

    def evaluate(self, obj: Any, registry: 'FilterRegistry') -> bool:
        """True when either operand matches; short-circuits on the left."""
        if self.left.evaluate(obj, registry):
            return True
        return self.right.evaluate(obj, registry)

    def __repr__(self):
        return f"OrNode({self.left}, {self.right})"
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
class NotNode(QueryNode):
    """AST node: logical negation of a single sub-expression."""

    def __init__(self, operand: QueryNode):
        self.operand = operand

    def evaluate(self, obj: Any, registry: 'FilterRegistry') -> bool:
        """True exactly when the wrapped expression does not match."""
        inner_result = self.operand.evaluate(obj, registry)
        return not inner_result

    def __repr__(self):
        return f"NotNode({self.operand})"
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
class ConditionNode(QueryNode):
    """Leaf AST node: a single condition (field operator value).

    Field resolution and the actual comparison are delegated to the
    FilterRegistry, keeping this node independent of the object type.
    """

    def __init__(self, field: str, operator: TokenType, value: Any):
        self.field = field        # field name as written in the query (may be dotted)
        self.operator = operator  # one of the comparison TokenTypes
        self.value = value        # raw parsed value; registry converts it per field type
    def evaluate(self, obj: Any, registry: 'FilterRegistry') -> bool:
        """Look up the field in the registry and apply the operator to obj."""
        return registry.evaluate_condition(obj, self.field, self.operator, self.value)

    def __repr__(self):
        return f"ConditionNode({self.field} {self.operator.name} {self.value!r})"
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
class QueryParser:
    """Recursive-descent parser that turns a token list into a QueryNode AST.

    Grammar (lowest to highest precedence):
        expression := or_expr
        or_expr    := and_expr ('or' and_expr)*
        and_expr   := not_expr ('and' not_expr)*
        not_expr   := 'not' not_expr | primary
        primary    := '(' or_expr ')' | condition
        condition  := IDENTIFIER comparison_operator value

    Raises:
        pylo.PyloEx: on empty input, unexpected tokens, or trailing input.
    """

    # Operators accepted between a field name and a value in a condition.
    _COMPARISON_OPERATORS = (
        TokenType.EQ, TokenType.NEQ, TokenType.LT, TokenType.GT,
        TokenType.LTE, TokenType.GTE, TokenType.CONTAINS, TokenType.MATCHES,
    )

    # Token types allowed on the right-hand side of a condition.
    # IDENTIFIER is included so unquoted words (enum-like values) work.
    _VALUE_TOKEN_TYPES = (
        TokenType.STRING, TokenType.NUMBER, TokenType.BOOLEAN, TokenType.IDENTIFIER,
    )

    def __init__(self, tokens: List[Token]):
        self.tokens = tokens
        self.pos = 0

    def _current(self) -> Token:
        """Return the current token without consuming it."""
        return self.tokens[self.pos]

    def _advance(self) -> Token:
        """Consume and return the current token."""
        token = self.tokens[self.pos]
        self.pos += 1
        return token

    def _expect(self, token_type: TokenType) -> Token:
        """Consume the current token, raising if it is not of the given type."""
        token = self._current()
        if token.type != token_type:
            raise pylo.PyloEx(
                f"Expected {token_type.name} but got {token.type.name} at position {token.position}"
            )
        return self._advance()

    def parse(self) -> QueryNode:
        """Parse the tokens into an AST, requiring all input to be consumed."""
        if self._current().type == TokenType.EOF:
            raise pylo.PyloEx("Empty query")

        node = self._parse_or()

        if self._current().type != TokenType.EOF:
            raise pylo.PyloEx(
                f"Unexpected token '{self._current().value}' at position {self._current().position}"
            )

        return node

    def _parse_or(self) -> QueryNode:
        """Parse OR expressions (lowest precedence); left-associative."""
        left = self._parse_and()

        while self._current().type == TokenType.OR:
            self._advance()  # consume 'or'
            left = OrNode(left, self._parse_and())

        return left

    def _parse_and(self) -> QueryNode:
        """Parse AND expressions; left-associative."""
        left = self._parse_not()

        while self._current().type == TokenType.AND:
            self._advance()  # consume 'and'
            left = AndNode(left, self._parse_not())

        return left

    def _parse_not(self) -> QueryNode:
        """Parse NOT expressions; recursion allows chained 'not not x'."""
        if self._current().type == TokenType.NOT:
            self._advance()  # consume 'not'
            return NotNode(self._parse_not())

        return self._parse_primary()

    def _parse_primary(self) -> QueryNode:
        """Parse primary expressions (conditions or parenthesized expressions)."""
        token = self._current()

        if token.type == TokenType.LPAREN:
            self._advance()  # consume '('
            node = self._parse_or()
            self._expect(TokenType.RPAREN)
            return node

        if token.type == TokenType.IDENTIFIER:
            return self._parse_condition()

        raise pylo.PyloEx(
            f"Unexpected token '{token.value}' at position {token.position}"
        )

    def _parse_condition(self) -> ConditionNode:
        """Parse a single condition: field operator value."""
        field = self._expect(TokenType.IDENTIFIER).value

        # Operator: must be one of the comparison operators.
        op_token = self._current()
        if op_token.type not in self._COMPARISON_OPERATORS:
            raise pylo.PyloEx(
                f"Expected comparison operator but got '{op_token.value}' at position {op_token.position}"
            )
        operator = self._advance().type

        # Value: the original had four identical 'value = value_token.value'
        # branches (STRING/NUMBER/BOOLEAN/IDENTIFIER); collapsed into a
        # single membership test (idiom cleanup, same behavior).
        value_token = self._current()
        if value_token.type not in self._VALUE_TOKEN_TYPES:
            raise pylo.PyloEx(
                f"Expected value but got '{value_token.value}' at position {value_token.position}"
            )
        self._advance()

        return ConditionNode(field, operator, value_token.value)
|
|
388
|
+
|
|
389
|
+
|
|
390
|
+
class ValueType(Enum):
    """Supported value types for filter fields.

    NOTE: members use auto() so their order is significant; do not reorder.
    """
    STRING = auto()      # case-insensitive string comparisons
    INT = auto()
    FLOAT = auto()
    BOOLEAN = auto()
    DATE = auto()        # parsed from 'YYYY-MM-DD'
    DATETIME = auto()    # parsed from ISO-like formats
    IP_ADDRESS = auto()  # compared as string(s); getters may return a list
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
# Type variable parameterizing FilterField/FilterRegistry/FilterQuery over
# the object type being filtered (e.g. pylo.Workload).
T = TypeVar('T')
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
@dataclass
class FilterField(Generic[T]):
    """Definition of a filterable field.

    Attributes:
        name: field name as written in queries (case-insensitive, may be dotted).
        value_type: the ValueType used to convert and compare query values.
        getter: callable extracting the field's value from an object.
        description: human-readable help text.
        supported_operators: allowed comparison operators; when omitted, a
            default set is derived from value_type in __post_init__.
    """
    name: str
    value_type: ValueType
    getter: Callable[[T], Any]
    description: str = ""
    supported_operators: Optional[List[TokenType]] = None

    def __post_init__(self):
        # Only fill in defaults when the caller did not specify operators.
        if self.supported_operators is not None:
            return

        equality = [TokenType.EQ, TokenType.NEQ]
        ordering = equality + [TokenType.LT, TokenType.GT, TokenType.LTE, TokenType.GTE]

        # Default operator set per value type (same sets as before, expressed
        # as a lookup table instead of an if/elif chain).
        defaults = {
            ValueType.STRING: equality + [TokenType.CONTAINS, TokenType.MATCHES],
            ValueType.INT: ordering,
            ValueType.FLOAT: ordering,
            ValueType.BOOLEAN: equality,
            ValueType.DATE: ordering,
            ValueType.DATETIME: ordering,
            ValueType.IP_ADDRESS: equality + [TokenType.CONTAINS],
        }
        self.supported_operators = defaults.get(self.value_type)
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
class FilterRegistry(ABC, Generic[T]):
    """
    Base class for filter registries.
    Subclass this to define filterable fields for specific object types.

    A registry maps case-insensitive field names to FilterField definitions
    and implements value conversion and comparison for conditions.
    """

    def __init__(self):
        # Field lookup table, keyed by lowercased field name.
        self._fields: Dict[str, FilterField[T]] = {}

    def register_field(self, field: FilterField[T]):
        """Register a filterable field (last registration wins on name clash)"""
        self._fields[field.name.lower()] = field

    def get_field(self, name: str) -> Optional[FilterField[T]]:
        """Get a field by name (case-insensitive)"""
        return self._fields.get(name.lower())

    def get_all_fields(self) -> Dict[str, FilterField[T]]:
        """Get all registered fields (shallow copy of the lookup table)"""
        return self._fields.copy()

    def evaluate_condition(self, obj: T, field_name: str, operator: TokenType, value: Any) -> bool:
        """Evaluate a single condition against an object.

        Raises:
            pylo.PyloEx: unknown field, unsupported operator, or a value
                that cannot be converted to the field's type.
        """
        field = self.get_field(field_name)
        if field is None:
            raise pylo.PyloEx(f"Unknown field '{field_name}'. Available fields: {', '.join(self._fields.keys())}")

        if operator not in field.supported_operators:
            raise pylo.PyloEx(
                f"Operator {operator.name} is not supported for field '{field_name}'. "
                f"Supported operators: {[op.name for op in field.supported_operators]}"
            )

        # Get the actual value from the object
        actual_value = field.getter(obj)

        # Convert value if needed based on field type
        converted_value = self._convert_value(value, field.value_type)

        # Perform the comparison
        return self._compare(actual_value, operator, converted_value, field.value_type)

    def _convert_value(self, value: Any, value_type: ValueType) -> Any:
        """Convert a parsed query value to the field's declared type.

        None passes through unchanged (compared specially in _compare).

        Raises:
            pylo.PyloEx: when the value cannot be converted.
        """
        if value is None:
            return None

        try:
            if value_type == ValueType.STRING:
                return str(value)
            elif value_type == ValueType.INT:
                return int(value)
            elif value_type == ValueType.FLOAT:
                return float(value)
            elif value_type == ValueType.BOOLEAN:
                if isinstance(value, bool):
                    return value
                if isinstance(value, str):
                    # Accept common truthy spellings; anything else is False.
                    return value.lower() in ('true', '1', 'yes')
                return bool(value)
            elif value_type == ValueType.DATE:
                if isinstance(value, date):
                    return value
                if isinstance(value, str):
                    return datetime.strptime(value, '%Y-%m-%d').date()
                raise pylo.PyloEx(f"Cannot convert {value!r} to date")
            elif value_type == ValueType.DATETIME:
                if isinstance(value, datetime):
                    return value
                if isinstance(value, str):
                    # Try multiple formats; a bare date parses to midnight.
                    for fmt in ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y-%m-%d'):
                        try:
                            return datetime.strptime(value, fmt)
                        except ValueError:
                            continue
                    raise pylo.PyloEx(f"Cannot parse datetime from '{value}'")
                raise pylo.PyloEx(f"Cannot convert {value!r} to datetime")
            elif value_type == ValueType.IP_ADDRESS:
                # IPs are compared as plain strings (no CIDR/normalization here).
                return str(value)
        except (ValueError, TypeError) as e:
            raise pylo.PyloEx(f"Cannot convert value '{value}' to {value_type.name}: {e}")

        # Fallback for any ValueType not handled above (currently unreachable).
        return value

    def _compare(self, actual: Any, operator: TokenType, expected: Any, value_type: ValueType) -> bool:
        """Compare actual value with expected value using the operator.

        Returns False for any operator/type combination not handled below.
        """
        # Handle None values: only EQ/NEQ are meaningful; the string 'none'
        # is accepted as a query-side spelling of None.
        if actual is None:
            if operator == TokenType.EQ:
                return expected is None or (isinstance(expected, str) and expected.lower() == 'none')
            elif operator == TokenType.NEQ:
                return expected is not None and not (isinstance(expected, str) and expected.lower() == 'none')
            return False

        # String comparisons (case-insensitive by default)
        if value_type == ValueType.STRING:
            actual_lower = actual.lower() if isinstance(actual, str) else str(actual).lower()
            expected_lower = expected.lower() if isinstance(expected, str) else str(expected).lower()

            if operator == TokenType.EQ:
                return actual_lower == expected_lower
            elif operator == TokenType.NEQ:
                return actual_lower != expected_lower
            elif operator == TokenType.CONTAINS:
                return expected_lower in actual_lower
            elif operator == TokenType.MATCHES:
                # NOTE(review): MATCHES searches the raw (un-lowered) actual;
                # case is handled via re.IGNORECASE. A non-str actual would
                # raise TypeError here — confirm getters return str.
                try:
                    return bool(re.search(expected, actual, re.IGNORECASE))
                except re.error as e:
                    raise pylo.PyloEx(f"Invalid regex pattern '{expected}': {e}")

        # Numeric comparisons
        elif value_type in (ValueType.INT, ValueType.FLOAT):
            if operator == TokenType.EQ:
                return actual == expected
            elif operator == TokenType.NEQ:
                return actual != expected
            elif operator == TokenType.LT:
                return actual < expected
            elif operator == TokenType.GT:
                return actual > expected
            elif operator == TokenType.LTE:
                return actual <= expected
            elif operator == TokenType.GTE:
                return actual >= expected

        # Boolean comparisons
        elif value_type == ValueType.BOOLEAN:
            if operator == TokenType.EQ:
                return actual == expected
            elif operator == TokenType.NEQ:
                return actual != expected

        # Date/DateTime comparisons
        elif value_type in (ValueType.DATE, ValueType.DATETIME):
            # Convert actual to comparable format if it's a datetime and expected is a date
            # (isinstance(datetime_obj, date) is True, hence the extra check).
            if isinstance(actual, datetime) and isinstance(expected, date) and not isinstance(expected, datetime):
                actual = actual.date()

            if operator == TokenType.EQ:
                return actual == expected
            elif operator == TokenType.NEQ:
                return actual != expected
            elif operator == TokenType.LT:
                return actual < expected
            elif operator == TokenType.GT:
                return actual > expected
            elif operator == TokenType.LTE:
                return actual <= expected
            elif operator == TokenType.GTE:
                return actual >= expected

        # IP Address comparisons
        elif value_type == ValueType.IP_ADDRESS:
            # Handle list of IPs (from interfaces): EQ/CONTAINS test membership.
            if isinstance(actual, (list, tuple)):
                if operator == TokenType.EQ:
                    return expected in actual
                elif operator == TokenType.NEQ:
                    return expected not in actual
                elif operator == TokenType.CONTAINS:
                    return expected in actual
            else:
                if operator == TokenType.EQ:
                    return actual == expected
                elif operator == TokenType.NEQ:
                    return actual != expected
                elif operator == TokenType.CONTAINS:
                    # Substring test against the string form of the address.
                    return expected in str(actual)

        return False
|
|
610
|
+
|
|
611
|
+
|
|
612
|
+
class FilterQuery(Generic[T]):
    """
    Main class for executing filter queries against objects.

    Usage:
        registry = WorkloadFilterRegistry()
        query = FilterQuery(registry)
        results = query.execute("name contains 'prod' and online == true", workloads)
    """

    def __init__(self, registry: FilterRegistry[T]):
        self.registry = registry
        self._ast: Optional[QueryNode] = None
        self._query_string: Optional[str] = None

    def parse(self, query_string: str) -> 'FilterQuery[T]':
        """Tokenize and parse a query string into an AST; returns self for chaining."""
        self._query_string = query_string
        tokens = QueryLexer(query_string).tokenize()
        self._ast = QueryParser(tokens).parse()
        return self

    def evaluate(self, obj: T) -> bool:
        """Evaluate the previously parsed query against a single object."""
        if self._ast is None:
            raise pylo.PyloEx("No query has been parsed. Call parse() first.")
        return self._ast.evaluate(obj, self.registry)

    def execute(self, query_string: str, objects: List[T]) -> List[T]:
        """Parse a query, then return the objects from the list that match it."""
        self.parse(query_string)
        matched = []
        for candidate in objects:
            if self.evaluate(candidate):
                matched.append(candidate)
        return matched

    def execute_to_dict(self, query_string: str, objects: Dict[str, T]) -> Dict[str, T]:
        """Parse a query, then return the dict entries whose values match it."""
        self.parse(query_string)
        result: Dict[str, T] = {}
        for key, candidate in objects.items():
            if self.evaluate(candidate):
                result[key] = candidate
        return result
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
# =============================================================================
|
|
654
|
+
# Workload Filter Registry
|
|
655
|
+
# =============================================================================
|
|
656
|
+
|
|
657
|
+
class WorkloadFilterRegistry(FilterRegistry['pylo.Workload']):
|
|
658
|
+
"""
|
|
659
|
+
Filter registry for Workload objects.
|
|
660
|
+
Defines all filterable fields for workloads.
|
|
661
|
+
|
|
662
|
+
If an Organization is provided, label fields will be dynamically registered
|
|
663
|
+
for all label types configured in that PCE (not just the default role, app, env, loc).
|
|
664
|
+
"""
|
|
665
|
+
|
|
666
|
+
def __init__(self, org: Optional['pylo.Organization'] = None):
|
|
667
|
+
super().__init__()
|
|
668
|
+
self._org = org
|
|
669
|
+
self._register_fields()
|
|
670
|
+
|
|
671
|
+
def _register_fields(self):
|
|
672
|
+
"""Register all filterable fields for Workloads"""
|
|
673
|
+
|
|
674
|
+
# Basic identity fields
|
|
675
|
+
self.register_field(FilterField(
|
|
676
|
+
name='name',
|
|
677
|
+
value_type=ValueType.STRING,
|
|
678
|
+
getter=lambda w: w.get_name(),
|
|
679
|
+
description='Workload name (forced_name if set, otherwise hostname)'
|
|
680
|
+
))
|
|
681
|
+
|
|
682
|
+
self.register_field(FilterField(
|
|
683
|
+
name='hostname',
|
|
684
|
+
value_type=ValueType.STRING,
|
|
685
|
+
getter=lambda w: w.hostname,
|
|
686
|
+
description='Workload hostname'
|
|
687
|
+
))
|
|
688
|
+
|
|
689
|
+
self.register_field(FilterField(
|
|
690
|
+
name='forced_name',
|
|
691
|
+
value_type=ValueType.STRING,
|
|
692
|
+
getter=lambda w: w.forced_name,
|
|
693
|
+
description='Manually set workload name'
|
|
694
|
+
))
|
|
695
|
+
|
|
696
|
+
self.register_field(FilterField(
|
|
697
|
+
name='href',
|
|
698
|
+
value_type=ValueType.STRING,
|
|
699
|
+
getter=lambda w: w.href,
|
|
700
|
+
description='Workload HREF'
|
|
701
|
+
))
|
|
702
|
+
|
|
703
|
+
self.register_field(FilterField(
|
|
704
|
+
name='description',
|
|
705
|
+
value_type=ValueType.STRING,
|
|
706
|
+
getter=lambda w: w.description or '',
|
|
707
|
+
description='Workload description'
|
|
708
|
+
))
|
|
709
|
+
|
|
710
|
+
# Status fields
|
|
711
|
+
self.register_field(FilterField(
|
|
712
|
+
name='online',
|
|
713
|
+
value_type=ValueType.BOOLEAN,
|
|
714
|
+
getter=lambda w: w.online,
|
|
715
|
+
description='Whether the workload is online'
|
|
716
|
+
))
|
|
717
|
+
|
|
718
|
+
self.register_field(FilterField(
|
|
719
|
+
name='managed',
|
|
720
|
+
value_type=ValueType.BOOLEAN,
|
|
721
|
+
getter=lambda w: not w.unmanaged,
|
|
722
|
+
description='Whether the workload is managed (has VEN)'
|
|
723
|
+
))
|
|
724
|
+
|
|
725
|
+
self.register_field(FilterField(
|
|
726
|
+
name='unmanaged',
|
|
727
|
+
value_type=ValueType.BOOLEAN,
|
|
728
|
+
getter=lambda w: w.unmanaged,
|
|
729
|
+
description='Whether the workload is unmanaged'
|
|
730
|
+
))
|
|
731
|
+
|
|
732
|
+
self.register_field(FilterField(
|
|
733
|
+
name='deleted',
|
|
734
|
+
value_type=ValueType.BOOLEAN,
|
|
735
|
+
getter=lambda w: w.deleted,
|
|
736
|
+
description='Whether the workload is deleted'
|
|
737
|
+
))
|
|
738
|
+
|
|
739
|
+
# OS fields
|
|
740
|
+
self.register_field(FilterField(
|
|
741
|
+
name='os_id',
|
|
742
|
+
value_type=ValueType.STRING,
|
|
743
|
+
getter=lambda w: w.os_id or '',
|
|
744
|
+
description='Operating system identifier'
|
|
745
|
+
))
|
|
746
|
+
|
|
747
|
+
self.register_field(FilterField(
|
|
748
|
+
name='os_detail',
|
|
749
|
+
value_type=ValueType.STRING,
|
|
750
|
+
getter=lambda w: w.os_detail or '',
|
|
751
|
+
description='Operating system details'
|
|
752
|
+
))
|
|
753
|
+
|
|
754
|
+
# IP address field
|
|
755
|
+
self.register_field(FilterField(
|
|
756
|
+
name='ip_address',
|
|
757
|
+
value_type=ValueType.IP_ADDRESS,
|
|
758
|
+
getter=lambda w: [iface.ip for iface in w.interfaces if iface.ip],
|
|
759
|
+
description='Workload IP addresses (checks all interfaces)'
|
|
760
|
+
))
|
|
761
|
+
|
|
762
|
+
# Alias for ip_address
|
|
763
|
+
self.register_field(FilterField(
|
|
764
|
+
name='ip',
|
|
765
|
+
value_type=ValueType.IP_ADDRESS,
|
|
766
|
+
getter=lambda w: [iface.ip for iface in w.interfaces if iface.ip],
|
|
767
|
+
description='Workload IP addresses (alias for ip_address)'
|
|
768
|
+
))
|
|
769
|
+
|
|
770
|
+
# VEN/Agent fields
|
|
771
|
+
self.register_field(FilterField(
|
|
772
|
+
name='last_heartbeat',
|
|
773
|
+
value_type=ValueType.DATETIME,
|
|
774
|
+
getter=lambda w: w.ven_agent.get_last_heartbeat_date() if w.ven_agent else None,
|
|
775
|
+
description='Last VEN heartbeat timestamp'
|
|
776
|
+
))
|
|
777
|
+
|
|
778
|
+
# Alias with different naming
|
|
779
|
+
self.register_field(FilterField(
|
|
780
|
+
name='last_heartbeat_received',
|
|
781
|
+
value_type=ValueType.DATETIME,
|
|
782
|
+
getter=lambda w: w.ven_agent.get_last_heartbeat_date() if w.ven_agent else None,
|
|
783
|
+
description='Last VEN heartbeat timestamp (alias)'
|
|
784
|
+
))
|
|
785
|
+
|
|
786
|
+
self.register_field(FilterField(
|
|
787
|
+
name='agent.status',
|
|
788
|
+
value_type=ValueType.STRING,
|
|
789
|
+
getter=lambda w: w.ven_agent.status if w.ven_agent else None,
|
|
790
|
+
description='VEN agent status (active, stopped, suspended, uninstalled)'
|
|
791
|
+
))
|
|
792
|
+
|
|
793
|
+
self.register_field(FilterField(
|
|
794
|
+
name='agent.mode',
|
|
795
|
+
value_type=ValueType.STRING,
|
|
796
|
+
getter=lambda w: w.ven_agent.mode if w.ven_agent else None,
|
|
797
|
+
description='VEN agent mode (idle, build, test, enforced)'
|
|
798
|
+
))
|
|
799
|
+
|
|
800
|
+
self.register_field(FilterField(
|
|
801
|
+
name='mode',
|
|
802
|
+
value_type=ValueType.STRING,
|
|
803
|
+
getter=lambda w: w.ven_agent.mode if w.ven_agent else None,
|
|
804
|
+
description='VEN agent mode (alias for agent.mode)'
|
|
805
|
+
))
|
|
806
|
+
|
|
807
|
+
self.register_field(FilterField(
|
|
808
|
+
name='agent.version',
|
|
809
|
+
value_type=ValueType.STRING,
|
|
810
|
+
getter=lambda w: w.ven_agent.software_version.version_string if w.ven_agent and w.ven_agent.software_version else None,
|
|
811
|
+
description='VEN software version'
|
|
812
|
+
))
|
|
813
|
+
|
|
814
|
+
# Register label fields dynamically based on organization's label types
|
|
815
|
+
self._register_label_fields()
|
|
816
|
+
|
|
817
|
+
# Date fields
|
|
818
|
+
self.register_field(FilterField(
|
|
819
|
+
name='created_at',
|
|
820
|
+
value_type=ValueType.DATETIME,
|
|
821
|
+
getter=lambda w: w.created_at_datetime(),
|
|
822
|
+
description='Workload creation timestamp'
|
|
823
|
+
))
|
|
824
|
+
|
|
825
|
+
# Reference tracking
|
|
826
|
+
self.register_field(FilterField(
|
|
827
|
+
name='reference_count',
|
|
828
|
+
value_type=ValueType.INT,
|
|
829
|
+
getter=lambda w: w.count_references(),
|
|
830
|
+
description='Number of references to this workload'
|
|
831
|
+
))
|
|
832
|
+
|
|
833
|
+
def _register_label_fields(self):
    """
    Register label fields dynamically.

    If an Organization is provided, register fields for all label types
    configured in the PCE. Otherwise, register only the default label
    types (role, app, env, loc).

    Each label type is registered twice: once under the ``label.`` prefix
    (e.g. ``label.env``) and once as a bare shorthand alias (e.g. ``env``).
    """
    if self._org is not None:
        # Use the label types exposed by the organization's LabelStore
        label_types = self._org.LabelStore.label_types
    else:
        # No organization available: fall back to the classic 4 dimensions
        label_types = ['role', 'app', 'env', 'loc']

    for label_type in label_types:
        # Factory function binds label_type by value, avoiding the
        # late-binding closure pitfall of defining lambdas in a loop.
        def make_getter(lt: str):
            def getter(w):
                # Look the label up once; the previous lambda evaluated
                # w.get_label(lt) twice per call.
                label = w.get_label(lt)
                return label.name if label else None
            return getter

        getter = make_getter(label_type)

        # Register with 'label.' prefix
        self.register_field(FilterField(
            name=f'label.{label_type}',
            value_type=ValueType.STRING,
            getter=getter,
            description=f'{label_type.capitalize()} label name'
        ))

        # Register shorthand alias (just the label type name).
        # NOTE(review): a custom label type whose name matches a built-in
        # field (e.g. 'name' or 'mode') would overwrite that field here —
        # confirm the PCE forbids such label type names.
        self.register_field(FilterField(
            name=label_type,
            value_type=ValueType.STRING,
            getter=getter,
            description=f'{label_type.capitalize()} label name (alias for label.{label_type})'
        ))
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
# Registry cache: maps organization id (or None for the default registry) to
# a WorkloadFilterRegistry instance. Populated lazily by
# get_workload_filter_registry(); entries are never evicted.
_workload_filter_registry_cache: Dict[Optional[int], WorkloadFilterRegistry] = {}
|
|
872
|
+
|
|
873
|
+
|
|
874
|
+
def get_workload_filter_registry(org: Optional['pylo.Organization'] = None) -> WorkloadFilterRegistry:
    """
    Get a WorkloadFilterRegistry instance.

    When an Organization is supplied, the returned registry includes label
    fields for every label type configured in that PCE. One registry is
    cached per organization (keyed by its id); passing no organization
    returns the shared default registry.

    :param org: Optional Organization to get label types from
    :return: WorkloadFilterRegistry instance
    """
    # Organization id keys the cache; None selects the default registry.
    # No `global` statement needed: the module-level dict is mutated in
    # place, never rebound.
    cache_key = None if org is None else org.id

    registry = _workload_filter_registry_cache.get(cache_key)
    if registry is None:
        registry = WorkloadFilterRegistry(org)
        _workload_filter_registry_cache[cache_key] = registry

    return registry
|