dissect.util 3.24.dev1__cp314-cp314t-manylinux_2_28_s390x.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dissect/util/__init__.py +20 -0
- dissect/util/_build.py +17 -0
- dissect/util/_native/__init__.pyi +3 -0
- dissect/util/_native/compression/__init__.pyi +3 -0
- dissect/util/_native/compression/lz4.pyi +7 -0
- dissect/util/_native/compression/lzo.pyi +3 -0
- dissect/util/_native/hash/__init__.py +3 -0
- dissect/util/_native/hash/crc32c.py +2 -0
- dissect/util/_native.cpython-314t-s390x-linux-gnu.so +0 -0
- dissect/util/compression/__init__.py +45 -0
- dissect/util/compression/lz4.py +95 -0
- dissect/util/compression/lzbitmap.py +130 -0
- dissect/util/compression/lzfse.py +467 -0
- dissect/util/compression/lznt1.py +92 -0
- dissect/util/compression/lzo.py +118 -0
- dissect/util/compression/lzvn.py +241 -0
- dissect/util/compression/lzxpress.py +80 -0
- dissect/util/compression/lzxpress_huffman.py +184 -0
- dissect/util/compression/sevenbit.py +77 -0
- dissect/util/compression/xz.py +112 -0
- dissect/util/cpio.py +226 -0
- dissect/util/encoding/__init__.py +0 -0
- dissect/util/encoding/surrogateescape.py +21 -0
- dissect/util/exceptions.py +6 -0
- dissect/util/hash/__init__.py +28 -0
- dissect/util/hash/crc32.py +55 -0
- dissect/util/hash/crc32c.py +60 -0
- dissect/util/hash/jenkins.py +102 -0
- dissect/util/ldap.py +237 -0
- dissect/util/plist.py +156 -0
- dissect/util/sid.py +81 -0
- dissect/util/stream.py +671 -0
- dissect/util/tools/__init__.py +0 -0
- dissect/util/tools/dump_nskeyedarchiver.py +61 -0
- dissect/util/ts.py +295 -0
- dissect/util/xmemoryview.py +117 -0
- dissect_util-3.24.dev1.dist-info/METADATA +89 -0
- dissect_util-3.24.dev1.dist-info/RECORD +43 -0
- dissect_util-3.24.dev1.dist-info/WHEEL +5 -0
- dissect_util-3.24.dev1.dist-info/entry_points.txt +2 -0
- dissect_util-3.24.dev1.dist-info/licenses/COPYRIGHT +5 -0
- dissect_util-3.24.dev1.dist-info/licenses/LICENSE +201 -0
- dissect_util-3.24.dev1.dist-info/top_level.txt +1 -0
dissect/util/ldap.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import operator
|
|
4
|
+
import re
|
|
5
|
+
from enum import Enum
|
|
6
|
+
|
|
7
|
+
from dissect.util.exceptions import Error
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class InvalidQueryError(Error):
    """Raised when an LDAP search filter string is syntactically invalid."""

    pass
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class LogicalOperator(Enum):
    """Logical operators that combine nested LDAP filter expressions."""

    AND = "&"
    OR = "|"
    NOT = "!"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# Tuple form so the operator characters can be passed directly to str.startswith().
_LOGICAL_OPERATORS = tuple(op.value for op in LogicalOperator)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ComparisonOperator(Enum):
    """Comparison operators used in simple (leaf) LDAP filter expressions."""

    GE = ">="
    LE = "<="
    GT = ">"
    LT = "<"
    EQ = "="
    APPROX = "~="
    BIT = ":="
    EXTENDED = ":"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
# All comparison operators except EXTENDED, which has its own regex-based parsing path.
_NORMAL_COMPARISON_OPERATORS = [op for op in ComparisonOperator if op != ComparisonOperator.EXTENDED]
# Longest operator strings first, so e.g. ">=" is matched before ">" during detection.
_SORTED_COMPARISON_OPERATORS = sorted(_NORMAL_COMPARISON_OPERATORS, key=lambda op: len(op.value), reverse=True)
|
|
36
|
+
|
|
37
|
+
# Matches extended match filters of the form "attribute:rule:=value" (value optional).
_RE_EXTENDED = re.compile(r"(.+?):(.+?):=(.+)?")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class SearchFilter:
    """Represents an LDAP search filter (simple or nested).

    Args:
        query: The LDAP search filter string.
    """

    def __init__(self, query: str) -> None:
        # The original query string, e.g. "(cn=foo)" or "(&(a=1)(b=2))".
        self.query: str = query

        # Populated by the nested parser; stays empty for simple filters.
        self.children: list[SearchFilter] = []
        self.operator: LogicalOperator | ComparisonOperator | None = None
        self.attribute: str | None = None
        self.value: str | None = None
        # Matching rule of an extended filter ("attr:rule:=value"); None otherwise.
        self._extended_rule: str | None = None

        _validate_syntax(query)

        # A filter whose first character inside the outer parentheses is one of
        # "&", "|" or "!" is a nested (logical) filter; anything else is a
        # simple comparison filter.
        if query[1:-1].startswith(_LOGICAL_OPERATORS):
            self._parse_nested()
        else:
            self._parse_simple()

    def __repr__(self) -> str:
        if self.is_nested():
            return f"<SearchFilter nested operator={self.operator.value!r} children={self.children}>"
        return f"<SearchFilter attribute={self.attribute!r} operator={self.operator.value!r} value={self.value}>"

    @classmethod
    def parse(cls, query: str, optimize: bool = True) -> SearchFilter:
        """Parse an LDAP query into a filter object, with optional optimization."""
        result = cls(query)
        if optimize:
            # optimize_ldap_query returns (filter, weight); only the filter is needed here.
            return optimize_ldap_query(result)[0]
        return result

    def is_nested(self) -> bool:
        """Return whether the filter is nested (i.e., contains logical operators and child filters)."""
        return isinstance(self.operator, LogicalOperator)

    def format(self) -> str:
        """Format the search filter back into an LDAP query string."""
        if self.is_nested():
            # Re-serialize children in their current order inside the logical operator.
            childs = "".join([child.format() for child in self.children])
            return f"({self.operator.value}{childs})"

        if self.operator == ComparisonOperator.EXTENDED:
            return f"({self.attribute}:{self._extended_rule}:={self.value})"

        return f"({self.attribute}{self.operator.value}{self.value})"

    def _parse_simple(self) -> None:
        """Parse simple filter."""
        # Strip the outer parentheses.
        query = self.query[1:-1]

        # Check for extended matching rules first
        if ":" in query and (match := _RE_EXTENDED.match(query)):
            self.operator = ComparisonOperator.EXTENDED
            self.attribute, self._extended_rule, self.value = match.groups()
            return

        # Regular operator parsing
        # Operators are tested longest-first and removed from the working copy as
        # they are found, so e.g. ">=" is not also counted as ">" and "=".
        test = query
        operators: list[ComparisonOperator] = []
        for op in _SORTED_COMPARISON_OPERATORS:
            if op.value not in test:
                continue

            if test.count(op.value) > 1:
                raise InvalidQueryError(f"Comparison operator {op.value} found multiple times in query: {self.query}")

            operators.append(op)
            test = test.replace(op.value, "")

        if len(operators) == 0:
            raise InvalidQueryError(
                f"No comparison operator found in query: {self.query}. "
                f"Expected one of {[op.value for op in _NORMAL_COMPARISON_OPERATORS]}."
            )

        if len(operators) > 1:
            raise InvalidQueryError(
                f"Multiple comparison operators found in query: {self.query} -> {[o.value for o in operators]} "
                f"Expected only one of {[op.value for op in _NORMAL_COMPARISON_OPERATORS]}."
            )

        self.operator = operators[0]
        # Split on the single detected operator: everything before is the
        # attribute, everything after is the value.
        self.attribute, _, self.value = query.partition(self.operator.value)

    def _parse_nested(self) -> None:
        """Parse nested filter."""
        # Strip the outer parentheses; the first remaining character is the logical operator.
        query = self.query[1:-1]
        self.operator = LogicalOperator(query[0])

        # Scan the remainder for balanced "(...)" groups; each group becomes a
        # child SearchFilter (which re-validates its own syntax).
        start = 1
        while start < len(query):
            end = start + 1
            # depth starts at 1 — query[start] is expected to open a child group.
            depth = 1

            while end < len(query) and depth > 0:
                if query[end] == "(":
                    depth += 1
                elif query[end] == ")":
                    depth -= 1
                end += 1

            self.children.append(SearchFilter(query[start:end]))
            start = end
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
# Relative selectivity of common Active Directory attributes used by the query
# optimizer: a lower weight means the attribute is treated as more specific, so
# filters on it are ordered earlier. Unknown attributes get the maximum weight.
_ATTRIBUTE_WEIGHTS = {
    "objectGUID": 1,
    "distinguishedName": 1,
    "sAMAccountName": 2,
    "userPrincipalName": 2,
    "mail": 2,
    "sAMAccountType": 3,
    "servicePrincipalName": 3,
    "userAccountControl": 4,
    "memberOf": 5,
    "member": 5,
    "pwdLastSet": 5,
    "primaryGroupID": 6,
    "whenCreated": 6,
    "ou": 6,
    "lastLogonTimestamp": 6,
    "cn": 7,
    "givenName": 7,
    "name": 7,
    "telephoneNumber": 7,
    "objectCategory": 8,
    "description": 9,
    "objectClass": 10,
}
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def optimize_ldap_query(query: SearchFilter) -> tuple[SearchFilter, int]:
    """Optimize an LDAP query in-place.

    Removes redundant conditions and sorts filters and conditions based on how specific they are.

    Args:
        query: The LDAP query to optimize.

    Returns:
        A tuple containing the optimized LDAP query and its weight.
    """
    fallback_weight = max(_ATTRIBUTE_WEIGHTS.values())

    # Base case: a simple (leaf) filter is weighted by its attribute's selectivity.
    if not query.is_nested():
        return query, _ATTRIBUTE_WEIGHTS.get(query.attribute, fallback_weight)

    if len(query.children) == 1:
        # (&(x)) and (|(x)) are equivalent to just (x): unwrap and recurse.
        if query.operator in (LogicalOperator.AND, LogicalOperator.OR):
            return optimize_ldap_query(query.children[0])

        # (!(x)): optimize the negated child but keep the NOT wrapper and the child's weight.
        if query.operator == LogicalOperator.NOT:
            optimized_child, weight = optimize_ldap_query(query.children[0])

            query.children[0] = optimized_child
            query.query = query.format()

            return query, weight

    if len(query.children) > 1:
        # Optimize every child, then order the most specific (lowest weight) first.
        ranked = sorted((optimize_ldap_query(child) for child in query.children), key=operator.itemgetter(1))

        query.children = [child for child, _ in ranked]
        query.query = query.format()

        # The branch as a whole is weighted by its heaviest (least specific) child.
        return query, max(weight for _, weight in ranked)

    # Degenerate nested filter without children: assume worst-case weight.
    return query, fallback_weight
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def _validate_syntax(query: str) -> None:
|
|
217
|
+
"""Validate basic LDAP query syntax.
|
|
218
|
+
|
|
219
|
+
Args:
|
|
220
|
+
query: The LDAP query to validate.
|
|
221
|
+
"""
|
|
222
|
+
if not query:
|
|
223
|
+
raise InvalidQueryError("Empty query")
|
|
224
|
+
|
|
225
|
+
if not query.startswith("(") or not query.endswith(")"):
|
|
226
|
+
raise InvalidQueryError(f"Query must be wrapped in parentheses: {query}")
|
|
227
|
+
|
|
228
|
+
if query.count("(") != query.count(")"):
|
|
229
|
+
raise InvalidQueryError(f"Unbalanced parentheses in query: {query}")
|
|
230
|
+
|
|
231
|
+
# Check for empty parentheses
|
|
232
|
+
if "()" in query:
|
|
233
|
+
raise InvalidQueryError(f"Empty parentheses found in query: {query}")
|
|
234
|
+
|
|
235
|
+
# Check for queries that start with double opening parentheses
|
|
236
|
+
if query.startswith("(("):
|
|
237
|
+
raise InvalidQueryError(f"Invalid query structure: {query}")
|
dissect/util/plist.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import plistlib
|
|
4
|
+
import uuid
|
|
5
|
+
from collections import UserDict
|
|
6
|
+
from typing import TYPE_CHECKING, Any, BinaryIO
|
|
7
|
+
|
|
8
|
+
from dissect.util.ts import cocoatimestamp
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class NSKeyedArchiver:
|
|
15
|
+
def __init__(self, fh: BinaryIO):
|
|
16
|
+
self.plist = plistlib.load(fh)
|
|
17
|
+
|
|
18
|
+
if not isinstance(self.plist, dict) or not all(
|
|
19
|
+
key in self.plist for key in ["$version", "$archiver", "$top", "$objects"]
|
|
20
|
+
):
|
|
21
|
+
raise ValueError("File is not an NSKeyedArchiver plist")
|
|
22
|
+
|
|
23
|
+
self._objects = self.plist.get("$objects")
|
|
24
|
+
self._cache = {}
|
|
25
|
+
|
|
26
|
+
self.top = {}
|
|
27
|
+
for name, value in self.plist.get("$top", {}).items():
|
|
28
|
+
self.top[name] = self._parse(value)
|
|
29
|
+
|
|
30
|
+
def __getitem__(self, key: str) -> Any:
|
|
31
|
+
return self.top[key]
|
|
32
|
+
|
|
33
|
+
def __repr__(self) -> str:
|
|
34
|
+
return f"<NSKeyedArchiver top={self.top}>"
|
|
35
|
+
|
|
36
|
+
def get(self, key: str, default: Any | None = None) -> Any:
|
|
37
|
+
return self.top.get(key, default)
|
|
38
|
+
|
|
39
|
+
def _parse(self, uid: Any) -> Any:
|
|
40
|
+
if not isinstance(uid, plistlib.UID):
|
|
41
|
+
return uid
|
|
42
|
+
|
|
43
|
+
num = uid.data
|
|
44
|
+
if num in self._cache:
|
|
45
|
+
return self._cache[num]
|
|
46
|
+
result = self._parse_obj(self._objects[num])
|
|
47
|
+
self._cache[num] = result
|
|
48
|
+
return result
|
|
49
|
+
|
|
50
|
+
def _parse_obj(self, obj: Any) -> Any:
|
|
51
|
+
if isinstance(obj, dict):
|
|
52
|
+
klass = obj.get("$class")
|
|
53
|
+
if klass:
|
|
54
|
+
klass_name = self._parse(klass).get("$classname")
|
|
55
|
+
return CLASSES.get(klass_name, NSObject)(self, obj)
|
|
56
|
+
return obj
|
|
57
|
+
|
|
58
|
+
if isinstance(obj, list):
|
|
59
|
+
return list(map(self._parse, obj))
|
|
60
|
+
|
|
61
|
+
if isinstance(obj, bool | bytes | int | float) or obj is None:
|
|
62
|
+
return obj
|
|
63
|
+
|
|
64
|
+
if isinstance(obj, str):
|
|
65
|
+
return None if obj == "$null" else obj
|
|
66
|
+
|
|
67
|
+
return None
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class NSObject:
    """A generic object decoded from an NSKeyedArchiver archive.

    Item and attribute access both resolve entries of the underlying archived
    dictionary, parsing UID references through the owning archive on demand.
    """

    def __init__(self, nskeyed: NSKeyedArchiver, obj: dict[str, Any]):
        self.nskeyed = nskeyed  # owning archive, used to resolve UID references
        self.obj = obj  # raw archived dictionary

        # Resolve the $class reference once so class metadata is readily available.
        class_info = self.nskeyed._parse(obj.get("$class", {}))
        self._class = class_info
        self._classname = class_info.get("$classname", "Unknown")
        self._classes = class_info.get("$classes", [])

    def __getitem__(self, attr: str) -> Any:
        return self.nskeyed._parse(self.obj[attr])

    def __getattr__(self, attr: str) -> Any:
        # Unknown attributes fall back to entries of the archived dictionary.
        try:
            return self[attr]
        except KeyError:
            raise AttributeError(attr)

    def __repr__(self):
        return f"<{self._classname}>"

    def keys(self) -> list[str]:
        """Return the keys of the underlying archived dictionary."""
        return self.obj.keys()

    def get(self, attr: str, default: Any | None = None) -> Any:
        """Return the parsed value for ``attr``, or ``default`` when absent."""
        try:
            return self[attr]
        except KeyError:
            return default
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class NSDictionary(UserDict, NSObject):
    """NSDictionary/NSMutableDictionary decoded from an NSKeyedArchiver archive.

    Keys are parsed eagerly; values are kept as raw archive references and only
    parsed when accessed through ``__getitem__``.
    """

    def __init__(self, nskeyed: NSKeyedArchiver, obj: dict[str, Any]):
        NSObject.__init__(self, nskeyed, obj)
        # strict=False: mismatched NS.keys/NS.objects lengths truncate to the shortest.
        self.data = {nskeyed._parse(key): obj for key, obj in zip(obj["NS.keys"], obj["NS.objects"], strict=False)}

    def __repr__(self) -> str:
        # Use the "<classname>" repr from NSObject instead of UserDict's dict repr.
        return NSObject.__repr__(self)

    def __getitem__(self, key: str) -> Any:
        # Values are parsed lazily on access; repeated lookups are deduplicated
        # by the archive-level UID cache in NSKeyedArchiver._parse.
        return self.nskeyed._parse(self.data[key])
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def parse_nsarray(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> list[Any]:
    """Decode an NSArray/NSMutableArray into a Python list, resolving each member."""
    return [nskeyed._parse(entry) for entry in obj["NS.objects"]]
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def parse_nsset(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> list[Any]:
    """Decode an NSSet/NSMutableSet.

    Some values are not hashable, so the members are returned as a list rather
    than a Python set.
    """
    return list(map(nskeyed._parse, obj["NS.objects"]))
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def parse_nsdata(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> Any:
    """Return the raw payload stored in an NSData/NSMutableData object."""
    return obj["NS.data"]
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def parse_nsdate(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> datetime:
    """Convert an NSDate (seconds since the Cocoa epoch) into a datetime."""
    return cocoatimestamp(obj["NS.time"])
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def parse_nsuuid(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> uuid.UUID:
    """Build a UUID from the 16 raw bytes stored in an NSUUID object."""
    return uuid.UUID(bytes=obj["NS.uuidbytes"])
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def parse_nsurl(nskeyed: NSKeyedArchiver, obj: dict[str, Any]) -> str:
    """Join an NSURL's base and relative parts into a single URL string."""
    base = nskeyed._parse(obj["NS.base"])
    relative = nskeyed._parse(obj["NS.relative"])
    # Without a base URL, the relative part already is the full URL.
    return f"{base}/{relative}" if base else relative
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
# Dispatch table mapping archived Objective-C class names to their Python
# decoders: either a callable taking (nskeyed, obj) or an NSObject subclass.
# Unknown class names fall back to NSObject (see NSKeyedArchiver._parse_obj).
CLASSES = {
    "NSArray": parse_nsarray,
    "NSMutableArray": parse_nsarray,
    "NSDictionary": NSDictionary,
    "NSMutableDictionary": NSDictionary,
    "NSSet": parse_nsset,
    "NSMutableSet": parse_nsset,
    "NSData": parse_nsdata,
    "NSMutableData": parse_nsdata,
    "NSDate": parse_nsdate,
    "NSUUID": parse_nsuuid,
    "NSURL": parse_nsurl,
    "NSNull": lambda nskeyed, obj: None,
}
|
dissect/util/sid.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import io
|
|
4
|
+
import struct
|
|
5
|
+
from typing import BinaryIO
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def read_sid(fh: BinaryIO | bytes, endian: str = "<", swap_last: bool = False) -> str:
    """Read a Windows SID from bytes.

    Normally we'd do this with cstruct, but do it with just struct to keep dissect.util dependency-free.
    On the upside, this also improves performance!

    This is equivalent to the following structure::

        typedef struct _SID {
            BYTE Revision;
            BYTE SubAuthorityCount;
            CHAR IdentifierAuthority[6];
            DWORD SubAuthority[SubAuthorityCount];
        } SID;

    Args:
        fh: A file-like object or bytes object to read the SID from.
        endian: Optional endianness for reading the sub authorities.
        swap_last: Optional flag for swapping the endianness of the _last_ sub authority entry.

    Returns:
        The SID in string form (``S-Revision-Authority-SubAuth1-...``), or an
        empty string if fewer than 8 header bytes are available.
    """
    if isinstance(fh, bytes):
        fh = io.BytesIO(fh)

    # The fixed-size header is 8 bytes: revision, sub authority count and a 6 byte authority.
    if len(buf := fh.read(8)) != 8:
        return ""

    revision = buf[0]
    sub_authority_count = buf[1]
    # IdentifierAuthority is a 48-bit big-endian integer.
    authority = int.from_bytes(buf[2:], "big")

    sub_authority_buf = bytearray(fh.read(sub_authority_count * 4))
    if sub_authority_count and swap_last:
        # Some on-disk formats store the final sub authority byte-swapped.
        sub_authority_buf[-4:] = sub_authority_buf[-4:][::-1]

    sub_authorities = struct.unpack(f"{endian}{sub_authority_count}I", sub_authority_buf)

    sid_elements = [
        "S",
        f"{revision}",
        f"{authority}",
    ]
    sid_elements.extend(map(str, sub_authorities))
    return "-".join(sid_elements)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def write_sid(sid: str, endian: str = "<", swap_last: bool = False) -> bytes:
    """Write a Windows SID string to bytes.

    Args:
        sid: SID in the form ``S-Revision-Authority-SubAuth1-...``.
        endian: Optional endianness for writing the sub authorities.
        swap_last: Optional flag for swapping the endianness of the _last_ sub authority entry.

    Raises:
        ValueError: If the SID string does not have at least an ``S``, revision and authority part.
    """
    if not sid:
        return b""

    fields = sid.split("-")
    if len(fields) < 3 or fields[0].upper() != "S":
        raise ValueError("Invalid SID string format: insufficient parts")

    sub_authorities = [int(field) for field in fields[3:]]

    # Fixed 8-byte header: revision, sub authority count and a 48-bit big-endian authority.
    revision = int(fields[1]).to_bytes(1, "little")
    count = len(sub_authorities).to_bytes(1, "little")
    authority = int(fields[2]).to_bytes(6, "big")
    header = revision + count + authority

    if not sub_authorities:
        return header

    encoded = bytearray(struct.pack(f"{endian}{len(sub_authorities)}I", *sub_authorities))
    if swap_last:
        encoded[-4:] = encoded[-4:][::-1]

    return header + bytes(encoded)
|