flo-python 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flo/__init__.py +214 -0
- flo/actions.py +397 -0
- flo/client.py +284 -0
- flo/exceptions.py +224 -0
- flo/kv.py +257 -0
- flo/queue.py +376 -0
- flo/streams.py +379 -0
- flo/types.py +804 -0
- flo/wire.py +926 -0
- flo/worker.py +421 -0
- flo_python-0.1.0.dist-info/METADATA +561 -0
- flo_python-0.1.0.dist-info/RECORD +13 -0
- flo_python-0.1.0.dist-info/WHEEL +4 -0
flo/wire.py
ADDED
|
@@ -0,0 +1,926 @@
|
|
|
1
|
+
"""Flo Wire Protocol
|
|
2
|
+
|
|
3
|
+
Binary serialization/deserialization for the Flo protocol.
|
|
4
|
+
Header: 24 bytes, little-endian, CRC32 validated.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import struct
|
|
8
|
+
from binascii import crc32
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
|
|
11
|
+
from . import types as types
|
|
12
|
+
from .exceptions import (
|
|
13
|
+
IncompleteResponseError,
|
|
14
|
+
InvalidChecksumError,
|
|
15
|
+
InvalidMagicError,
|
|
16
|
+
KeyTooLargeError,
|
|
17
|
+
NamespaceTooLargeError,
|
|
18
|
+
UnsupportedVersionError,
|
|
19
|
+
ValueTooLargeError,
|
|
20
|
+
)
|
|
21
|
+
from .types import (
|
|
22
|
+
HEADER_SIZE,
|
|
23
|
+
MAGIC,
|
|
24
|
+
MAX_KEY_SIZE,
|
|
25
|
+
MAX_NAMESPACE_SIZE,
|
|
26
|
+
MAX_VALUE_SIZE,
|
|
27
|
+
VERSION,
|
|
28
|
+
DequeueResult,
|
|
29
|
+
KVEntry,
|
|
30
|
+
Message,
|
|
31
|
+
OpCode,
|
|
32
|
+
OptionTag,
|
|
33
|
+
ScanResult,
|
|
34
|
+
StatusCode,
|
|
35
|
+
StorageTier,
|
|
36
|
+
StreamAppendResult,
|
|
37
|
+
StreamInfo,
|
|
38
|
+
StreamReadResult,
|
|
39
|
+
StreamRecord,
|
|
40
|
+
VersionEntry,
|
|
41
|
+
)
|
|
42
|
+
|
|
43
|
+
# Header: magic(u32) + payload_len(u32) + request_id(u64)
# + crc32(u32) + version(u8) + status(u8) + flags(u8) + reserved(u8)
# Requests and responses share the same 24-byte little-endian layout;
# byte 21 carries the op_code in requests and the status code in responses.
REQUEST_HEADER_FORMAT = "<IIQIBBBB"
RESPONSE_HEADER_FORMAT = "<IIQIBBBB"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# =============================================================================
|
|
50
|
+
# TLV Options
|
|
51
|
+
# =============================================================================
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class OptionsBuilder:
    """Incrementally builds a TLV-encoded options blob.

    Each option is framed as [tag:u8][length:u8][data]; all add_* methods
    return self so calls can be chained fluently.
    """

    def __init__(self) -> None:
        self._parts = bytearray()

    def add_u8(self, tag: OptionTag, value: int) -> "OptionsBuilder":
        """Append a single-byte option (value truncated to 8 bits)."""
        self._parts += bytes((tag, 1, value & 0xFF))
        return self

    def add_u32(self, tag: OptionTag, value: int) -> "OptionsBuilder":
        """Append a little-endian u32 option."""
        self._parts += bytes((tag, 4)) + struct.pack("<I", value)
        return self

    def add_u64(self, tag: OptionTag, value: int) -> "OptionsBuilder":
        """Append a little-endian u64 option."""
        self._parts += bytes((tag, 8)) + struct.pack("<Q", value)
        return self

    def add_bytes(self, tag: OptionTag, value: bytes) -> "OptionsBuilder":
        """Append a variable-length option.

        Raises:
            ValueError: If value exceeds 255 bytes (the TLV length is one byte).
        """
        if len(value) > 255:
            raise ValueError("Option value too large (max 255 bytes)")
        self._parts += bytes((tag, len(value))) + value
        return self

    def add_flag(self, tag: OptionTag) -> "OptionsBuilder":
        """Append a zero-length option; its presence alone signals true."""
        self._parts += bytes((tag, 0))
        return self

    def build(self) -> bytes:
        """Return the accumulated options as an immutable bytes object."""
        return bytes(self._parts)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
@dataclass
class Option:
    """A single parsed TLV option: a tag plus its raw value bytes."""

    tag: OptionTag
    data: bytes

    def as_u8(self) -> int | None:
        """Decode the value as u8; None when the length is not 1."""
        return self.data[0] if len(self.data) == 1 else None

    def as_u32(self) -> int | None:
        """Decode the value as little-endian u32; None on wrong length."""
        if len(self.data) == 4:
            value: int = struct.unpack("<I", self.data)[0]
            return value
        return None

    def as_u64(self) -> int | None:
        """Decode the value as little-endian u64; None on wrong length."""
        if len(self.data) == 8:
            value: int = struct.unpack("<Q", self.data)[0]
            return value
        return None

    def as_string(self) -> str:
        """Decode the value as UTF-8 text."""
        return self.data.decode("utf-8")

    def is_flag(self) -> bool:
        """True for zero-length (presence-only) options."""
        return not self.data
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
class OptionsIterator:
    """Single-pass iterator over a TLV-encoded options blob.

    Malformed trailing bytes (a truncated tag/length pair, or a declared
    length that overruns the buffer) end iteration rather than raising.
    """

    def __init__(self, data: bytes) -> None:
        self._data = data
        self._offset = 0

    def __iter__(self) -> "OptionsIterator":
        return self

    def __next__(self) -> Option:
        buf = self._data
        pos = self._offset
        if pos + 2 > len(buf):
            raise StopIteration

        tag = OptionTag(buf[pos])
        length = buf[pos + 1]
        body_start = pos + 2

        if body_start + length > len(buf):
            raise StopIteration

        self._offset = body_start + length
        return Option(tag=tag, data=buf[body_start : body_start + length])

    def find(self, tag: OptionTag) -> Option | None:
        """Rewind to the start and scan for the first option with this tag."""
        self._offset = 0
        for option in self:
            if option.tag == tag:
                return option
        return None
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
# =============================================================================
|
|
166
|
+
# Request Serialization
|
|
167
|
+
# =============================================================================
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def compute_crc32(header_bytes: bytes, payload: bytes) -> int:
    """Compute the CRC32 covering a header (minus its crc32 field) + payload.

    The checksum covers:
      - header bytes 0-15  (magic, payload_length, request_id)
      - header bytes 20-23 (version, op_code/status, flags, reserved)
      - the full payload
    Bytes 16-19 — the crc32 field itself — are excluded.
    """
    # Feed the three spans incrementally; crc32(b, prev) is equivalent to
    # hashing the concatenation, without building an intermediate buffer.
    checksum = crc32(header_bytes[:16])
    checksum = crc32(header_bytes[20:24], checksum)
    checksum = crc32(payload, checksum)
    return checksum & 0xFFFFFFFF
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def serialize_request(
    request_id: int,
    op_code: OpCode,
    namespace: bytes,
    key: bytes,
    value: bytes,
    options: bytes = b"",
) -> bytes:
    """Serialize a request into wire format.

    Layout: 24-byte header, then
    [ns_len:u16][ns][key_len:u16][key][value_len:u32][value][opts_len:u16][opts].

    Args:
        request_id: Unique request identifier.
        op_code: Operation code.
        namespace: Namespace bytes (UTF-8 encoded).
        key: Key bytes.
        value: Value bytes.
        options: TLV-encoded options.

    Returns:
        Serialized request bytes (header + payload).

    Raises:
        NamespaceTooLargeError: If namespace exceeds MAX_NAMESPACE_SIZE.
        KeyTooLargeError: If key exceeds MAX_KEY_SIZE.
        ValueTooLargeError: If value exceeds MAX_VALUE_SIZE.
    """
    # Validate sizes up front so no partial buffer is built on failure.
    if len(namespace) > MAX_NAMESPACE_SIZE:
        raise NamespaceTooLargeError(
            f"Namespace too large: {len(namespace)} > {MAX_NAMESPACE_SIZE}"
        )
    if len(key) > MAX_KEY_SIZE:
        raise KeyTooLargeError(f"Key too large: {len(key)} > {MAX_KEY_SIZE}")
    if len(value) > MAX_VALUE_SIZE:
        raise ValueTooLargeError(f"Value too large: {len(value)} > {MAX_VALUE_SIZE}")

    # Payload: length-prefixed namespace, key, value, and options.
    payload = bytearray()
    payload.extend(struct.pack("<H", len(namespace)))
    payload.extend(namespace)
    payload.extend(struct.pack("<H", len(key)))
    payload.extend(key)
    payload.extend(struct.pack("<I", len(value)))
    payload.extend(value)
    payload.extend(struct.pack("<H", len(options)))
    payload.extend(options)
    payload_bytes = bytes(payload)

    # The CRC32 covers header bytes 0-15 and 20-23 plus the payload; the
    # crc32 field at bytes 16-19 is excluded. Pack the covered prefix and
    # suffix once, compute the checksum, and splice the CRC in between —
    # instead of packing the full header twice (once with a placeholder).
    prefix = struct.pack("<IIQ", MAGIC, len(payload_bytes), request_id)
    suffix = struct.pack("<BBBB", VERSION, op_code, 0, 0)  # flags=0, reserved=0
    crc = crc32(prefix + suffix + payload_bytes) & 0xFFFFFFFF

    return prefix + struct.pack("<I", crc) + suffix + payload_bytes
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
# =============================================================================
|
|
273
|
+
# Response Parsing
|
|
274
|
+
# =============================================================================
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
@dataclass
class RawResponse:
    """Raw response from the server: status code, payload bytes, request id."""

    status: StatusCode
    data: bytes
    request_id: int

    def is_ok(self) -> bool:
        """True when the server reported success (StatusCode.OK)."""
        return self.status == StatusCode.OK

    def is_not_found(self) -> bool:
        """True when the server reported StatusCode.NOT_FOUND."""
        return self.status == StatusCode.NOT_FOUND
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def parse_response_header(data: bytes) -> tuple[StatusCode, int, int, int]:
    """Parse and validate the fixed-size response header.

    Args:
        data: Raw response bytes; at least HEADER_SIZE bytes are required.

    Returns:
        Tuple of (status, data_len, request_id, crc32). The CRC is returned
        unverified — payload verification happens in parse_response.

    Raises:
        IncompleteResponseError: If fewer than HEADER_SIZE bytes are given.
        UnsupportedVersionError: If the protocol version does not match.
        InvalidMagicError: If the magic number is wrong.
    """
    if len(data) < HEADER_SIZE:
        raise IncompleteResponseError(f"Response too short: {len(data)} < {HEADER_SIZE}")

    fields = struct.unpack(RESPONSE_HEADER_FORMAT, data[:HEADER_SIZE])
    magic, data_len, request_id, crc, version, status, _flags, _reserved = fields

    if magic != MAGIC:
        raise InvalidMagicError(f"Invalid magic: 0x{magic:08X} != 0x{MAGIC:08X}")

    if version != VERSION:
        raise UnsupportedVersionError(f"Unsupported version: {version} != {VERSION}")

    return StatusCode(status), data_len, request_id, crc
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def parse_response(data: bytes) -> RawResponse:
    """Parse a complete response (header + payload) and verify its CRC32.

    Args:
        data: Raw response bytes, header included.

    Returns:
        RawResponse carrying status, payload bytes, and request_id.

    Raises:
        IncompleteResponseError: If the buffer is shorter than the header claims.
        InvalidChecksumError: If CRC32 validation fails.
    """
    status, data_len, request_id, expected_crc = parse_response_header(data)

    total_size = HEADER_SIZE + data_len
    if len(data) < total_size:
        raise IncompleteResponseError(f"Response incomplete: {len(data)} < {total_size}")

    body = data[HEADER_SIZE:total_size]

    # Recompute the checksum over the header (minus crc field) + payload.
    actual_crc = compute_crc32(data[:HEADER_SIZE], body)
    if actual_crc != expected_crc:
        raise InvalidChecksumError(f"CRC32 mismatch: 0x{actual_crc:08X} != 0x{expected_crc:08X}")

    return RawResponse(status=status, data=body, request_id=request_id)
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
# =============================================================================
|
|
355
|
+
# Response Data Parsing
|
|
356
|
+
# =============================================================================
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def parse_scan_response(data: bytes) -> ScanResult:
    """Parse scan response data.

    Format: [has_more:u8][cursor_len:u32][cursor:bytes][count:u32][entries...]
    Entry format: [key_len:u16][key][value_len:u32][value]

    Raises:
        IncompleteResponseError: If any field is truncated.
    """
    # Minimum size: has_more(1) + cursor_len(4) + count(4).
    if len(data) < 9:
        raise IncompleteResponseError("Scan response too short")

    offset = 0

    # has_more: any non-zero byte means more pages are available.
    has_more = data[offset] != 0
    offset += 1

    # cursor
    cursor_len = struct.unpack("<I", data[offset : offset + 4])[0]
    offset += 4

    if len(data) < offset + cursor_len:
        raise IncompleteResponseError("Scan response cursor incomplete")

    # Zero-length cursor is surfaced as None (no continuation token).
    cursor = data[offset : offset + cursor_len] if cursor_len > 0 else None
    offset += cursor_len

    # count
    if len(data) < offset + 4:
        raise IncompleteResponseError("Scan response count incomplete")

    count = struct.unpack("<I", data[offset : offset + 4])[0]
    offset += 4

    # entries
    entries: list[KVEntry] = []
    for _ in range(count):
        # key
        if len(data) < offset + 2:
            raise IncompleteResponseError("Scan response key length incomplete")

        key_len = struct.unpack("<H", data[offset : offset + 2])[0]
        offset += 2

        if len(data) < offset + key_len:
            raise IncompleteResponseError("Scan response key incomplete")

        key = data[offset : offset + key_len]
        offset += key_len

        # value
        if len(data) < offset + 4:
            raise IncompleteResponseError("Scan response value length incomplete")

        value_len = struct.unpack("<I", data[offset : offset + 4])[0]
        offset += 4

        # Zero-length value is surfaced as None rather than b"".
        value: bytes | None = None
        if value_len > 0:
            if len(data) < offset + value_len:
                raise IncompleteResponseError("Scan response value incomplete")
            value = data[offset : offset + value_len]
            offset += value_len

        entries.append(KVEntry(key=key, value=value))

    return ScanResult(entries=entries, cursor=cursor, has_more=has_more)
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def parse_history_response(data: bytes) -> list[VersionEntry]:
    """Parse history response data.

    Format: [count:u32][entries...]
    Entry format: [version:u64][timestamp:i64][value_len:u32][value]

    Raises:
        IncompleteResponseError: If the data is truncated.
    """
    if len(data) < 4:
        raise IncompleteResponseError("History response too short")

    (count,) = struct.unpack_from("<I", data, 0)
    pos = 4

    history: list[VersionEntry] = []
    for _ in range(count):
        # Fixed-size entry prefix: version(8) + timestamp(8) + value_len(4).
        if len(data) < pos + 20:
            raise IncompleteResponseError("History response entry incomplete")

        version, timestamp, value_len = struct.unpack_from("<QqI", data, pos)
        pos += 20

        if len(data) < pos + value_len:
            raise IncompleteResponseError("History response value incomplete")

        value = data[pos : pos + value_len]
        pos += value_len

        history.append(VersionEntry(version=version, timestamp=timestamp, value=value))

    return history
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
def parse_dequeue_response(data: bytes) -> DequeueResult:
    """Parse dequeue response data.

    Format: [count:u32][messages...]
    Message format: [seq:u64][payload_len:u32][payload]

    Raises:
        IncompleteResponseError: If the data is truncated.
    """
    if len(data) < 4:
        raise IncompleteResponseError("Dequeue response too short")

    (count,) = struct.unpack_from("<I", data, 0)
    pos = 4

    messages: list[Message] = []
    for _ in range(count):
        # Fixed-size message prefix: seq(8) + payload_len(4).
        if len(data) < pos + 12:
            raise IncompleteResponseError("Dequeue response message incomplete")

        seq, payload_len = struct.unpack_from("<QI", data, pos)
        pos += 12

        if len(data) < pos + payload_len:
            raise IncompleteResponseError("Dequeue response payload incomplete")

        messages.append(Message(seq=seq, payload=data[pos : pos + payload_len]))
        pos += payload_len

    return DequeueResult(messages=messages)
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
def parse_enqueue_response(data: bytes) -> int:
    """Parse enqueue response data.

    Format: [seq:u64] (any trailing bytes are ignored)

    Returns:
        The assigned sequence number.

    Raises:
        IncompleteResponseError: If fewer than 8 bytes are given.
    """
    if len(data) < 8:
        raise IncompleteResponseError("Enqueue response too short")

    seq: int = struct.unpack_from("<Q", data, 0)[0]
    return seq
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def serialize_seqs(seqs: list[int]) -> bytes:
    """Serialize sequence numbers for ack/nack.

    Format: [count:u32][seq:u64]*
    """
    # One pack call covers the count and all sequence numbers.
    return struct.pack(f"<I{len(seqs)}Q", len(seqs), *seqs)
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
# =============================================================================
|
|
526
|
+
# Stream Response Parsing
|
|
527
|
+
# =============================================================================
|
|
528
|
+
|
|
529
|
+
|
|
530
|
+
def parse_stream_append_response(data: bytes) -> StreamAppendResult:
    """Parse stream append response data.

    Format: [sequence:u64][timestamp_ms:i64]

    Raises:
        IncompleteResponseError: If fewer than 16 bytes are given.
    """
    if len(data) < 16:
        raise IncompleteResponseError("Stream append response too short")

    sequence, timestamp_ms = struct.unpack_from("<Qq", data, 0)
    return StreamAppendResult(sequence=sequence, timestamp_ms=timestamp_ms)
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
def parse_stream_read_response(data: bytes) -> StreamReadResult:
    """Parse stream read response data.

    Wire format: [count:u32]([sequence:u64][timestamp_ms:i64][tier:u8][partition:u32]
    [key_present:u8][key_len:u32]?[key]?[payload_len:u32][payload]
    [header_count:u32])*

    Notes:
        - The partition, key, and headers fields are consumed for framing but
          discarded; records are built with headers=None.
        - Fewer than 4 bytes is treated as an empty (zero-record) response.
        - If the buffer ends exactly between records, the records parsed so
          far are returned; truncation inside a record raises.
    """
    if len(data) < 4:
        # Empty response is valid (no records)
        return StreamReadResult(records=[])

    pos = 0

    count = struct.unpack("<I", data[pos : pos + 4])[0]
    pos += 4

    records: list[StreamRecord] = []
    for _ in range(count):
        if pos >= len(data):
            # Server advertised more records than it sent; return what we have.
            break

        # Read sequence
        if pos + 8 > len(data):
            raise IncompleteResponseError("Stream record: missing sequence")
        sequence = struct.unpack("<Q", data[pos : pos + 8])[0]
        pos += 8

        # Read timestamp_ms
        if pos + 8 > len(data):
            raise IncompleteResponseError("Stream record: missing timestamp_ms")
        timestamp_ms = struct.unpack("<q", data[pos : pos + 8])[0]
        pos += 8

        # Read tier
        if pos + 1 > len(data):
            raise IncompleteResponseError("Stream record: missing tier")
        tier = StorageTier(data[pos])
        pos += 1

        # Skip partition (u32) — not surfaced on StreamRecord
        if pos + 4 > len(data):
            raise IncompleteResponseError("Stream record: missing partition")
        pos += 4

        # Read key_present
        if pos + 1 > len(data):
            raise IncompleteResponseError("Stream record: missing key_present")
        key_present = data[pos]
        pos += 1

        # Skip key if present — decoded only to advance past it
        if key_present != 0:
            if pos + 4 > len(data):
                raise IncompleteResponseError("Stream record: missing key length")
            key_len = struct.unpack("<I", data[pos : pos + 4])[0]
            pos += 4
            if pos + key_len > len(data):
                raise IncompleteResponseError("Stream record: missing key data")
            pos += key_len

        # Read payload
        if pos + 4 > len(data):
            raise IncompleteResponseError("Stream record: missing payload length")
        payload_len = struct.unpack("<I", data[pos : pos + 4])[0]
        pos += 4

        if pos + payload_len > len(data):
            raise IncompleteResponseError("Stream record: missing payload data")
        payload = data[pos : pos + payload_len]
        pos += payload_len

        # Skip header_count (TODO: parse headers)
        if pos + 4 > len(data):
            raise IncompleteResponseError("Stream record: missing header count")
        pos += 4

        records.append(
            StreamRecord(
                sequence=sequence,
                timestamp_ms=timestamp_ms,
                tier=tier,
                payload=payload,
                headers=None,
            )
        )

    return StreamReadResult(records=records)
|
|
631
|
+
|
|
632
|
+
|
|
633
|
+
def parse_stream_info_response(data: bytes) -> StreamInfo:
    """Parse stream info response data.

    Format: [first_seq:u64][last_seq:u64][count:u64][bytes:u64]

    Raises:
        IncompleteResponseError: If fewer than 32 bytes are given.
    """
    if len(data) < 32:
        raise IncompleteResponseError("Stream info response too short")

    first_seq, last_seq, count, bytes_size = struct.unpack_from("<4Q", data, 0)
    return StreamInfo(first_seq=first_seq, last_seq=last_seq, count=count, bytes_size=bytes_size)
|
|
647
|
+
|
|
648
|
+
|
|
649
|
+
def serialize_group_value(group: str, consumer: str) -> bytes:
    """Serialize group and consumer names for consumer group operations.

    Format: [group_len:u16][group][consumer_len:u16][consumer]
    """
    encoded_group = group.encode("utf-8")
    encoded_consumer = consumer.encode("utf-8")
    return (
        struct.pack("<H", len(encoded_group))
        + encoded_group
        + struct.pack("<H", len(encoded_consumer))
        + encoded_consumer
    )
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
def serialize_group_ack_value(group: str, seqs: list[int]) -> bytes:
    """Serialize group name and sequence numbers for group ack.

    Format: [group_len:u16][group][count:u32][seq:u64]*
    """
    encoded_group = group.encode("utf-8")
    # One pack call covers the count and every sequence number.
    seq_block = struct.pack(f"<I{len(seqs)}Q", len(seqs), *seqs)
    return struct.pack("<H", len(encoded_group)) + encoded_group + seq_block
|
|
679
|
+
|
|
680
|
+
|
|
681
|
+
# =============================================================================
|
|
682
|
+
# Action/Worker Value Serialization
|
|
683
|
+
# =============================================================================
|
|
684
|
+
|
|
685
|
+
|
|
686
|
+
def serialize_action_register_value(
|
|
687
|
+
action_type: int,
|
|
688
|
+
timeout_ms: int,
|
|
689
|
+
max_retries: int,
|
|
690
|
+
description: str | None = None,
|
|
691
|
+
) -> bytes:
|
|
692
|
+
"""Serialize action register value.
|
|
693
|
+
|
|
694
|
+
Format: [action_type:u8][timeout_ms:u32][max_retries:u32]
|
|
695
|
+
[has_desc:u8][desc_len:u16]?[desc]?
|
|
696
|
+
[has_wasm_module:u8]...(all optional fields as u8=0)
|
|
697
|
+
"""
|
|
698
|
+
result = bytearray()
|
|
699
|
+
|
|
700
|
+
# action_type
|
|
701
|
+
result.append(action_type & 0xFF)
|
|
702
|
+
|
|
703
|
+
# timeout_ms
|
|
704
|
+
result.extend(struct.pack("<I", timeout_ms))
|
|
705
|
+
|
|
706
|
+
# max_retries
|
|
707
|
+
result.extend(struct.pack("<I", max_retries))
|
|
708
|
+
|
|
709
|
+
# description (optional)
|
|
710
|
+
if description:
|
|
711
|
+
desc_bytes = description.encode("utf-8")
|
|
712
|
+
result.append(1) # has_desc
|
|
713
|
+
result.extend(struct.pack("<H", len(desc_bytes)))
|
|
714
|
+
result.extend(desc_bytes)
|
|
715
|
+
else:
|
|
716
|
+
result.append(0)
|
|
717
|
+
|
|
718
|
+
# wasm_module (optional, not used)
|
|
719
|
+
result.append(0)
|
|
720
|
+
|
|
721
|
+
# wasm_entrypoint (optional, not used)
|
|
722
|
+
result.append(0)
|
|
723
|
+
|
|
724
|
+
# wasm_memory_limit (optional, not used)
|
|
725
|
+
result.append(0)
|
|
726
|
+
|
|
727
|
+
# trigger_stream (optional, not used)
|
|
728
|
+
result.append(0)
|
|
729
|
+
|
|
730
|
+
# trigger_group (optional, not used)
|
|
731
|
+
result.append(0)
|
|
732
|
+
|
|
733
|
+
return bytes(result)
|
|
734
|
+
|
|
735
|
+
|
|
736
|
+
def serialize_action_invoke_value(
|
|
737
|
+
input_data: bytes,
|
|
738
|
+
priority: int = 10,
|
|
739
|
+
idempotency_key: str | None = None,
|
|
740
|
+
) -> bytes:
|
|
741
|
+
"""Serialize action invoke value.
|
|
742
|
+
|
|
743
|
+
Format: [priority:u8][delay_ms:i64][has_caller:u8]
|
|
744
|
+
[has_idempotency_key:u8][key_len:u16]?[key]?[input...]
|
|
745
|
+
"""
|
|
746
|
+
result = bytearray()
|
|
747
|
+
|
|
748
|
+
# priority
|
|
749
|
+
result.append(priority & 0xFF)
|
|
750
|
+
|
|
751
|
+
# delay_ms (default 0)
|
|
752
|
+
result.extend(struct.pack("<q", 0))
|
|
753
|
+
|
|
754
|
+
# caller_id (optional, none)
|
|
755
|
+
result.append(0)
|
|
756
|
+
|
|
757
|
+
# idempotency_key (optional)
|
|
758
|
+
if idempotency_key:
|
|
759
|
+
key_bytes = idempotency_key.encode("utf-8")
|
|
760
|
+
result.append(1)
|
|
761
|
+
result.extend(struct.pack("<H", len(key_bytes)))
|
|
762
|
+
result.extend(key_bytes)
|
|
763
|
+
else:
|
|
764
|
+
result.append(0)
|
|
765
|
+
|
|
766
|
+
# input
|
|
767
|
+
result.extend(input_data)
|
|
768
|
+
|
|
769
|
+
return bytes(result)
|
|
770
|
+
|
|
771
|
+
|
|
772
|
+
def serialize_action_list_value(limit: int = 100) -> bytes:
    """Serialize action list value.

    Format: [limit:u32][cursor...] (cursor omitted if empty; this helper
    never emits one).
    """
    return struct.pack("<I", limit)
|
|
778
|
+
|
|
779
|
+
|
|
780
|
+
def serialize_worker_register_value(task_types: list[str]) -> bytes:
    """Serialize worker register value.

    Format: [count:u32]([task_type_len:u16][task_type])*[has_caps:u8]
    """
    parts = [struct.pack("<I", len(task_types))]
    for name in task_types:
        encoded = name.encode("utf-8")
        parts.append(struct.pack("<H", len(encoded)))
        parts.append(encoded)
    # capabilities: optional, never sent by this SDK
    parts.append(b"\x00")
    return b"".join(parts)
|
|
800
|
+
|
|
801
|
+
|
|
802
|
+
def serialize_worker_await_value(task_types: list[str]) -> bytes:
    """Serialize worker await value.

    Format: [count:u32]([task_type_len:u16][task_type])*
    """
    chunks = [struct.pack("<I", len(task_types))]
    for name in task_types:
        encoded = name.encode("utf-8")
        chunks.append(struct.pack("<H", len(encoded)) + encoded)
    return b"".join(chunks)
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
def serialize_worker_touch_value(task_id: str, extend_ms: int = 30000) -> bytes:
    """Serialize worker touch value (lease extension).

    Format: [task_id_len:u16][task_id][extend_ms:u32]
    """
    encoded = task_id.encode("utf-8")
    return struct.pack("<H", len(encoded)) + encoded + struct.pack("<I", extend_ms)
|
|
834
|
+
|
|
835
|
+
|
|
836
|
+
def serialize_worker_complete_value(task_id: str, result_data: bytes) -> bytes:
    """Serialize worker complete value.

    Format: [task_id_len:u16][task_id][result...]
    """
    encoded = task_id.encode("utf-8")
    return struct.pack("<H", len(encoded)) + encoded + result_data
|
|
849
|
+
|
|
850
|
+
|
|
851
|
+
def serialize_worker_fail_value(task_id: str, error_message: str) -> bytes:
    """Serialize worker fail value.

    Format: [task_id_len:u16][task_id][error_message...]
    Note: retry flag is handled via TLV options, not in payload (matches Go SDK).
    """
    encoded_id = task_id.encode("utf-8")
    return struct.pack("<H", len(encoded_id)) + encoded_id + error_message.encode("utf-8")
|
|
866
|
+
|
|
867
|
+
|
|
868
|
+
def serialize_worker_list_value(limit: int = 100) -> bytes:
    """Serialize worker list value.

    Format: [limit:u32]
    """
    return struct.pack("<I", limit)
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def parse_task_assignment(data: bytes) -> "types.TaskAssignment":
    """Parse task assignment from server response.

    Format: [task_id_len:u16][task_id][task_type_len:u16][task_type]
            [created_at:i64][attempt:u32][payload...]

    Raises:
        ValueError: If the data is truncated mid-field.
    """
    # Coarse lower-bound guard; each field is still bounds-checked before
    # it is read. NOTE(review): the fixed fields alone need 16 bytes, so
    # 10 is only a rough minimum — confirm against the server encoder.
    if len(data) < 10:
        raise ValueError("Incomplete task assignment response")

    pos = 0

    # task_id: [len:u16][utf-8 bytes]
    task_id_len = struct.unpack_from("<H", data, pos)[0]
    pos += 2
    if pos + task_id_len > len(data):
        raise ValueError("Incomplete task assignment: missing task_id")
    task_id = data[pos : pos + task_id_len].decode("utf-8")
    pos += task_id_len

    # task_type: [len:u16][utf-8 bytes]
    if pos + 2 > len(data):
        raise ValueError("Incomplete task assignment: missing task_type length")
    task_type_len = struct.unpack_from("<H", data, pos)[0]
    pos += 2
    if pos + task_type_len > len(data):
        raise ValueError("Incomplete task assignment: missing task_type")
    task_type = data[pos : pos + task_type_len].decode("utf-8")
    pos += task_type_len

    # created_at: i64 (units not shown here — presumably a timestamp;
    # verify against the server encoder)
    if pos + 8 > len(data):
        raise ValueError("Incomplete task assignment: missing created_at")
    created_at = struct.unpack_from("<q", data, pos)[0]
    pos += 8

    # attempt: u32
    if pos + 4 > len(data):
        raise ValueError("Incomplete task assignment: missing attempt")
    attempt = struct.unpack_from("<I", data, pos)[0]
    pos += 4

    # payload: everything that remains
    payload = data[pos:]

    return types.TaskAssignment(
        task_id=task_id,
        task_type=task_type,
        payload=payload,
        created_at=created_at,
        attempt=attempt,
    )