flo-python 0.1.0.dev2__py3-none-any.whl → 0.1.0.dev4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
flo/workflows.py ADDED
@@ -0,0 +1,463 @@
1
+ """Flo Workflow Operations
2
+
3
+ Workflow operations: create, start, signal, cancel, status, history,
4
+ list runs, list definitions, disable, enable, sync.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import os
10
+ import re
11
+ import struct
12
+ from typing import TYPE_CHECKING, Any
13
+
14
+ from .types import (
15
+ OpCode,
16
+ StatusCode,
17
+ WorkflowCancelOptions,
18
+ WorkflowCreateOptions,
19
+ WorkflowDisableOptions,
20
+ WorkflowEnableOptions,
21
+ WorkflowGetDefinitionOptions,
22
+ WorkflowHistoryOptions,
23
+ WorkflowListDefinitionsOptions,
24
+ WorkflowListRunsOptions,
25
+ WorkflowSignalOptions,
26
+ WorkflowStartOptions,
27
+ WorkflowStatusOptions,
28
+ WorkflowSyncOptions,
29
+ WorkflowSyncResult,
30
+ )
31
+
32
+ if TYPE_CHECKING:
33
+ from .client import FloClient
34
+
35
+
36
class WorkflowOperations:
    """Workflow operations for the Flo client.

    Each method encodes its arguments into the binary wire format expected by
    the server and delegates transport, framing, and status checking to
    ``FloClient._send_and_check``.
    """

    def __init__(self, client: FloClient) -> None:
        self._client = client

    # =========================================================================
    # Core Operations
    # =========================================================================

    async def create(
        self, name: str, yaml: str | bytes, options: WorkflowCreateOptions | None = None
    ) -> None:
        """Create (or replace) a workflow from a YAML definition.

        Args:
            name: Workflow name (key under which the definition is stored).
            yaml: YAML definition as a string or raw UTF-8 bytes.
            options: Optional per-call options (namespace override).
        """
        opts = options or WorkflowCreateOptions()
        namespace = self._client.get_namespace(opts.namespace)

        yaml_bytes = yaml.encode("utf-8") if isinstance(yaml, str) else yaml

        await self._client._send_and_check(
            OpCode.WORKFLOW_CREATE,
            namespace,
            name.encode("utf-8"),
            yaml_bytes,
        )

    async def get_definition(
        self, name: str, options: WorkflowGetDefinitionOptions | None = None
    ) -> str | None:
        """Get the YAML definition of a workflow. Returns None if not found."""
        opts = options or WorkflowGetDefinitionOptions()
        namespace = self._client.get_namespace(opts.namespace)

        # Optional version string rides in the value slot; empty means "latest".
        value = opts.version.encode("utf-8") if opts.version else b""

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_GET_DEFINITION,
            namespace,
            name.encode("utf-8"),
            value,
            allow_not_found=True,
        )

        if resp.status == StatusCode.NOT_FOUND:
            return None

        return resp.data.decode("utf-8")

    async def start(
        self,
        name: str,
        input_data: str | bytes | None = None,
        options: WorkflowStartOptions | None = None,
    ) -> str:
        """Start a workflow run. Returns the run ID.

        Args:
            name: Name of the workflow definition to run.
            input_data: Optional input payload (str is UTF-8 encoded).
            options: Optional version / idempotency key / explicit run ID.
        """
        opts = options or WorkflowStartOptions()
        namespace = self._client.get_namespace(opts.namespace)

        input_bytes = b""
        if input_data is not None:
            input_bytes = input_data.encode("utf-8") if isinstance(input_data, str) else input_data

        # Wire format: [ver_len:u16][ver]?[has_idem:u8][idem_len:u16]?[idem]?
        # [has_rid:u8][rid_len:u16]?[rid]?[input...]
        parts = bytearray()

        # Version prefix (length 0 means "latest").
        if opts.version:
            ver_bytes = opts.version.encode("utf-8")
            parts.extend(struct.pack("<H", len(ver_bytes)))
            parts.extend(ver_bytes)
        else:
            parts.extend(struct.pack("<H", 0))

        # Idempotency key: flag byte, then length-prefixed string when present.
        if opts.idempotency_key:
            idem_bytes = opts.idempotency_key.encode("utf-8")
            parts.append(1)
            parts.extend(struct.pack("<H", len(idem_bytes)))
            parts.extend(idem_bytes)
        else:
            parts.append(0)

        # Explicit run ID: same flag + length-prefixed encoding.
        if opts.run_id:
            rid_bytes = opts.run_id.encode("utf-8")
            parts.append(1)
            parts.extend(struct.pack("<H", len(rid_bytes)))
            parts.extend(rid_bytes)
        else:
            parts.append(0)

        # Input payload occupies the remainder of the value.
        parts.extend(input_bytes)

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_START,
            namespace,
            name.encode("utf-8"),
            bytes(parts),
        )

        return resp.data.decode("utf-8")

    async def status(
        self, run_id: str, options: WorkflowStatusOptions | None = None
    ) -> dict[str, Any]:
        """Get the status of a workflow run. Returns parsed binary status.

        Response layout: three length-prefixed strings (run_id, workflow,
        version), a status byte, a length-prefixed current_step, a u32-prefixed
        input blob, an i64 created_at, then three optional flag-prefixed
        trailers (started_at:i64, completed_at:i64, wait_signal:str).
        """
        # NOTE: uses the module-level `struct` import; the original had a
        # redundant function-local `import struct` here.
        opts = options or WorkflowStatusOptions()
        namespace = self._client.get_namespace(opts.namespace)

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_STATUS,
            namespace,
            run_id.encode("utf-8"),
            b"",
        )

        data = resp.data
        pos = 0
        # Index in this list corresponds to the status byte on the wire.
        status_names = [
            "pending",
            "running",
            "waiting",
            "completed",
            "failed",
            "cancelled",
            "timed_out",
        ]

        def read_u16() -> int:
            nonlocal pos
            v: int = struct.unpack_from("<H", data, pos)[0]
            pos += 2
            return v

        def read_str() -> str:
            # Length-prefixed UTF-8 string: [len:u16][bytes].
            # (nonlocal declared first — the original placed it after the
            # read_u16() call, which is legal but fragile to reordering.)
            nonlocal pos
            n = read_u16()
            s = data[pos : pos + n].decode("utf-8")
            pos += n
            return s

        parsed_run_id = read_str()
        workflow = read_str()
        version = read_str()

        status_byte = data[pos]
        pos += 1
        status_str = (
            status_names[status_byte]
            if status_byte < len(status_names)
            else f"unknown({status_byte})"
        )

        current_step = read_str()

        (input_len,) = struct.unpack_from("<I", data, pos)
        pos += 4
        input_data = data[pos : pos + input_len]
        pos += input_len

        (created_at,) = struct.unpack_from("<q", data, pos)
        pos += 8

        result: dict[str, Any] = {
            "run_id": parsed_run_id,
            "workflow": workflow,
            "version": version,
            "status": status_str,
            "current_step": current_step,
            "input": input_data,
            "created_at": created_at,
        }

        # Optional: started_at (flag byte 1 = present; 0 is consumed and skipped).
        if pos < len(data) and data[pos] == 1:
            pos += 1
            (started_at,) = struct.unpack_from("<q", data, pos)
            pos += 8
            result["started_at"] = started_at
        elif pos < len(data):
            pos += 1

        # Optional: completed_at (same flag scheme).
        if pos < len(data) and data[pos] == 1:
            pos += 1
            (completed_at,) = struct.unpack_from("<q", data, pos)
            pos += 8
            result["completed_at"] = completed_at
        elif pos < len(data):
            pos += 1

        # Optional: wait_signal (last field, so a 0 flag needs no consuming).
        if pos < len(data) and data[pos] == 1:
            pos += 1
            result["wait_signal"] = read_str()

        return result

    async def signal(
        self,
        run_id: str,
        signal_name: str,
        data: str | bytes | None = None,
        options: WorkflowSignalOptions | None = None,
    ) -> None:
        """Send a signal to a running workflow.

        Wire format: [sig_len:u16][sig][payload...].
        """
        opts = options or WorkflowSignalOptions()
        namespace = self._client.get_namespace(opts.namespace)

        sig_bytes = signal_name.encode("utf-8")
        data_bytes = b""
        if data is not None:
            data_bytes = data.encode("utf-8") if isinstance(data, str) else data

        value = bytearray()
        value.extend(struct.pack("<H", len(sig_bytes)))
        value.extend(sig_bytes)
        value.extend(data_bytes)

        await self._client._send_and_check(
            OpCode.WORKFLOW_SIGNAL,
            namespace,
            run_id.encode("utf-8"),
            bytes(value),
        )

    async def cancel(
        self,
        run_id: str,
        reason: str | None = None,
        options: WorkflowCancelOptions | None = None,
    ) -> None:
        """Cancel a running workflow. An optional reason rides in the value."""
        opts = options or WorkflowCancelOptions()
        namespace = self._client.get_namespace(opts.namespace)

        value = reason.encode("utf-8") if reason else b""

        await self._client._send_and_check(
            OpCode.WORKFLOW_CANCEL,
            namespace,
            run_id.encode("utf-8"),
            value,
        )

    async def history(self, run_id: str, options: WorkflowHistoryOptions | None = None) -> bytes:
        """Get the execution history of a workflow run. Returns raw response bytes.

        NOTE(review): allow_not_found is set, but a NOT_FOUND status is not
        surfaced — an unknown run returns the raw (likely empty) payload.
        Confirm whether callers should receive None instead.
        """
        opts = options or WorkflowHistoryOptions()
        namespace = self._client.get_namespace(opts.namespace)

        # Value: [limit:u32]
        value = struct.pack("<I", opts.limit)

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_HISTORY,
            namespace,
            run_id.encode("utf-8"),
            value,
            allow_not_found=True,
        )

        return resp.data

    async def list_runs(self, options: WorkflowListRunsOptions | None = None) -> bytes:
        """List workflow runs. Returns raw response bytes."""
        opts = options or WorkflowListRunsOptions()
        namespace = self._client.get_namespace(opts.namespace)

        # Key is an optional workflow-name filter; empty key lists all.
        key = opts.workflow_name.encode("utf-8") if opts.workflow_name else b""

        # Value: [limit:u32][status_len:u16][status][cursor_len:u16][cursor][search_len:u16][search]
        status_bytes = opts.status_filter.encode("utf-8") if opts.status_filter else b""
        cursor_bytes = opts.cursor if opts.cursor else b""
        search_bytes = b""  # search not exposed yet
        value = bytearray()
        value.extend(struct.pack("<I", opts.limit))
        value.extend(struct.pack("<H", len(status_bytes)))
        value.extend(status_bytes)
        value.extend(struct.pack("<H", len(cursor_bytes)))
        value.extend(cursor_bytes)
        value.extend(struct.pack("<H", len(search_bytes)))
        value.extend(search_bytes)

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_LIST_RUNS,
            namespace,
            key,
            bytes(value),
        )

        return resp.data

    async def list_definitions(
        self, options: WorkflowListDefinitionsOptions | None = None
    ) -> bytes:
        """List workflow definitions. Returns raw response bytes."""
        opts = options or WorkflowListDefinitionsOptions()
        namespace = self._client.get_namespace(opts.namespace)

        # Wire format: [limit:u32][cursor...]
        cursor = opts.cursor or b""
        value = struct.pack("<I", opts.limit) + cursor

        resp = await self._client._send_and_check(
            OpCode.WORKFLOW_LIST_DEFINITIONS,
            namespace,
            b"",
            value,
        )

        return resp.data

    async def disable(self, name: str, options: WorkflowDisableOptions | None = None) -> None:
        """Disable a workflow definition (prevents new runs)."""
        opts = options or WorkflowDisableOptions()
        namespace = self._client.get_namespace(opts.namespace)

        await self._client._send_and_check(
            OpCode.WORKFLOW_DISABLE,
            namespace,
            name.encode("utf-8"),
            b"",
        )

    async def enable(self, name: str, options: WorkflowEnableOptions | None = None) -> None:
        """Re-enable a disabled workflow definition."""
        opts = options or WorkflowEnableOptions()
        namespace = self._client.get_namespace(opts.namespace)

        await self._client._send_and_check(
            OpCode.WORKFLOW_ENABLE,
            namespace,
            name.encode("utf-8"),
            b"",
        )

    # =========================================================================
    # Declarative Sync
    # =========================================================================

    async def sync(
        self, yaml: str, options: WorkflowSyncOptions | None = None
    ) -> WorkflowSyncResult:
        """Declarative, idempotent sync of a workflow YAML string.

        Safe to call on every startup. Compares versions:
        - Not found → creates
        - Same version → no-op ("unchanged")
        - Different version → updates (upsert)
        """
        opts = options or WorkflowSyncOptions()
        name, version, description = _extract_workflow_meta(yaml)
        namespace = self._client.get_namespace(opts.namespace)

        # The already-resolved namespace is passed back through option objects;
        # assumes get_namespace() is idempotent on resolved values — TODO confirm.
        existing = await self.get_definition(
            name, WorkflowGetDefinitionOptions(namespace=namespace)
        )

        if existing is not None:
            existing_version = _extract_yaml_field(existing, "version")
            if existing_version == version:
                return WorkflowSyncResult(
                    name=name,
                    version=version,
                    description=description,
                    action="unchanged",
                )

        await self.create(name, yaml, WorkflowCreateOptions(namespace=namespace))

        action = "updated" if existing is not None else "created"
        return WorkflowSyncResult(
            name=name,
            version=version,
            description=description,
            action=action,
        )

    async def sync_bytes(
        self, yaml: bytes, options: WorkflowSyncOptions | None = None
    ) -> WorkflowSyncResult:
        """Sync raw YAML bytes."""
        return await self.sync(yaml.decode("utf-8"), options)

    async def sync_dir(
        self, dir_path: str, options: WorkflowSyncOptions | None = None
    ) -> list[WorkflowSyncResult]:
        """Sync all YAML files in a directory (sorted, sequential)."""
        results: list[WorkflowSyncResult] = []
        for entry in sorted(os.listdir(dir_path)):
            if entry.endswith((".yaml", ".yml")):
                file_path = os.path.join(dir_path, entry)
                # Explicit encoding: workflow YAML is always UTF-8, matching
                # the encode("utf-8") used everywhere else in this module.
                with open(file_path, encoding="utf-8") as f:
                    yaml_content = f.read()
                result = await self.sync(yaml_content, options)
                results.append(result)
        return results
436
+
437
+
438
+ # =============================================================================
439
+ # YAML Metadata Extraction (lightweight — no full parser needed)
440
+ # =============================================================================
441
+
442
+ _YAML_FIELD_RE = re.compile(r"""^\s*['"]?(\w+)['"]?\s*:\s*['"]?([^'"#\n]+?)['"]?\s*(?:#.*)?$""")
443
+
444
+
445
+ def _extract_yaml_field(yaml: str, field: str) -> str | None:
446
+ """Extract a top-level scalar field from YAML."""
447
+ for line in yaml.split("\n"):
448
+ m = _YAML_FIELD_RE.match(line)
449
+ if m and m.group(1) == field:
450
+ return m.group(2).strip()
451
+ return None
452
+
453
+
454
def _extract_workflow_meta(yaml: str) -> tuple[str, str, str]:
    """Extract name, version, and description from workflow YAML.

    Raises ValueError when either required field (name, version) is absent;
    description falls back to the empty string.
    """
    meta = {key: _extract_yaml_field(yaml, key) for key in ("name", "version", "description")}
    for required in ("name", "version"):
        if not meta[required]:
            raise ValueError(f"flo: workflow YAML missing required '{required}' field")
    return meta["name"], meta["version"], meta["description"] or ""
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: flo-python
3
- Version: 0.1.0.dev2
3
+ Version: 0.1.0.dev4
4
4
  Summary: Python SDK for the Flo distributed systems platform
5
5
  Project-URL: Homepage, https://github.com/floruntime/flo-python
6
6
  Project-URL: Documentation, https://github.com/floruntime/flo-python#readme
@@ -321,7 +321,7 @@ for record in result.records:
321
321
  try:
322
322
  process(record.payload)
323
323
  # Acknowledge successful processing
324
- await client.stream.group_ack("events", "processors", [record.offset])
324
+ await client.stream.group_ack("events", "processors", [record.id])
325
325
  except Exception:
326
326
  # Record will be redelivered to another consumer
327
327
  pass
@@ -457,8 +457,8 @@ from flo import FloClient
457
457
 
458
458
  async def run_worker():
459
459
  async with FloClient("localhost:9000", namespace="myapp") as client:
460
- # Create a worker from the client
461
- worker = client.new_worker(concurrency=5)
460
+ # Create an action worker from the client
461
+ worker = client.new_action_worker(concurrency=5)
462
462
 
463
463
  @worker.action("process-image")
464
464
  async def process_image(ctx):
@@ -471,6 +471,31 @@ async def run_worker():
471
471
  asyncio.run(run_worker())
472
472
  ```
473
473
 
474
+ ### StreamWorker Example
475
+
476
+ ```python
477
+ import asyncio
478
+ from flo import FloClient, StreamContext
479
+
480
+ async def process_event(ctx: StreamContext) -> None:
481
+ event = ctx.json()
482
+ print(f"Got event: {event}")
483
+ # Return normally → auto-ack
484
+ # Raise an exception → auto-nack (redelivery)
485
+
486
+ async def run_stream_worker():
487
+ async with FloClient("localhost:9000", namespace="myapp") as client:
488
+ worker = client.new_stream_worker(
489
+ stream="events",
490
+ group="processors",
491
+ handler=process_event,
492
+ concurrency=5,
493
+ )
494
+ await worker.start()
495
+
496
+ asyncio.run(run_stream_worker())
497
+ ```
498
+
474
499
  ## Configuration
475
500
 
476
501
  ### Client Options
@@ -0,0 +1,16 @@
1
+ flo/__init__.py,sha256=7nQrfC6ge__E4scjMx5x-VwAiAKhsiRL-JEjC7cf0ZA,7039
2
+ flo/actions.py,sha256=b4cd5AAe6TRAU-_UZmenbek0h--T-C_PiiDFCHyYDTs,12258
3
+ flo/client.py,sha256=hS64RG4WA9qK_ISdIqDxmV-7DDTn3pAYvYGFRxOjq7U,15059
4
+ flo/exceptions.py,sha256=-3HUWvRGLe_4mSWxwZXk8CMpcfrRjFv0rRkTzu4kqP4,5868
5
+ flo/kv.py,sha256=2PgvoZylU8Clsx8yaYdTLzdCZmD8nCZgtrmJHhAAZlM,7344
6
+ flo/processing.py,sha256=_jOQZiSgyArOPOdnoARNy9x6QgzcYvt1R0wa0vLQfoc,10530
7
+ flo/queue.py,sha256=Nn6CkgUkbXgUuH9BOCWlRkjxTZLKRs0_iMyRmwkYK6k,10888
8
+ flo/streams.py,sha256=G_27-_cq6hru712olwImd4Yxuo8x4l8XuKljUk0Y1JA,12965
9
+ flo/types.py,sha256=g8yikYiiYxsjkUlSCZxKpLMdBv7jg-cWAGO-cnEnaEo,27808
10
+ flo/wire.py,sha256=TLCMXeA8VnUTzyyORM7G3zrsSZhyZymSGxU9jhCxmmI,29373
11
+ flo/worker.py,sha256=flSLz7FIV_zmvLhyHjR39KqUXk5cr8MQVHFkswHv78o,35395
12
+ flo/workflows.py,sha256=8Z5mhq2UOAnjV_Ef261mcWlj4Z5yGCwrFbxjey0ICoU,15050
13
+ flo_python-0.1.0.dev4.dist-info/METADATA,sha256=oufJNR5tLKj7oUBiAoMsXx6eYzD7avnehWUAmC5o3ZA,14638
14
+ flo_python-0.1.0.dev4.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
15
+ flo_python-0.1.0.dev4.dist-info/licenses/LICENSE,sha256=mADRSuVhPxFNpZiy7zO0gBjGrNO24OtWO9VCX6wmCVU,1065
16
+ flo_python-0.1.0.dev4.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: hatchling 1.28.0
2
+ Generator: hatchling 1.29.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,14 +0,0 @@
1
- flo/__init__.py,sha256=NRNS5KlrSrXSqO7sG3f8AN42zIWi27hgyAPaRtWY_XA,5046
2
- flo/actions.py,sha256=wKxRYrAa7DkqVT7WTjiWvwbC5XtKlLND-ZPGblHUmKA,11358
3
- flo/client.py,sha256=cRPKhKTcx9doPwiKhvzM9PTWdYWGlZ1qAqK8O-H8Evg,10425
4
- flo/exceptions.py,sha256=awUWxRqXxKc53GK7uEg23oqt2cb5iSZcnNHgXquOu4U,5306
5
- flo/kv.py,sha256=nmdggm9x-5FCjkf5tHJ4-LdENOMDCynvX6Zba4yhl2I,7244
6
- flo/queue.py,sha256=Nn6CkgUkbXgUuH9BOCWlRkjxTZLKRs0_iMyRmwkYK6k,10888
7
- flo/streams.py,sha256=VYtX-lh3GggTsqrmgxOVCNMhmHWxtuSZ_2TanBVWUzw,13005
8
- flo/types.py,sha256=3PqUI1z5q1p5BQAmz9W453cU_OyTESPjRFS7uAa69zQ,21581
9
- flo/wire.py,sha256=392ws6QO8K_lioq80shIZUKZTqRutu28sLGdnnTr6MA,27000
10
- flo/worker.py,sha256=U51Y3XzLluFnYb4M-kBLvSssGitfrbyXkF3TE0Z4p_0,13531
11
- flo_python-0.1.0.dev2.dist-info/METADATA,sha256=ujnF7wkaJlazQD8d_hk1BGBFfjMzsW8y6c6nDkdjoUU,13989
12
- flo_python-0.1.0.dev2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
13
- flo_python-0.1.0.dev2.dist-info/licenses/LICENSE,sha256=mADRSuVhPxFNpZiy7zO0gBjGrNO24OtWO9VCX6wmCVU,1065
14
- flo_python-0.1.0.dev2.dist-info/RECORD,,