streamator 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,93 @@
1
+ Metadata-Version: 2.4
2
+ Name: streamator
3
+ Version: 0.1.1
4
+ Summary: Backend logging primitive for long-running async jobs
5
+ Author-email: Arved Klöhn <arved.kloehn@gmail.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/Redundando/streamator
8
+ Project-URL: Repository, https://github.com/Redundando/streamator
9
+ Keywords: streaming,logging,sse,fastapi,async
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Framework :: FastAPI
12
+ Classifier: Intended Audience :: Developers
13
+ Requires-Python: >=3.10
14
+ Description-Content-Type: text/markdown
15
+ Provides-Extra: dynamo
16
+ Requires-Dist: dynamorator>=0.1.6; extra == "dynamo"
17
+ Provides-Extra: fastapi
18
+ Requires-Dist: fastapi>=0.133.1; extra == "fastapi"
19
+
20
+ # streamator
21
+
22
+ Backend logging primitive for long-running async jobs. Stream timestamped log messages
23
+ to a browser UI via SSE or polling — no WebSockets, no custom infrastructure.
24
+
25
+ Pairs with [`streamator-react`](https://www.npmjs.com/package/streamator-react) on the frontend.
26
+
27
+ ## Install
28
+
29
+ ```bash
30
+ pip install streamator                   # core only (in-memory)
31
+ pip install "streamator[fastapi]"        # + FastAPI route helpers
32
+ pip install "streamator[dynamo]"         # + DynamoDB backend
33
+ pip install "streamator[fastapi,dynamo]" # everything (quotes prevent shell globbing, e.g. in zsh)
34
+ ```
35
+
36
+ ## Usage
37
+
38
+ ```python
39
+ from streamator import JobLogger
40
+
41
+ async def my_job(logger: JobLogger):
42
+ logger.log("Starting…")
43
+ await do_work()
44
+ logger.log("Done", level="success")
45
+ logger.close()
46
+
47
+ @router.post("/start")
48
+ async def start():
49
+ logger = JobLogger()
50
+ asyncio.create_task(my_job(logger))
51
+ return {"log_job_id": logger.job_id}
52
+ ```
53
+
54
+ Or use the context manager — `close()` is called automatically, even on exception:
55
+
56
+ ```python
57
+ async with JobLogger() as logger:
58
+ logger.log("Starting…")
59
+ await do_work()
60
+ logger.log("Done", level="success")
61
+ ```
62
+
63
+ ## FastAPI integration
64
+
65
+ ```python
66
+ from streamator.fastapi import add_log_routes
67
+
68
+ add_log_routes(app, prefix="/log")
69
+ # GET /log/{job_id}/stream → SSE stream
70
+ # GET /log/{job_id} → { logs: [...] } snapshot
71
+ ```
72
+
73
+ ## Storage backends
74
+
75
+ **Memory** (default) — `asyncio.Queue`, SSE push, zero dependencies, single-process only.
76
+
77
+ **DynamoDB** — persists across restarts, readable by polling, distributed-friendly.
78
+
79
+ ```python
80
+ logger = JobLogger(store="dynamo", table="my-logs-table", ttl_days=1)
81
+ ```
82
+
83
+ ## Log levels
84
+
85
+ `"info"` (default) · `"success"` · `"warning"` · `"error"`
86
+
87
+ ## Log entry format
88
+
89
+ ```python
90
+ {"event": "log", "message": "...", "level": "info", "t": 1.23}
91
+ ```
92
+
93
+ `t` is seconds since the logger was created, computed at the moment `log()` is called.
@@ -0,0 +1,74 @@
1
+ # streamator
2
+
3
+ Backend logging primitive for long-running async jobs. Stream timestamped log messages
4
+ to a browser UI via SSE or polling — no WebSockets, no custom infrastructure.
5
+
6
+ Pairs with [`streamator-react`](https://www.npmjs.com/package/streamator-react) on the frontend.
7
+
8
+ ## Install
9
+
10
+ ```bash
11
+ pip install streamator                   # core only (in-memory)
12
+ pip install "streamator[fastapi]"        # + FastAPI route helpers
13
+ pip install "streamator[dynamo]"         # + DynamoDB backend
14
+ pip install "streamator[fastapi,dynamo]" # everything (quotes prevent shell globbing, e.g. in zsh)
15
+ ```
16
+
17
+ ## Usage
18
+
19
+ ```python
20
+ from streamator import JobLogger
21
+
22
+ async def my_job(logger: JobLogger):
23
+ logger.log("Starting…")
24
+ await do_work()
25
+ logger.log("Done", level="success")
26
+ logger.close()
27
+
28
+ @router.post("/start")
29
+ async def start():
30
+ logger = JobLogger()
31
+ asyncio.create_task(my_job(logger))
32
+ return {"log_job_id": logger.job_id}
33
+ ```
34
+
35
+ Or use the context manager — `close()` is called automatically, even on exception:
36
+
37
+ ```python
38
+ async with JobLogger() as logger:
39
+ logger.log("Starting…")
40
+ await do_work()
41
+ logger.log("Done", level="success")
42
+ ```
43
+
44
+ ## FastAPI integration
45
+
46
+ ```python
47
+ from streamator.fastapi import add_log_routes
48
+
49
+ add_log_routes(app, prefix="/log")
50
+ # GET /log/{job_id}/stream → SSE stream
51
+ # GET /log/{job_id} → { logs: [...] } snapshot
52
+ ```
53
+
54
+ ## Storage backends
55
+
56
+ **Memory** (default) — `asyncio.Queue`, SSE push, zero dependencies, single-process only.
57
+
58
+ **DynamoDB** — persists across restarts, readable by polling, distributed-friendly.
59
+
60
+ ```python
61
+ logger = JobLogger(store="dynamo", table="my-logs-table", ttl_days=1)
62
+ ```
63
+
64
+ ## Log levels
65
+
66
+ `"info"` (default) · `"success"` · `"warning"` · `"error"`
67
+
68
+ ## Log entry format
69
+
70
+ ```python
71
+ {"event": "log", "message": "...", "level": "info", "t": 1.23}
72
+ ```
73
+
74
+ `t` is seconds since the logger was created, computed at the moment `log()` is called.
@@ -0,0 +1,30 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "streamator"
7
+ version = "0.1.1"
8
+ description = "Backend logging primitive for long-running async jobs"
9
+ readme = "README.md"
10
+ license = { text = "MIT" }
11
+ authors = [{ name = "Arved Klöhn", email = "arved.kloehn@gmail.com" }]
12
+ keywords = ["streaming", "logging", "sse", "fastapi", "async"]
13
+ classifiers = [
14
+ "Programming Language :: Python :: 3",
15
+ "Framework :: FastAPI",
16
+ "Intended Audience :: Developers",
17
+ ]
18
+ requires-python = ">=3.10"
19
+ dependencies = []
20
+
21
+ [project.urls]
22
+ Homepage = "https://github.com/Redundando/streamator"
23
+ Repository = "https://github.com/Redundando/streamator"
24
+
25
+ [project.optional-dependencies]
26
+ dynamo = ["dynamorator>=0.1.6"]
27
+ fastapi = ["fastapi>=0.133.1"]
28
+
29
+ [tool.pytest.ini_options]
30
+ asyncio_mode = "auto"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,3 @@
1
+ from .logger import JobLogger
2
+
3
+ __all__ = ["JobLogger"]
@@ -0,0 +1,45 @@
1
+ import json
2
+
3
+ from fastapi import FastAPI
4
+ from fastapi.responses import StreamingResponse, JSONResponse
5
+
6
+ from .logger import JobLogger
7
+
8
# Process-local registry mapping job_id -> live JobLogger instance.
_loggers: dict[str, JobLogger] = {}


def _get(job_id: str) -> JobLogger | None:
    """Return the logger registered under *job_id*, or None when unknown."""
    try:
        return _loggers[job_id]
    except KeyError:
        return None
13
+
14
+
15
async def _sse_generator(logger: JobLogger):
    """Yield SSE-framed log entries for *logger*, ending with a 'done' event.

    The registry entry is removed in a ``finally`` block so it is dropped
    even when the client disconnects mid-stream and the generator is
    closed early (GeneratorExit) — otherwise abandoned streams would leak
    loggers in ``_loggers`` for the life of the process.
    """
    try:
        async for entry in logger._store.stream():
            yield f"data: {json.dumps(entry)}\n\n"
        # Terminal event so the frontend knows the job finished.
        yield f"data: {json.dumps({'event': 'done'})}\n\n"
    finally:
        _loggers.pop(logger.job_id, None)
20
+
21
+
22
def make_stream_response(job_id: str) -> StreamingResponse:
    """Build an SSE response streaming *job_id*'s log, or a 404 JSON error."""
    logger = _get(job_id)
    if logger is not None:
        return StreamingResponse(
            _sse_generator(logger),
            media_type="text/event-stream",
        )
    return JSONResponse({"error": "not found"}, status_code=404)
30
+
31
+
32
def add_log_routes(app: FastAPI, prefix: str = "/log"):
    """Mount the two log endpoints on *app*.

    GET {prefix}/{job_id}/stream — SSE stream of log entries.
    GET {prefix}/{job_id}        — JSON snapshot: {"logs": [...]}.
    """

    @app.get(f"{prefix}/{{job_id}}/stream")
    async def stream(job_id: str):
        # Lookup + 404 handling lives in the response factory.
        return make_stream_response(job_id)

    @app.get(f"{prefix}/{{job_id}}")
    async def snapshot(job_id: str):
        logger = _get(job_id)
        if logger is None:
            return JSONResponse({"error": "not found"}, status_code=404)
        entries = logger._store.snapshot()
        # Mirror the SSE terminator so polling clients can detect completion.
        if logger._store._closed:
            entries = [*entries, {"event": "done"}]
        return {"logs": entries}
@@ -0,0 +1,45 @@
1
+ import time
2
+ import uuid
3
+
4
+ from .store import MemoryStore, DynamoStore
5
+
6
class JobLogger:
    """Timestamped log collector for one long-running async job.

    Entries go to a pluggable store — ``"memory"`` (in-process queue) or
    ``"dynamo"`` (DynamoDB via the optional extra) — and can be served to
    a browser through the optional FastAPI routes, which find the logger
    by its generated ``job_id``.
    """

    def __init__(self, store="memory", **kwargs):
        self.job_id = str(uuid.uuid4())
        # Monotonic clock so per-entry "t" offsets are immune to wall-clock jumps.
        self.start_t = time.monotonic()

        if store == "memory":
            self._store = MemoryStore()
        elif store == "dynamo":
            # Fail with a clear message instead of a bare KeyError.
            if "table" not in kwargs:
                raise ValueError("store='dynamo' requires a 'table' keyword argument")
            self._store = DynamoStore(
                table=kwargs["table"],
                ttl_days=kwargs.get("ttl_days", 7),
            )
            self._store.set_key(self.job_id)
        else:
            raise ValueError(f"Unknown store: {store!r}")

        # Register with the FastAPI route registry when that extra is
        # importable; silently skip otherwise (core has zero dependencies).
        try:
            from . import fastapi as _fa
            _fa._loggers[self.job_id] = self
        except ImportError:
            pass

    def log(self, message: str, level: str = "info"):
        """Append one entry; ``t`` is seconds since the logger was created."""
        entry = {
            "event": "log",
            "message": message,
            "level": level,
            "t": round(time.monotonic() - self.start_t, 3),
        }
        self._store.append(entry)

    def close(self):
        """Mark the job finished so streams/snapshots can report 'done'."""
        self._store.close()

    async def __aenter__(self):
        return self

    async def __aexit__(self, *_):
        # Always close, even when the job body raised.
        self.close()
45
+
@@ -0,0 +1,50 @@
1
+ import asyncio
2
+
3
+
4
class MemoryStore:
    """In-process log store backed by ``asyncio.Queue`` (single process only).

    ``append`` records each entry both in an ordered history (for
    ``snapshot``) and on a queue (for live streaming). ``close`` pushes a
    ``None`` sentinel that terminates ``stream``.
    """

    def __init__(self):
        self._queue = asyncio.Queue()   # live feed consumed by stream()
        self._log = []                  # full history returned by snapshot()
        self._closed = False

    def append(self, entry: dict):
        """Record *entry* and wake any active stream consumer."""
        self._log.append(entry)
        self._queue.put_nowait(entry)

    async def stream(self):
        """Yield entries as they arrive until the store is closed."""
        while True:
            entry = await self._queue.get()
            if entry is None:
                # Re-seed the sentinel so a later stream() call also
                # terminates immediately instead of blocking forever.
                self._queue.put_nowait(None)
                break
            yield entry

    def snapshot(self) -> list:
        """Return a copy of every entry appended so far."""
        return list(self._log)

    def close(self):
        """Mark the store finished; idempotent (no duplicate sentinels)."""
        if not self._closed:
            self._closed = True
            self._queue.put_nowait(None)
27
+
28
+
29
class DynamoStore:
    """Log store persisted in DynamoDB via the optional ``dynamorator`` extra.

    All of a job's entries live under one item keyed by job_id, so logs
    survive restarts and are readable by polling from any process.
    """

    def __init__(self, table: str, ttl_days: float = 7):
        # Imported lazily so the core package keeps zero hard dependencies.
        from dynamorator import DynamoDBStore

        self._store = DynamoDBStore(table_name=table)
        self._ttl_days = ttl_days
        self._key = None
        self._closed = False

    def set_key(self, job_id: str):
        """Bind this store to a job id (used as the DynamoDB item key)."""
        self._key = job_id

    def append(self, entry: dict):
        """Read-modify-write the job item, adding *entry* to its log list."""
        record = self._store.get(self._key) or {"logs": []}
        record["logs"].append(entry)
        self._store.put(self._key, record, ttl_days=self._ttl_days)

    def close(self):
        # In-process flag only; remote readers cannot observe it.
        self._closed = True

    def snapshot(self) -> list:
        """Return all persisted entries (empty when nothing written yet)."""
        record = self._store.get(self._key)
        return record["logs"] if record else []
@@ -0,0 +1,93 @@
1
+ Metadata-Version: 2.4
2
+ Name: streamator
3
+ Version: 0.1.1
4
+ Summary: Backend logging primitive for long-running async jobs
5
+ Author-email: Arved Klöhn <arved.kloehn@gmail.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/Redundando/streamator
8
+ Project-URL: Repository, https://github.com/Redundando/streamator
9
+ Keywords: streaming,logging,sse,fastapi,async
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Framework :: FastAPI
12
+ Classifier: Intended Audience :: Developers
13
+ Requires-Python: >=3.10
14
+ Description-Content-Type: text/markdown
15
+ Provides-Extra: dynamo
16
+ Requires-Dist: dynamorator>=0.1.6; extra == "dynamo"
17
+ Provides-Extra: fastapi
18
+ Requires-Dist: fastapi>=0.133.1; extra == "fastapi"
19
+
20
+ # streamator
21
+
22
+ Backend logging primitive for long-running async jobs. Stream timestamped log messages
23
+ to a browser UI via SSE or polling — no WebSockets, no custom infrastructure.
24
+
25
+ Pairs with [`streamator-react`](https://www.npmjs.com/package/streamator-react) on the frontend.
26
+
27
+ ## Install
28
+
29
+ ```bash
30
+ pip install streamator                   # core only (in-memory)
31
+ pip install "streamator[fastapi]"        # + FastAPI route helpers
32
+ pip install "streamator[dynamo]"         # + DynamoDB backend
33
+ pip install "streamator[fastapi,dynamo]" # everything (quotes prevent shell globbing, e.g. in zsh)
34
+ ```
35
+
36
+ ## Usage
37
+
38
+ ```python
39
+ from streamator import JobLogger
40
+
41
+ async def my_job(logger: JobLogger):
42
+ logger.log("Starting…")
43
+ await do_work()
44
+ logger.log("Done", level="success")
45
+ logger.close()
46
+
47
+ @router.post("/start")
48
+ async def start():
49
+ logger = JobLogger()
50
+ asyncio.create_task(my_job(logger))
51
+ return {"log_job_id": logger.job_id}
52
+ ```
53
+
54
+ Or use the context manager — `close()` is called automatically, even on exception:
55
+
56
+ ```python
57
+ async with JobLogger() as logger:
58
+ logger.log("Starting…")
59
+ await do_work()
60
+ logger.log("Done", level="success")
61
+ ```
62
+
63
+ ## FastAPI integration
64
+
65
+ ```python
66
+ from streamator.fastapi import add_log_routes
67
+
68
+ add_log_routes(app, prefix="/log")
69
+ # GET /log/{job_id}/stream → SSE stream
70
+ # GET /log/{job_id} → { logs: [...] } snapshot
71
+ ```
72
+
73
+ ## Storage backends
74
+
75
+ **Memory** (default) — `asyncio.Queue`, SSE push, zero dependencies, single-process only.
76
+
77
+ **DynamoDB** — persists across restarts, readable by polling, distributed-friendly.
78
+
79
+ ```python
80
+ logger = JobLogger(store="dynamo", table="my-logs-table", ttl_days=1)
81
+ ```
82
+
83
+ ## Log levels
84
+
85
+ `"info"` (default) · `"success"` · `"warning"` · `"error"`
86
+
87
+ ## Log entry format
88
+
89
+ ```python
90
+ {"event": "log", "message": "...", "level": "info", "t": 1.23}
91
+ ```
92
+
93
+ `t` is seconds since the logger was created, computed at the moment `log()` is called.
@@ -0,0 +1,12 @@
1
+ README.md
2
+ pyproject.toml
3
+ streamator/__init__.py
4
+ streamator/fastapi.py
5
+ streamator/logger.py
6
+ streamator/store.py
7
+ streamator.egg-info/PKG-INFO
8
+ streamator.egg-info/SOURCES.txt
9
+ streamator.egg-info/dependency_links.txt
10
+ streamator.egg-info/requires.txt
11
+ streamator.egg-info/top_level.txt
12
+ tests/test_backend.py
@@ -0,0 +1,6 @@
1
+
2
+ [dynamo]
3
+ dynamorator>=0.1.6
4
+
5
+ [fastapi]
6
+ fastapi>=0.133.1
@@ -0,0 +1 @@
1
+ streamator
@@ -0,0 +1,99 @@
1
+ import pytest
2
+ from fastapi import FastAPI
3
+ from fastapi.testclient import TestClient
4
+
5
+ from streamator import JobLogger
6
+ from streamator import fastapi as sf
7
+
8
+
9
@pytest.fixture(autouse=True)
def clear_registry():
    """Start and finish every test with an empty logger registry."""
    sf._loggers.clear()
    try:
        yield
    finally:
        sf._loggers.clear()
14
+
15
+
16
+ # --- JobLogger ---
17
+
18
def test_job_id_is_unique():
    """Each logger gets its own UUID-based job id."""
    first, second = JobLogger().job_id, JobLogger().job_id
    assert first != second
21
+
22
+
23
def test_log_entry_shape():
    """A single log() call produces one well-formed entry."""
    logger = JobLogger()
    logger.log("hello")
    (entry,) = logger._store.snapshot()
    expected = {"event": "log", "message": "hello", "level": "info"}
    assert {key: entry[key] for key in expected} == expected
    assert isinstance(entry["t"], float)
33
+
34
+
35
def test_log_levels():
    """The level kwarg is stored verbatim on each entry."""
    expected = ["info", "success", "warning", "error"]
    logger = JobLogger()
    for lvl in expected:
        logger.log("msg", level=lvl)
    assert [e["level"] for e in logger._store.snapshot()] == expected
41
+
42
+
43
+ # --- MemoryStore streaming ---
44
+
45
async def test_memory_store_stream():
    """stream() replays appended entries in order, then ends after close()."""
    logger = JobLogger()
    for msg in ("a", "b"):
        logger.log(msg)
    logger.close()

    received = [entry["message"] async for entry in logger._store.stream()]
    assert received == ["a", "b"]
56
+
57
+
58
+ # --- FastAPI routes ---
59
+
60
@pytest.fixture
def app_client():
    """TestClient for a fresh app with the log routes mounted."""
    application = FastAPI()
    sf.add_log_routes(application)
    return TestClient(application)
65
+
66
+
67
def test_snapshot_route(app_client):
    """GET /log/{job_id} returns the logged entries in order."""
    logger = JobLogger()
    logger.log("step 1")
    logger.log("step 2")

    resp = app_client.get(f"/log/{logger.job_id}")
    assert resp.status_code == 200
    messages = [entry["message"] for entry in resp.json()["logs"]]
    assert messages == ["step 1", "step 2"]
77
+
78
+
79
def test_snapshot_not_found(app_client):
    """Unknown job ids yield 404 from the snapshot route."""
    assert app_client.get("/log/nonexistent").status_code == 404
82
+
83
+
84
def test_stream_route(app_client):
    """The SSE route delivers logged entries as `data:` lines."""
    logger = JobLogger()
    logger.log("streamed")
    logger.close()

    with app_client.stream("GET", f"/log/{logger.job_id}/stream") as resp:
        assert resp.status_code == 200
        lines = list(resp.iter_lines())

    payloads = [line for line in lines if line.startswith("data:")]
    assert any("streamed" in payload for payload in payloads)
95
+
96
+
97
def test_stream_not_found(app_client):
    """Unknown job ids yield 404 from the stream route."""
    assert app_client.get("/log/nonexistent/stream").status_code == 404