lumera 0.10.0__tar.gz → 0.10.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lumera-0.10.0 → lumera-0.10.1}/PKG-INFO +1 -1
- {lumera-0.10.0 → lumera-0.10.1}/lumera/automations.py +93 -11
- {lumera-0.10.0 → lumera-0.10.1}/lumera.egg-info/PKG-INFO +1 -1
- {lumera-0.10.0 → lumera-0.10.1}/pyproject.toml +1 -1
- {lumera-0.10.0 → lumera-0.10.1}/lumera/__init__.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/_utils.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/email.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/exceptions.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/files.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/google.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/integrations/__init__.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/integrations/google.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/llm.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/locks.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/pb.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/sdk.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/storage.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera/webhooks.py +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera.egg-info/SOURCES.txt +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera.egg-info/dependency_links.txt +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera.egg-info/requires.txt +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/lumera.egg-info/top_level.txt +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/setup.cfg +0 -0
- {lumera-0.10.0 → lumera-0.10.1}/tests/test_sdk.py +0 -0
--- lumera-0.10.0/lumera/automations.py
+++ lumera-0.10.1/lumera/automations.py
@@ -64,6 +64,7 @@ __all__ = [
     # Classes
     "Run",
     "Automation",
+    "LogEntry",
     "LogsResponse",
 ]
 
@@ -71,6 +72,67 @@ from ._utils import LumeraAPIError, _api_request
 from .sdk import get_automation_run as _get_automation_run
 from .sdk import run_automation as _run_automation
 
+# ============================================================================
+# LogEntry Class
+# ============================================================================
+
+
+class LogEntry:
+    """A single log entry from an automation run.
+
+    Attributes:
+        content: The log message content.
+        type: Log type ("stream_stdout", "stream_stderr", "warning", "image_png", "image_jpeg").
+        timestamp: ISO timestamp when the log was emitted.
+        error: True if this is an error entry.
+    """
+
+    def __init__(self, data: dict[str, Any]) -> None:
+        self._data = data
+
+    @property
+    def content(self) -> str:
+        return self._data.get("content", "")
+
+    @property
+    def type(self) -> str:
+        return self._data.get("type", "")
+
+    @property
+    def timestamp(self) -> str | None:
+        return self._data.get("timestamp")
+
+    @property
+    def error(self) -> bool:
+        return self._data.get("error", False)
+
+    @property
+    def is_image(self) -> bool:
+        """True if this entry contains image data (base64 encoded in content)."""
+        return self.type in ("image_png", "image_jpeg")
+
+    def __repr__(self) -> str:
+        preview = self.content[:50] + "..." if len(self.content) > 50 else self.content
+        return f"LogEntry(type={self.type!r}, content={preview!r})"
+
+
+def _parse_ndjson_entries(data: str) -> list[LogEntry]:
+    """Parse NDJSON log data into LogEntry objects."""
+    entries = []
+    for line in data.splitlines():
+        line = line.strip()
+        if not line:
+            continue
+        try:
+            parsed = json.loads(line)
+            if isinstance(parsed, dict):
+                entries.append(LogEntry(parsed))
+        except (json.JSONDecodeError, ValueError):
+            # Skip malformed lines or lines with huge numbers
+            pass
+    return entries
+
+
 # ============================================================================
 # LogsResponse Class
 # ============================================================================
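For a sense of how the new public LogEntry wrapper behaves, here is a small usage sketch against lumera 0.10.1; the field values in the sample dict are invented for illustration, and only the class and its properties come from the diff above.

from lumera.automations import LogEntry

# Invented sample record shaped like one NDJSON log line.
raw = {
    "type": "stream_stdout",
    "content": "hello from the run",
    "timestamp": "2024-01-01T00:00:00Z",
    "error": False,
}

entry = LogEntry(raw)
print(entry.type)      # "stream_stdout"
print(entry.content)   # "hello from the run"
print(entry.is_image)  # False; only "image_png"/"image_jpeg" entries carry image data
print(entry)           # LogEntry(type='stream_stdout', content='hello from the run')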
@@ -81,6 +143,7 @@ class LogsResponse:
 
     Attributes:
         data: Raw log content as a string (NDJSON format).
+        entries: Parsed log entries as LogEntry objects.
         offset: Byte offset where this chunk starts.
         size: Number of bytes in this chunk.
         total_size: Total size of the log file.
@@ -91,11 +154,20 @@ class LogsResponse:
 
     def __init__(self, data: dict[str, Any]) -> None:
         self._data = data
+        self._entries: list[LogEntry] | None = None
 
     @property
     def data(self) -> str:
+        """Raw NDJSON log content."""
         return self._data.get("data", "")
 
+    @property
+    def entries(self) -> list[LogEntry]:
+        """Parsed log entries. Lazily parsed from NDJSON data."""
+        if self._entries is None:
+            self._entries = _parse_ndjson_entries(self.data)
+        return self._entries
+
     @property
     def offset(self) -> int:
         return self._data.get("offset", 0)
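A hedged sketch of how the new lazily parsed entries property might be used on fetched logs; the run id is a placeholder, and the module-level get_logs keywords are assumed to mirror the offset/limit/all parameters that Run.logs forwards in the unchanged code.

from lumera import automations

logs = automations.get_logs("run_id", all=True)  # "run_id" is a placeholder

print(f"{logs.total_size} bytes of NDJSON logs")

# .entries parses the NDJSON payload once, on first access, and caches the result.
for entry in logs.entries:
    if entry.error:
        print("ERROR:", entry.content)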
@@ -342,7 +414,7 @@ class Run:
             raise ValueError("Cannot fetch logs without run id")
         return get_logs(self.id, offset=offset, limit=limit, all=all)
 
-    def stream_logs(self, *, timeout: float = 30) -> Iterator[str]:
+    def stream_logs(self, *, timeout: float = 30) -> Iterator[LogEntry]:
         """Stream logs from this run.
 
         Works for both live (running) and archived (completed) runs.
@@ -353,12 +425,12 @@
             timeout: HTTP connection timeout in seconds.
 
         Yields:
-
+            LogEntry objects with content, type, timestamp, and error fields.
 
         Example:
             >>> run = automations.run("automation_id", inputs={})
-            >>> for
-            ... print(
+            >>> for entry in run.stream_logs():
+            ...     print(f"[{entry.type}] {entry.content}")
         """
         if not self.id:
             raise ValueError("Cannot stream logs without run id")
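Because the yield type moves from plain strings to LogEntry objects, 0.10.0-style loops that printed each yielded value directly will now print LogEntry reprs instead of log text. Below is a hedged sketch of consuming the new stream, separating image entries (whose content is base64 per the is_image docstring) from text output; the automation id and snapshot filename are placeholders.

import base64

from lumera import automations

run = automations.run("automation_id", inputs={})  # placeholder automation id

for entry in run.stream_logs(timeout=60):
    if entry.is_image:
        # Image entries carry base64-encoded data in `content` (see is_image above).
        suffix = "png" if entry.type == "image_png" else "jpg"
        with open(f"snapshot.{suffix}", "wb") as fh:
            fh.write(base64.b64decode(entry.content))
    elif entry.error or entry.type == "stream_stderr":
        print("stderr:", entry.content)
    else:
        print(entry.content)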
@@ -912,11 +984,11 @@ def delete(automation_id: str) -> None:
 # ============================================================================
 
 
-def stream_logs(run_id: str, *, timeout: float = 30) -> Iterator[str]:
+def stream_logs(run_id: str, *, timeout: float = 30) -> Iterator[LogEntry]:
     """Stream logs from an automation run.
 
     Works for both live (running) and archived (completed) runs.
-    Connects to the server-sent events endpoint and yields
+    Connects to the server-sent events endpoint and yields LogEntry objects
     as they arrive. For live runs, streams in real-time. For archived
     runs, streams the entire log from storage.
 
@@ -925,11 +997,11 @@ def stream_logs(run_id: str, *, timeout: float = 30) -> Iterator[str]:
         timeout: HTTP connection timeout in seconds.
 
     Yields:
-
+        LogEntry objects with content, type, timestamp, and error fields.
 
     Example:
-        >>> for
-        ... print(
+        >>> for entry in automations.stream_logs("run_id"):
+        ...     print(f"[{entry.type}] {entry.content}")
     """
     import base64
     import os
@@ -971,10 +1043,20 @@ def stream_logs(run_id: str, *, timeout: float = 30) -> Iterator[str]:
             try:
                 data = json.loads(current_data)
                 if "data" in data:
-                    # Data is base64-encoded
+                    # Data is base64-encoded NDJSON
                     raw = base64.b64decode(data["data"])
                     decoded = raw.decode("utf-8", errors="replace")
-
+                    for ndjson_line in decoded.splitlines():
+                        ndjson_line = ndjson_line.strip()
+                        if not ndjson_line:
+                            continue
+                        try:
+                            entry_data = json.loads(ndjson_line)
+                            if isinstance(entry_data, dict):
+                                yield LogEntry(entry_data)
+                        except (json.JSONDecodeError, ValueError):
+                            # Skip malformed lines or lines with huge numbers
+                            pass
             except (json.JSONDecodeError, KeyError):
                 pass
         elif current_event == "complete":
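To make the new decoding path concrete, here is a standalone sketch that fabricates one SSE data payload in the shape the loop above expects (base64-wrapped NDJSON) and walks it through the same decode-then-parse steps; the log records are invented and no lumera import is needed.

import base64
import json

# Fabricated SSE "data" payload: base64 wrapping two NDJSON log lines.
ndjson = "\n".join(
    json.dumps(record)
    for record in (
        {"type": "stream_stdout", "content": "step 1 done", "error": False},
        {"type": "warning", "content": "rate limited, backing off", "error": False},
    )
)
payload = {"data": base64.b64encode(ndjson.encode()).decode()}

# Same sequence as the streaming code: decode base64, split lines, json.loads each.
raw = base64.b64decode(payload["data"])
for line in raw.decode("utf-8", errors="replace").splitlines():
    line = line.strip()
    if line:
        record = json.loads(line)
        print(record["type"], "->", record["content"])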