glacis-0.1.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- glacis/__init__.py +88 -0
- glacis/__main__.py +76 -0
- glacis/client.py +847 -0
- glacis/crypto.py +121 -0
- glacis/integrations/__init__.py +12 -0
- glacis/integrations/anthropic.py +222 -0
- glacis/integrations/openai.py +208 -0
- glacis/models.py +293 -0
- glacis/storage.py +331 -0
- glacis/streaming.py +363 -0
- glacis/wasm/s3p_core_wasi.wasm +0 -0
- glacis/wasm_runtime.py +519 -0
- glacis-0.1.0.dist-info/METADATA +324 -0
- glacis-0.1.0.dist-info/RECORD +16 -0
- glacis-0.1.0.dist-info/WHEEL +4 -0
- glacis-0.1.0.dist-info/licenses/LICENSE +190 -0
glacis/streaming.py
ADDED
"""
Streaming SDK Extensions for Python

Provides streaming session support for real-time AI interactions
(voice, healthcare, etc.) where chunks are attested individually.

Example:
    >>> from glacis import Glacis
    >>> from glacis.streaming import StreamingSession
    >>>
    >>> glacis = Glacis(api_key="glsk_live_xxx")
    >>> session = await StreamingSession.start(glacis, {
    ...     "service_id": "voice-assistant",
    ...     "operation_type": "completion",
    ...     "session_do_url": "https://session-do.glacis.dev",
    ... })
    >>>
    >>> await session.attest_chunk({"input": audio_chunk, "output": transcript})
    >>> receipt = await session.end(metadata={"duration": "00:05:23"})

Context Manager:
    >>> async with glacis.session(config) as session:
    ...     await session.attest_chunk(input=audio, output=transcript)
    ...     # Auto-ends on exit
"""

import asyncio
import uuid
from dataclasses import dataclass, field
from typing import Any, Callable, Optional, TypedDict

import httpx

from glacis.crypto import hash_payload


class StreamingSessionConfig(TypedDict, total=False):
    """Configuration for starting a streaming session."""

    service_id: str
    operation_type: str
    session_do_url: str
    auto_end_timeout_ms: Optional[int]
    chunk_batch_size: Optional[int]


@dataclass
class SessionReceipt:
    """Receipt from ending a session."""

    session_id: str
    session_root: str
    chunk_count: int
    started_at: str
    ended_at: str
    attest_receipt: Optional[Any] = None


class StreamingSession:
    """
    Streaming session for chunk-by-chunk attestation.

    Use StreamingSession.start() to create a new session.
    """

    def __init__(
        self,
        glacis: Any,
        session_id: str,
        session_do_url: str,
        service_id: str,
        operation_type: str,
        api_key: str,
        session_token: str,
    ):
        from glacis import AsyncGlacis

        self._glacis = glacis
        self._session_id = session_id
        self._session_do_url = session_do_url.rstrip("/")
        self._service_id = service_id
        self._operation_type = operation_type
        self._api_key = api_key
        self._session_token = session_token
        self._sequence = 0
        self._ended = False
        self._pending_tasks: list[asyncio.Task[None]] = []
        self._client = httpx.AsyncClient()

    @property
    def session_id(self) -> str:
        """Get the session ID."""
        return self._session_id

    @classmethod
    async def start(
        cls,
        glacis: Any,
        config: StreamingSessionConfig,
    ) -> "StreamingSession":
        """
        Start a new streaming session.

        Args:
            glacis: AsyncGlacis or Glacis client
            config: Session configuration

        Returns:
            StreamingSession instance
        """
        session_id = f"ses_{uuid.uuid4()}"
        api_key = glacis.get_api_key()

        async with httpx.AsyncClient() as client:
            response = await client.post(
                f"{config['session_do_url']}/start",
                headers={
                    "X-Glacis-Key": api_key,
                },
                json={
                    "sessionId": session_id,
                    "serviceId": config["service_id"],
                    "operationType": config["operation_type"],
                    "config": {
                        "autoEndTimeoutMs": config.get("auto_end_timeout_ms"),
                        "chunkBatchSize": config.get("chunk_batch_size"),
                    },
                },
            )

        if not response.is_success:
            try:
                error = response.json()
            except Exception:
                error = {}
            raise RuntimeError(
                f"Failed to start session: {error.get('error', response.status_code)}"
            )

        result = response.json()
        session_token = result.get("sessionToken", "")

        return cls(
            glacis=glacis,
            session_id=session_id,
            session_do_url=config["session_do_url"],
            service_id=config["service_id"],
            operation_type=config["operation_type"],
            api_key=api_key,
            session_token=session_token,
        )

    def attest_chunk_sync(self, input: Any, output: Any) -> None:
        """
        Attest a chunk synchronously (fire-and-forget).

        This queues the chunk for attestation and returns immediately.
        Errors are logged but do not raise.
        """
        if self._ended:
            print(f"[glacis] Cannot attest chunk: session {self._session_id} has ended")
            return

        task = asyncio.create_task(self._attest_chunk_internal(input, output))
        self._pending_tasks.append(task)

    async def attest_chunk(self, input: Any, output: Any) -> None:
        """
        Attest a chunk asynchronously.

        Args:
            input: Input data (hashed locally)
            output: Output data (hashed locally)
        """
        if self._ended:
            raise RuntimeError(f"Session {self._session_id} has ended")

        await self._attest_chunk_internal(input, output)

    async def _attest_chunk_internal(self, input: Any, output: Any) -> None:
        """Internal chunk attestation."""
        input_hash = hash_payload(input)
        output_hash = hash_payload(output)
        sequence = self._sequence
        self._sequence += 1

        response = await self._client.post(
            f"{self._session_do_url}/chunk",
            headers={
                "X-Glacis-Session-Token": self._session_token,
            },
            json={
                "sessionId": self._session_id,
                "sequence": sequence,
                "inputHash": input_hash,
                "outputHash": output_hash,
            },
        )

        if not response.is_success:
            try:
                error = response.json()
            except Exception:
                error = {}
            raise RuntimeError(
                f"Failed to attest chunk: {error.get('error', response.status_code)}"
            )

    async def end(self, metadata: Optional[dict[str, str]] = None) -> SessionReceipt:
        """
        End the session and submit to main log.

        Args:
            metadata: Optional metadata to include

        Returns:
            SessionReceipt with attestation info
        """
        if self._ended:
            raise RuntimeError(f"Session {self._session_id} already ended")

        # Wait for all pending chunks
        if self._pending_tasks:
            await asyncio.gather(*self._pending_tasks, return_exceptions=True)

        self._ended = True

        # End session in DO
        response = await self._client.post(
            f"{self._session_do_url}/end",
            headers={
                "X-Glacis-Session-Token": self._session_token,
            },
            json={"sessionId": self._session_id, "metadata": metadata},
        )

        if not response.is_success:
            try:
                error = response.json()
            except Exception:
                error = {}
            raise RuntimeError(
                f"Failed to end session: {error.get('error', response.status_code)}"
            )

        result = response.json()
        attest_payload = result["attestPayload"]

        # Submit to main transparency log
        attest_receipt = await self._glacis.attest(
            service_id=attest_payload["serviceId"],
            operation_type=attest_payload["operationType"],
            input={
                "sessionId": attest_payload["sessionId"],
                "sessionRoot": attest_payload["sessionRoot"],
            },
            output={"chunkCount": attest_payload["chunkCount"]},
            metadata={
                **(attest_payload.get("metadata") or {}),
                "sessionId": attest_payload["sessionId"],
            },
        )

        return SessionReceipt(
            session_id=attest_payload["sessionId"],
            session_root=attest_payload["sessionRoot"],
            chunk_count=attest_payload["chunkCount"],
            started_at=attest_payload["startedAt"],
            ended_at=attest_payload["endedAt"],
            attest_receipt=attest_receipt,
        )

    async def abort(self, reason: Optional[str] = None) -> None:
        """
        Abort the session without submitting to main log.
        """
        if self._ended:
            return

        self._ended = True

        response = await self._client.post(
            f"{self._session_do_url}/abandon",
            headers={
                "X-Glacis-Session-Token": self._session_token,
            },
            json={"sessionId": self._session_id, "reason": reason},
        )

        if not response.is_success:
            try:
                error = response.json()
            except Exception:
                error = {}
            raise RuntimeError(
                f"Failed to abort session: {error.get('error', response.status_code)}"
            )

    async def get_status(self) -> dict[str, Any]:
        """Get current session status."""
        response = await self._client.get(
            f"{self._session_do_url}/status",
            params={"sessionId": self._session_id},
            headers={
                "X-Glacis-Session-Token": self._session_token,
            },
        )

        if not response.is_success:
            try:
                error = response.json()
            except Exception:
                error = {}
            raise RuntimeError(
                f"Failed to get status: {error.get('error', response.status_code)}"
            )

        return response.json()

    async def __aenter__(self) -> "StreamingSession":
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        if exc_type is not None:
            await self.abort(str(exc_val) if exc_val else "Exception occurred")
        elif not self._ended:
            await self.end()
        await self._client.aclose()


class SessionContext:
    """
    Context manager for streaming sessions.

    Example:
        >>> async with SessionContext(glacis, config) as session:
        ...     await session.attest_chunk(input=data, output=result)
        ...     # Auto-ends and submits on exit
    """

    def __init__(
        self,
        glacis: Any,
        config: StreamingSessionConfig,
        metadata: Optional[dict[str, str]] = None,
    ):
        self._glacis = glacis
        self._config = config
        self._metadata = metadata
        self._session: Optional[StreamingSession] = None

    async def __aenter__(self) -> StreamingSession:
        self._session = await StreamingSession.start(self._glacis, self._config)
        return self._session

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        if self._session is None:
            return

        if exc_type is not None:
            await self._session.abort(str(exc_val) if exc_val else "Exception occurred")
        elif not self._session._ended:
            await self._session.end(metadata=self._metadata)
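For orientation, here is a minimal usage sketch of the module above; it is not part of the package. It assumes the package's AsyncGlacis client accepts an api_key argument and exposes the get_api_key() and attest() methods that StreamingSession calls; the API key, session_do_url, and chunk data are placeholders. Note that only local hashes of each chunk (via hash_payload) are sent to the session endpoint, and the session root is attested to the main log when the context exits cleanly.

```python
# Usage sketch only (not from glacis 0.1.0). Assumes AsyncGlacis(api_key=...)
# and the placeholder URL/key/data noted above.
import asyncio

from glacis import AsyncGlacis
from glacis.streaming import SessionContext, StreamingSessionConfig


async def main() -> None:
    glacis = AsyncGlacis(api_key="glsk_live_xxx")  # placeholder credential

    config: StreamingSessionConfig = {
        "service_id": "voice-assistant",
        "operation_type": "completion",
        "session_do_url": "https://session-do.example.dev",  # placeholder URL
    }

    # SessionContext starts the session on entry; on clean exit it calls
    # end(), which submits the session root to the main transparency log,
    # and on an exception it calls abort() instead.
    async with SessionContext(glacis, config, metadata={"channel": "demo"}) as session:
        for audio_chunk, transcript in [("audio-1", "hello"), ("audio-2", "world")]:
            # Each chunk is hashed locally and attested individually.
            await session.attest_chunk(input=audio_chunk, output=transcript)


if __name__ == "__main__":
    asyncio.run(main())
```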
glacis/wasm/s3p_core_wasi.wasm
ADDED
Binary file