polos-sdk 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. polos/__init__.py +105 -0
  2. polos/agents/__init__.py +7 -0
  3. polos/agents/agent.py +746 -0
  4. polos/agents/conversation_history.py +121 -0
  5. polos/agents/stop_conditions.py +280 -0
  6. polos/agents/stream.py +635 -0
  7. polos/core/__init__.py +0 -0
  8. polos/core/context.py +143 -0
  9. polos/core/state.py +26 -0
  10. polos/core/step.py +1380 -0
  11. polos/core/workflow.py +1192 -0
  12. polos/features/__init__.py +0 -0
  13. polos/features/events.py +456 -0
  14. polos/features/schedules.py +110 -0
  15. polos/features/tracing.py +605 -0
  16. polos/features/wait.py +82 -0
  17. polos/llm/__init__.py +9 -0
  18. polos/llm/generate.py +152 -0
  19. polos/llm/providers/__init__.py +5 -0
  20. polos/llm/providers/anthropic.py +615 -0
  21. polos/llm/providers/azure.py +42 -0
  22. polos/llm/providers/base.py +196 -0
  23. polos/llm/providers/fireworks.py +41 -0
  24. polos/llm/providers/gemini.py +40 -0
  25. polos/llm/providers/groq.py +40 -0
  26. polos/llm/providers/openai.py +1021 -0
  27. polos/llm/providers/together.py +40 -0
  28. polos/llm/stream.py +183 -0
  29. polos/middleware/__init__.py +0 -0
  30. polos/middleware/guardrail.py +148 -0
  31. polos/middleware/guardrail_executor.py +253 -0
  32. polos/middleware/hook.py +164 -0
  33. polos/middleware/hook_executor.py +104 -0
  34. polos/runtime/__init__.py +0 -0
  35. polos/runtime/batch.py +87 -0
  36. polos/runtime/client.py +841 -0
  37. polos/runtime/queue.py +42 -0
  38. polos/runtime/worker.py +1365 -0
  39. polos/runtime/worker_server.py +249 -0
  40. polos/tools/__init__.py +0 -0
  41. polos/tools/tool.py +587 -0
  42. polos/types/__init__.py +23 -0
  43. polos/types/types.py +116 -0
  44. polos/utils/__init__.py +27 -0
  45. polos/utils/agent.py +27 -0
  46. polos/utils/client_context.py +41 -0
  47. polos/utils/config.py +12 -0
  48. polos/utils/output_schema.py +311 -0
  49. polos/utils/retry.py +47 -0
  50. polos/utils/serializer.py +167 -0
  51. polos/utils/tracing.py +27 -0
  52. polos/utils/worker_singleton.py +40 -0
  53. polos_sdk-0.1.0.dist-info/METADATA +650 -0
  54. polos_sdk-0.1.0.dist-info/RECORD +55 -0
  55. polos_sdk-0.1.0.dist-info/WHEEL +4 -0
polos/features/events.py
@@ -0,0 +1,456 @@
+ """Event publish/subscribe system for Polos."""
+
+ import json
+ import logging
+ from collections.abc import AsyncIterator
+ from datetime import datetime, timezone
+ from typing import Any
+ from urllib.parse import urlencode
+
+ import httpx
+ from pydantic import BaseModel, Field
+
+ from ..runtime.client import PolosClient
+ from ..utils.worker_singleton import get_worker_client
+
+ logger = logging.getLogger(__name__)
+
+
+ class EventData(BaseModel):
+     """Event data structure for publishing events.
+
+     Users can define a TypedDict for type hints:
+
+         from typing import TypedDict
+
+         class ApprovalData(TypedDict):
+             approved: bool
+             reason: str | None
+
+         event = EventData(
+             event_type="email.approval_received",
+             data={"approved": True, "reason": "Looks good"}  # TypedDict provides type hints
+         )
+
+     Attributes:
+         event_type: Type of event
+         data: Event payload (dict)
+     """
+
+     event_type: str | None = None
+     data: dict[str, Any]
+
+
+ class EventPayload(BaseModel):
+     """Event payload received when waiting for events in workflows.
+
+     This is returned by ctx.step.wait_for_event() when an event is received,
+     and by event-triggered workflows.
+
+     Attributes:
+         id: Event ID (UUID string)
+         sequence_id: Global sequence ID for ordering
+         topic: Event topic
+         event_type: Type of event
+         data: Event payload (dict)
+         created_at: Timestamp when event was created
+     """
+
+     id: str
+     sequence_id: int
+     topic: str
+     event_type: str | None = None
+     data: dict[str, Any]
+     created_at: datetime
+
+
+ class EventItem(BaseModel):
+     """Single event item in a batch of events.
+
+     Used in BatchEventPayload for event-triggered workflows with batching.
+
+     Attributes:
+         id: Event ID (UUID string)
+         sequence_id: Global sequence ID for ordering
+         topic: Event topic
+         event_type: Type of event
+         data: Event payload (dict)
+         created_at: Timestamp when event was created
+     """
+
+     id: str
+     sequence_id: int
+     topic: str
+     event_type: str | None = None
+     data: dict[str, Any]
+     created_at: datetime
+
+
+ class BatchEventPayload(BaseModel):
+     """Batch event payload for event-triggered workflows with batching.
+
+     This is the payload structure when a workflow is triggered by events
+     with batch_size > 1 or batch_timeout_seconds set.
+
+     Attributes:
+         events: List of events in the batch
+     """
+
+     events: list[EventItem] = Field(default_factory=list)
+
+
+ class StreamEvent(BaseModel):
+     """Event received from the event stream.
+
+     Attributes:
+         id: Event ID (UUID string)
+         sequence_id: Global sequence ID for ordering
+         topic: Event topic
+         event_type: Optional type of event
+         data: Event payload (dict)
+         created_at: Optional RFC3339 timestamp string
+     """
+
+     id: str
+     sequence_id: int
+     topic: str
+     event_type: str | None = None
+     data: dict[str, Any] = Field(default_factory=dict)
+     created_at: str | None = None
+
+
+ class Event:
+     """Represents an event in the event system."""
+
+     def __init__(
+         self,
+         id: str,
+         sequence_id: int,
+         topic: str,
+         event_type: str | None = None,
+         data: dict[str, Any] | None = None,
+         status: str = "valid",
+         execution_id: str | None = None,
+         attempt_number: int = 0,
+         created_at: datetime | None = None,
+     ):
+         self.id = id
+         self.sequence_id = sequence_id
+         self.topic = topic
+         self.event_type = event_type
+         self.data = data
+         self.status = status
+         self.execution_id = execution_id
+         self.attempt_number = attempt_number
+         self.created_at = created_at
+
+     def __repr__(self) -> str:
+         return (
+             f"Event(id={self.id}, sequence_id={self.sequence_id}, "
+             f"topic={self.topic}, event_type={self.event_type}, "
+             f"status={self.status})"
+         )
+
+
+ async def batch_publish(
+     client: PolosClient,
+     topic: str,
+     events: list[EventData],
+     execution_id: str | None = None,
+     root_execution_id: str | None = None,
+ ) -> list[int]:
+     """Publish a batch of events for a single topic. Returns a list of sequence_ids.
+
+     Args:
+         client: PolosClient instance
+         topic: Event topic (all events in the batch share this topic)
+         events: List of EventData instances, each with:
+             - event_type: str - Type of event
+             - data: dict[str, Any] - Event payload (can use TypedDict for type hints)
+         execution_id: Optional execution ID
+         root_execution_id: Optional root execution ID
+
+     Returns:
+         List of sequence IDs
+
+     Example:
+         from typing import TypedDict
+
+         class ApprovalData(TypedDict):
+             approved: bool
+             reason: str | None
+
+         events = [
+             EventData(
+                 event_type="email.approval_received",
+                 data={"approved": True, "reason": "Looks good"}
+             )
+         ]
+         sequence_ids = await batch_publish(client, "approvals", events)
+     """
+     if not events:
+         return []
+
+     # Publish events to orchestrator
+     api_url = client.api_url
+     headers = client._get_headers()
+
+     # Serialize events to JSON-compatible dicts for the request body
+     serialized_events = []
+     for e in events:
+         serialized_events.append(e.model_dump(exclude_none=True, mode="json"))
+
+     payload = {
+         "topic": topic,
+         "events": serialized_events,
+     }
+
+     # Include execution context if provided
+     if execution_id:
+         payload["execution_id"] = execution_id
+     if root_execution_id:
+         payload["root_execution_id"] = root_execution_id
+
+     # Try to reuse worker's HTTP client if available
+     worker_client = get_worker_client()
+     if worker_client is not None:
+         response = await worker_client.post(
+             f"{api_url}/api/v1/events/publish",
+             json=payload,
+             headers=headers,
+         )
+         response.raise_for_status()
+         result = response.json()
+         return result["sequence_ids"]
+     else:
+         # Avoid shadowing the PolosClient argument with the httpx client
+         async with httpx.AsyncClient() as http_client:
+             response = await http_client.post(
+                 f"{api_url}/api/v1/events/publish",
+                 json=payload,
+                 headers=headers,
+             )
+             response.raise_for_status()
+             result = response.json()
+             return result["sequence_ids"]
+
+
+ async def publish(
+     client: PolosClient,
+     topic: str,
+     event_type: str | None = None,
+     data: dict[str, Any] | None = None,
+     execution_id: str | None = None,
+     root_execution_id: str | None = None,
+ ) -> int | None:
+     """Publish a single event to a topic. Returns its sequence_id.
+
+     This calls batch_publish() internally with a single event.
+
+     Args:
+         client: PolosClient instance
+         topic: Event topic
+         event_type: Optional type of event
+         data: Event payload
+         execution_id: Optional execution ID
+         root_execution_id: Optional root execution ID
+
+     Returns:
+         sequence_id: Global sequence ID for the event
+     """
+     sequence_ids = await batch_publish(
+         topic=topic,
+         events=[
+             EventData(
+                 event_type=event_type,
+                 data=data or {},
+             )
+         ],
+         execution_id=execution_id,
+         root_execution_id=root_execution_id,
+         client=client,
+     )
+     return sequence_ids[0] if sequence_ids else None
+
+
+ async def _stream(
+     client: PolosClient,
+     topic: str | None = None,
+     workflow_run_id: str | None = None,
+     last_sequence_id: int | None = None,
+     last_timestamp: datetime | None = None,
+ ) -> AsyncIterator[StreamEvent]:
+     """Stream events from a topic or workflow using Server-Sent Events (SSE).
+
+     Returns an async iterator that yields StreamEvent Pydantic instances.
+     Each event contains: id, sequence_id, topic, event_type, data, created_at.
+     """
+     api_url = client.api_url
+
+     # Build query parameters
+     params = {
+         "project_id": client.project_id,
+     }
+
+     # If workflow_run_id is provided, use it (the API will construct the topic from it)
+     if workflow_run_id:
+         params["workflow_run_id"] = workflow_run_id
+     elif topic:
+         params["topic"] = topic
+     else:
+         raise ValueError("Either topic or workflow_run_id must be provided")
+
+     # Priority: last_sequence_id takes precedence over last_timestamp
+     if last_sequence_id is not None:
+         params["last_sequence_id"] = str(last_sequence_id)
+     elif last_timestamp is not None:
+         # Format timestamp as RFC3339 for the server
+         if last_timestamp.tzinfo is None:
+             # Assume UTC if no timezone info
+             last_timestamp = last_timestamp.replace(tzinfo=timezone.utc)
+         params["last_timestamp"] = last_timestamp.isoformat()
+     else:
+         # Default to current time if neither is provided
+         params["last_timestamp"] = datetime.now(timezone.utc).isoformat()
+
+     # Build URL with query parameters
+     url = f"{api_url}/api/v1/events/stream?{urlencode(params)}"
+
+     headers = client._get_headers()
+
+     async with (
+         httpx.AsyncClient(timeout=httpx.Timeout(None), headers=headers) as http_client,
+         http_client.stream("GET", url) as response,
+     ):
+         response.raise_for_status()
+
+         current_event_data = None
+
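+         # Frame format handled by the parser below (standard SSE framing):
+         # each event arrives as a "data: {...JSON...}" line, a blank line
+         # terminates the frame, and ":"-prefixed comments or "keepalive"
+         # lines are ignored.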
+         async for line in response.aiter_lines():
+             line = line.rstrip("\r\n")
+
+             # Empty line indicates end of event
+             if not line:
+                 if current_event_data:
+                     try:
+                         event_dict = json.loads(current_event_data)
+                         # Convert dict to StreamEvent Pydantic model
+                         event = StreamEvent.model_validate(event_dict)
+                         yield event
+                     except Exception:
+                         # Skip invalid events (bad JSON or failed validation)
+                         pass
+                 current_event_data = None
+                 continue
+
+             # SSE format: data: {...}
+             if line.startswith("data: "):
+                 data_str = line[6:]  # Remove "data: " prefix
+                 current_event_data = data_str
+             elif line == "keepalive" or line.startswith(":"):
+                 # Skip keepalive messages and comments
+                 continue
+
+
+ def stream_topic(
+     client: PolosClient,
+     topic: str,
+     last_sequence_id: int | None = None,
+     last_timestamp: datetime | None = None,
+ ) -> AsyncIterator[StreamEvent]:
+     """Stream events from a topic using Server-Sent Events (SSE).
+
+     Returns an async iterator that yields StreamEvent Pydantic instances.
+     Each event contains: id, sequence_id, topic, event_type, data, created_at.
+
+     Args:
+         client: PolosClient instance
+         topic: Event topic to stream.
+         last_sequence_id: Optional sequence ID to start streaming after. If provided,
+             streaming begins after this sequence ID.
+         last_timestamp: Optional timestamp to start streaming after. If provided
+             and last_sequence_id is None, streaming begins after this timestamp.
+
+     Yields:
+         StreamEvent: Pydantic model with fields: id, topic, event_type, data,
+             sequence_id, created_at
+
+     Example:
+         async for event in events.stream_topic(client, "review/123"):
+             if event.event_type == "message":
+                 print(event.data.get("message"))
+             elif event.event_type == "result":
+                 print(event.data.get("result"))
+     """
+     return _stream(
+         client=client, topic=topic, last_sequence_id=last_sequence_id, last_timestamp=last_timestamp
+     )
+
+
+ def stream_workflow(
+     client: PolosClient,
+     workflow_run_id: str,
+     last_sequence_id: int | None = None,
+     last_timestamp: datetime | None = None,
+ ) -> AsyncIterator[StreamEvent]:
+     """Stream events from a workflow using Server-Sent Events (SSE).
+
+     Returns an async iterator that yields StreamEvent Pydantic instances.
+     Each event contains: id, sequence_id, topic, event_type, data, created_at.
+
+     The iterator stops automatically when it receives a finish event
+     (workflow_finish, agent_finish, or tool_finish) with a matching
+     execution_id, indicating the workflow has completed.
+
+     Args:
+         client: PolosClient instance
+         workflow_run_id: Workflow run ID. Streams events for the
+             "workflow:{workflow_run_id}" topic.
+         last_sequence_id: Optional sequence ID to start streaming after. If provided,
+             streaming begins after this sequence ID.
+         last_timestamp: Optional timestamp to start streaming after. If provided
+             and last_sequence_id is None, streaming begins after this timestamp.
+
+     Yields:
+         StreamEvent: Pydantic model with fields: id, topic, event_type, data,
+             sequence_id, created_at
+
+     Example:
+         async for event in events.stream_workflow(client, workflow_run_id):
+             if event.event_type == "message":
+                 print(event.data.get("message"))
+             elif event.event_type == "result":
+                 print(event.data.get("result"))
+     """
+
+     async def _stream_with_finish_check():
+         async for event in _stream(
+             client=client,
+             workflow_run_id=workflow_run_id,
+             last_sequence_id=last_sequence_id,
+             last_timestamp=last_timestamp,
+         ):
+             yield event
+
+             # Check for finish event with matching execution_id
+             if event.event_type in ["workflow_finish", "agent_finish", "tool_finish"]:
+                 event_data = event.data
+                 if isinstance(event_data, dict):
+                     metadata = event_data.get("_metadata", {})
+                     if isinstance(metadata, dict):
+                         execution_id = metadata.get("execution_id")
+                         if execution_id == workflow_run_id:
+                             # Workflow streaming is complete, stop iterating
+                             break
+
+     return _stream_with_finish_check()
+
+
+ # Module-level instance for convenience
+ events = type(
+     "Events",
+     (),
+     {
+         # staticmethod prevents the functions from being bound to the
+         # instance, which would silently consume the `client` argument
+         "publish": staticmethod(publish),
+         "batch_publish": staticmethod(batch_publish),
+         "stream_topic": staticmethod(stream_topic),
+         "stream_workflow": staticmethod(stream_workflow),
+     },
+ )()
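For orientation, a minimal usage sketch of the events API above, assuming a PolosClient is already constructed (its constructor is outside this diff) and using an illustrative topic and payload:

import asyncio

from polos.features.events import events
from polos.runtime.client import PolosClient


async def main() -> None:
    client = PolosClient(...)  # hypothetical setup; constructor args are outside this diff

    # Publish a single event; returns the event's global sequence ID
    seq = await events.publish(
        client,
        topic="approvals",
        event_type="email.approval_received",
        data={"approved": True, "reason": "Looks good"},
    )

    # Stream the topic starting just before the event we published,
    # so the stream includes it (streaming begins after last_sequence_id)
    async for event in events.stream_topic(client, "approvals", last_sequence_id=seq - 1):
        print(event.event_type, event.data)
        break  # one event is enough for this sketch


asyncio.run(main())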
polos/features/schedules.py
@@ -0,0 +1,110 @@
+ """Schedule management for Polos workflows."""
2
+
3
+ from datetime import datetime
4
+
5
+ import httpx
6
+ from pydantic import BaseModel
7
+
8
+ from ..runtime.client import PolosClient
9
+ from ..utils.worker_singleton import get_worker_client
10
+
11
+
12
+ class SchedulePayload(BaseModel):
13
+ """Payload passed to scheduled workflows.
14
+
15
+ Attributes:
16
+ timestamp: When this workflow was scheduled to run
17
+ last_timestamp: When this schedule last ran (None if first run)
18
+ timezone: Timezone of the schedule
19
+ schedule_id: Unique identifier for this schedule
20
+ key: User ID or custom identifier for the schedule
21
+ upcoming: Next scheduled run time
22
+ """
23
+
24
+ timestamp: datetime
25
+ last_timestamp: datetime | None
26
+ timezone: str
27
+ schedule_id: str
28
+ key: str
29
+ upcoming: datetime
30
+
31
+
32
+ async def create(
+     client: PolosClient,
+     workflow: str,
+     cron: str,
+     timezone: str = "UTC",
+     key: str = "global",
+ ) -> str:
+     """Create or update a schedule for a workflow.
+
+     Schedules are upserted on (workflow, key): if a schedule with the same
+     workflow and key already exists, it is updated in place. Passing a
+     distinct key per user or entity therefore keeps multiple schedules for
+     the same workflow.
+
+     Args:
+         client: PolosClient instance
+         workflow: Workflow ID to schedule
+         cron: Cron expression (e.g., "0 8 * * *" for 8 AM daily)
+         timezone: Timezone for the schedule (default: "UTC")
+         key: Key for per-user/per-entity schedules. Defaults to "global".
+             If a schedule with the same workflow and key exists, it is updated.
+
+     Returns:
+         schedule_id: Unique identifier for the schedule
+
+     Example:
+         # Per-user schedule (updates if the same key exists)
+         await schedules.create(
+             client,
+             workflow="daily-reminder",
+             cron="0 8 * * *",
+             timezone="America/New_York",
+             key=user.id
+         )
+
+         # Global schedule (uses the default "global" key)
+         await schedules.create(
+             client,
+             workflow="system-cleanup",
+             cron="0 3 * * *"
+         )
+     """
+     api_url = client.api_url
+     headers = client._get_headers()
+
+     payload = {
+         "workflow_id": workflow,
+         "cron": cron,
+         "timezone": timezone,
+         "key": key,
+     }
+
+     # Try to reuse worker's HTTP client if available
+     worker_client = get_worker_client()
+     if worker_client is not None:
+         response = await worker_client.post(
+             f"{api_url}/api/v1/schedules",
+             json=payload,
+             headers=headers,
+         )
+         response.raise_for_status()
+         result = response.json()
+         return result["schedule_id"]
+     else:
+         # Avoid shadowing the PolosClient argument with the httpx client
+         async with httpx.AsyncClient() as http_client:
+             response = await http_client.post(
+                 f"{api_url}/api/v1/schedules",
+                 json=payload,
+                 headers=headers,
+             )
+             response.raise_for_status()
+             result = response.json()
+             return result["schedule_id"]
+
+
+ # Module-level instance for convenience
+ schedules = type(
+     "Schedules",
+     (),
+     {
+         # staticmethod prevents binding to the instance (see the events module)
+         "create": staticmethod(create),
+     },
+ )()
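And a matching sketch for the schedules API, under the same assumptions (PolosClient construction is outside this diff; the workflow ID and key are illustrative):

import asyncio

from polos.features.schedules import schedules
from polos.runtime.client import PolosClient


async def main() -> None:
    client = PolosClient(...)  # hypothetical setup; constructor args are outside this diff

    # Upsert an 8 AM daily reminder for one user; calling this again with
    # the same key updates the schedule rather than creating a duplicate
    schedule_id = await schedules.create(
        client,
        workflow="daily-reminder",
        cron="0 8 * * *",
        timezone="America/New_York",
        key="user-42",
    )
    print(f"schedule: {schedule_id}")


asyncio.run(main())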