polos_sdk-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- polos/__init__.py +105 -0
- polos/agents/__init__.py +7 -0
- polos/agents/agent.py +746 -0
- polos/agents/conversation_history.py +121 -0
- polos/agents/stop_conditions.py +280 -0
- polos/agents/stream.py +635 -0
- polos/core/__init__.py +0 -0
- polos/core/context.py +143 -0
- polos/core/state.py +26 -0
- polos/core/step.py +1380 -0
- polos/core/workflow.py +1192 -0
- polos/features/__init__.py +0 -0
- polos/features/events.py +456 -0
- polos/features/schedules.py +110 -0
- polos/features/tracing.py +605 -0
- polos/features/wait.py +82 -0
- polos/llm/__init__.py +9 -0
- polos/llm/generate.py +152 -0
- polos/llm/providers/__init__.py +5 -0
- polos/llm/providers/anthropic.py +615 -0
- polos/llm/providers/azure.py +42 -0
- polos/llm/providers/base.py +196 -0
- polos/llm/providers/fireworks.py +41 -0
- polos/llm/providers/gemini.py +40 -0
- polos/llm/providers/groq.py +40 -0
- polos/llm/providers/openai.py +1021 -0
- polos/llm/providers/together.py +40 -0
- polos/llm/stream.py +183 -0
- polos/middleware/__init__.py +0 -0
- polos/middleware/guardrail.py +148 -0
- polos/middleware/guardrail_executor.py +253 -0
- polos/middleware/hook.py +164 -0
- polos/middleware/hook_executor.py +104 -0
- polos/runtime/__init__.py +0 -0
- polos/runtime/batch.py +87 -0
- polos/runtime/client.py +841 -0
- polos/runtime/queue.py +42 -0
- polos/runtime/worker.py +1365 -0
- polos/runtime/worker_server.py +249 -0
- polos/tools/__init__.py +0 -0
- polos/tools/tool.py +587 -0
- polos/types/__init__.py +23 -0
- polos/types/types.py +116 -0
- polos/utils/__init__.py +27 -0
- polos/utils/agent.py +27 -0
- polos/utils/client_context.py +41 -0
- polos/utils/config.py +12 -0
- polos/utils/output_schema.py +311 -0
- polos/utils/retry.py +47 -0
- polos/utils/serializer.py +167 -0
- polos/utils/tracing.py +27 -0
- polos/utils/worker_singleton.py +40 -0
- polos_sdk-0.1.0.dist-info/METADATA +650 -0
- polos_sdk-0.1.0.dist-info/RECORD +55 -0
- polos_sdk-0.1.0.dist-info/WHEEL +4 -0
polos/types/types.py
ADDED
@@ -0,0 +1,116 @@
"""Type definitions for agent execution steps, tool calls, and usage."""

from typing import Any

from pydantic import BaseModel, ConfigDict


class Usage(BaseModel):
    """Token usage information from LLM calls."""

    input_tokens: int = 0
    output_tokens: int = 0
    total_tokens: int = 0


class ToolCallFunction(BaseModel):
    """Function information within a tool call."""

    name: str
    arguments: str  # JSON string


class ToolCall(BaseModel):
    """A tool call made by the LLM."""

    id: str
    type: str = "function"
    function: ToolCallFunction
    call_id: str | None = None


class ToolResult(BaseModel):
    """Result from executing a tool."""

    tool_name: str
    status: str  # "completed" or "failed"
    result: Any | None = None
    result_schema: str | None = None
    error: str | None = None
    tool_call_id: str
    tool_call_call_id: str


class Step(BaseModel):
    """A step in agent execution."""

    step: int
    content: Any | None = None
    tool_calls: list[ToolCall] = []
    tool_results: list[ToolResult] = []
    usage: Usage | None = None
    raw_output: Any | None = None


class BatchWorkflowInput(BaseModel):
    """Input for batch workflow invocation.

    Attributes:
        id: The workflow ID to invoke
        payload: The payload to pass to the workflow (can be a dict or a Pydantic model)
        initial_state: Optional initial workflow state (Pydantic model or dict)
        run_timeout_seconds: Optional per-run timeout in seconds
    """

    id: str
    payload: Any | None = None
    initial_state: BaseModel | dict[str, Any] | None = None
    run_timeout_seconds: int | None = None


class BatchStepResult(BaseModel):
    """Result of a batch workflow invocation.

    Attributes:
        workflow_id: The workflow ID that was invoked
        success: Whether the workflow completed successfully
        result: The result from the workflow (if successful)
        error: Error message (if failed)
    """

    # Ignore extra fields that may be added by the orchestrator
    model_config = ConfigDict(extra="ignore")

    workflow_id: str
    success: bool
    result: Any | None = None
    error: str | None = None


class AgentResult(BaseModel):
    """Final result returned from an agent stream/run execution."""

    agent_run_id: str
    conversation_id: str | None = None
    result: Any | None = None
    result_schema: str | None = None
    tool_results: list[ToolResult] = []
    total_steps: int
    usage: Usage


class AgentConfig(BaseModel):
    """Configuration for agent execution."""

    name: str
    provider: str
    model: str
    tools: list[dict[str, Any]] = []
    system_prompt: str | None = None
    max_output_tokens: int | None = None
    temperature: float | None = None
    top_p: float | None = None
    provider_base_url: str | None = None
    provider_llm_api: str | None = None
    provider_kwargs: dict[str, Any] | None = None
    output_schema: dict[str, Any] | None = None
    output_schema_name: str | None = None
    guardrail_max_retries: int | None = None
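For orientation, a minimal sketch of how these models compose, assuming the modules are importable under the paths shown in the file listing above (illustrative only, not part of the wheel):

    from polos.types.types import Step, ToolCall, ToolCallFunction, ToolResult, Usage

    call = ToolCall(
        id="tc_1",
        function=ToolCallFunction(name="get_weather", arguments='{"city": "Oslo"}'),
    )
    result = ToolResult(
        tool_name="get_weather",
        status="completed",
        result={"temp_c": 4},
        tool_call_id=call.id,
        tool_call_call_id=call.call_id or call.id,
    )
    step = Step(step=1, tool_calls=[call], tool_results=[result], usage=Usage(total_tokens=42))
    print(step.model_dump_json())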
polos/utils/__init__.py
ADDED
@@ -0,0 +1,27 @@
"""Utility functions for Polos runtime."""

from .agent import convert_input_to_messages
from .config import is_localhost_url
from .output_schema import convert_output_schema
from .retry import retry_with_backoff
from .serializer import (
    deserialize,
    deserialize_agent_result,
    is_json_serializable,
    json_serialize,
    safe_serialize,
    serialize,
)

__all__ = [
    "convert_input_to_messages",
    "convert_output_schema",
    "is_json_serializable",
    "serialize",
    "json_serialize",
    "safe_serialize",
    "deserialize",
    "deserialize_agent_result",
    "retry_with_backoff",
    "is_localhost_url",
]
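These re-exports flatten the import surface, so callers can pull the helpers straight from the package (illustrative):

    from polos.utils import convert_input_to_messages, retry_with_backoff, safe_serialize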
polos/utils/agent.py
ADDED
@@ -0,0 +1,27 @@
"""Helpers for converting agent input into chat message lists."""

from typing import Any


def convert_input_to_messages(
    input_data: str | list[dict[str, Any]], system_prompt: str | None = None
) -> list[dict[str, Any]]:
    """
    Convert input to messages format.

    Args:
        input_data: String or array of input items
        system_prompt: Optional system prompt

    Returns:
        List of message dicts
    """
    messages = []

    if system_prompt:
        messages.append({"role": "system", "content": system_prompt})

    if isinstance(input_data, str):
        messages.append({"role": "user", "content": input_data})
    elif isinstance(input_data, list):
        messages.extend(input_data)

    return messages
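A quick sketch of the resulting message list (illustrative):

    from polos.utils import convert_input_to_messages

    messages = convert_input_to_messages("What's the weather?", system_prompt="Be terse.")
    # [{'role': 'system', 'content': 'Be terse.'},
    #  {'role': 'user', 'content': "What's the weather?"}]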
polos/utils/client_context.py
ADDED
@@ -0,0 +1,41 @@
"""Client context utilities for getting PolosClient from worker context."""

from typing import TYPE_CHECKING

from .worker_singleton import get_current_worker

if TYPE_CHECKING:
    from ..runtime.client import PolosClient


def get_client_from_context() -> "PolosClient | None":
    """Get PolosClient from current worker context.

    Returns:
        PolosClient instance if available, None otherwise
    """
    worker = get_current_worker()
    if worker and hasattr(worker, "polos_client"):
        # Import here to avoid circular dependency
        from ..runtime.client import PolosClient

        if isinstance(worker.polos_client, PolosClient):
            return worker.polos_client
    return None


def get_client_or_raise() -> "PolosClient":
    """Get PolosClient from context or raise error.

    Returns:
        PolosClient instance from worker context

    Raises:
        RuntimeError: If no PolosClient is available in context
    """
    client = get_client_from_context()
    if client is None:
        raise RuntimeError(
            "No PolosClient available. Pass client parameter or run in Worker context."
        )
    return client
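A hedged sketch of the intended call pattern; `fetch_status` is a hypothetical function, and the lookup only succeeds when a Worker has registered itself via the singleton (illustrative):

    from polos.utils.client_context import get_client_from_context, get_client_or_raise

    def fetch_status() -> str:
        # Inside a running Worker this resolves the shared client;
        # anywhere else it raises RuntimeError.
        client = get_client_or_raise()
        return repr(client)

    # Or degrade gracefully when no worker context exists:
    client = get_client_from_context()
    if client is None:
        print("running outside a Worker; pass a client explicitly")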
polos/utils/config.py
ADDED
@@ -0,0 +1,12 @@
def is_localhost_url(url: str) -> bool:
    """Check if URL is a localhost address."""
    try:
        if url:
            from urllib.parse import urlparse

            parsed = urlparse(url)
            hostname = parsed.hostname or ""
            return hostname in ("localhost", "127.0.0.1", "::1") or hostname.startswith("127.")
        return False
    except Exception:
        return False
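Expected behavior, as a few illustrative checks (not part of the package):

    from polos.utils import is_localhost_url

    assert is_localhost_url("http://localhost:8080/api")
    assert is_localhost_url("http://127.0.0.1")
    assert is_localhost_url("http://[::1]:3000")        # urlparse strips the brackets
    assert not is_localhost_url("https://api.example.com")
    assert not is_localhost_url("")                     # empty/unparseable inputs return False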
polos/utils/output_schema.py
ADDED
@@ -0,0 +1,311 @@
"""Utility functions for converting Pydantic models to JSON schemas for structured output."""

import copy
from typing import Any


def convert_output_schema(
    output_schema: type[Any] | None, context_id: str = ""
) -> tuple[dict[str, Any] | None, str | None]:
    """
    Convert a Pydantic model class to a JSON schema dict and name for structured output.

    Args:
        output_schema: Pydantic model class (v1 or v2)
        context_id: Optional context identifier for error messages (e.g., agent ID)

    Returns:
        Tuple of (output_schema_dict, output_schema_name) where:
        - output_schema_dict: JSON schema dictionary (None if output_schema is None)
        - output_schema_name: Name of the model class (None if output_schema is None)

    Raises:
        ValueError: If output_schema is not a Pydantic model or conversion fails
    """
    if output_schema is None:
        return None, None

    try:
        # Check if it's a Pydantic model
        if hasattr(output_schema, "model_json_schema"):
            # Pydantic v2 - validate fields first
            _validate_output_schema_v2(output_schema, context_id)

            schema_dict = output_schema.model_json_schema()
            # Inline $ref references
            _inline_refs(schema_dict)
            # Structured output requires additionalProperties: false
            if "additionalProperties" not in schema_dict:
                schema_dict["additionalProperties"] = False
            # Structured output requires all properties to be in the required array
            if "properties" in schema_dict:
                all_properties = set(schema_dict["properties"].keys())
                required_properties = set(schema_dict.get("required", []))
                # Add any missing properties to required
                if all_properties != required_properties:
                    schema_dict["required"] = list(all_properties)
            # Also ensure nested objects have additionalProperties: false
            _ensure_additional_properties_false(schema_dict)
            # Also ensure all nested schemas in $defs have required arrays
            _ensure_required_for_all_properties(schema_dict)
            return schema_dict, output_schema.__name__
        elif hasattr(output_schema, "schema"):
            # Pydantic v1 - validate fields first
            _validate_output_schema_v1(output_schema, context_id)

            schema_dict = output_schema.schema()
            # Inline $ref references
            _inline_refs(schema_dict)
            # Structured output requires additionalProperties: false
            if "additionalProperties" not in schema_dict:
                schema_dict["additionalProperties"] = False
            # Structured output requires all properties to be in the required array
            if "properties" in schema_dict:
                all_properties = set(schema_dict["properties"].keys())
                required_properties = set(schema_dict.get("required", []))
                # Add any missing properties to required
                if all_properties != required_properties:
                    schema_dict["required"] = list(all_properties)
            # Also ensure nested objects have additionalProperties: false
            _ensure_additional_properties_false(schema_dict)
            # Also ensure all nested schemas in $defs have required arrays
            _ensure_required_for_all_properties(schema_dict)
            return schema_dict, output_schema.__name__
        else:
            raise ValueError(
                f"output_schema must be a Pydantic model class. Got: {type(output_schema)}"
            )
    except Exception as e:
        raise ValueError(
            "Failed to convert output_schema to JSON schema"
            f"{f' for {context_id}' if context_id else ''}: {e}"
        ) from e


def _validate_output_schema_v2(model_class: type[Any], context_id: str = "") -> None:
    """Validate that all optional fields have default values (Pydantic v2)."""
    if not hasattr(model_class, "model_fields"):
        return

    invalid_fields = []
    for _field_name, field_info in model_class.model_fields.items():
        # Check if field is Optional (Union[Type, None] or Type | None)
        import types
        import typing

        field_type = field_info.annotation
        is_optional = False
        if hasattr(typing, "get_origin"):
            origin = typing.get_origin(field_type)
            # Handle both typing.Union and types.UnionType (for Python 3.10+ syntax like int | None)
            if origin is typing.Union or origin is types.UnionType:
                args = typing.get_args(field_type)
                is_optional = type(None) in args

        # Check if field has a default value
        # In Pydantic v2, PydanticUndefined is the sentinel for "no default"
        has_default = False
        try:
            from pydantic_core import PydanticUndefined

            has_default = field_info.default is not PydanticUndefined
        except ImportError:
            # Fallback: PydanticUndefined might be in a different location
            # Check if default is a sentinel value by comparing to known sentinels
            default = field_info.default
            # If default is None, it's an actual default value (not undefined)
            # If default is ..., it's not a default
            # Check if it's the PydanticUndefined sentinel by checking its type/name
            if default is None:
                has_default = True  # None is a valid default
            elif default is not ... and not (
                isinstance(default, type)
                and hasattr(default, "__name__")
                and "Undefined" in default.__name__
            ):
                has_default = True

        # Also check default_factory
        if not has_default:
            has_default = field_info.default_factory is not None

        # If field is Optional but doesn't have a default, it's invalid
        if is_optional and not has_default:
            invalid_fields.append(_field_name)

    if invalid_fields:
        fields_str = ", ".join(invalid_fields)
        context_str = f" for {context_id}" if context_id else ""
        error_msg = (
            f"Invalid output_schema{context_str}: Optional fields must have "
            f"default values. Fields without defaults: {fields_str}. "
            f"Either add default values (e.g., Optional[str] = None) or "
            f"make them required (remove Optional)."
        )
        raise ValueError(error_msg)


def _validate_output_schema_v1(model_class: type[Any], context_id: str = "") -> None:
    """Validate that all optional fields have default values (Pydantic v1)."""
    if not hasattr(model_class, "__fields__"):
        return

    invalid_fields = []
    for _field_name, field_info in model_class.__fields__.items():
        # Check if field is Optional
        field_type = (
            field_info.outer_type_ if hasattr(field_info, "outer_type_") else field_info.type_
        )
        import types
        import typing

        is_optional = False
        if hasattr(typing, "get_origin"):
            origin = typing.get_origin(field_type)
            # Handle both typing.Union and types.UnionType (for Python 3.10+ syntax like int | None)
            if origin is typing.Union or origin is types.UnionType:
                args = typing.get_args(field_type)
                is_optional = type(None) in args
        elif hasattr(field_type, "__origin__") and field_type.__origin__ is typing.Union:
            args = field_type.__args__
            is_optional = type(None) in args

        # Check if the field has a default value. In Pydantic v1, `required`
        # is False whenever a default or default_factory is present (Optional
        # fields get an implicit default of None), so it is the reliable
        # indicator here.
        has_default = not getattr(field_info, "required", False)

        # If field is Optional but doesn't have a default, it's invalid
        if is_optional and not has_default:
            invalid_fields.append(_field_name)

    if invalid_fields:
        fields_str = ", ".join(invalid_fields)
        context_str = f" for {context_id}" if context_id else ""
        error_msg = (
            f"Invalid output_schema{context_str}: Optional fields must have "
            f"default values. Fields without defaults: {fields_str}. "
            f"Either add default values (e.g., Optional[str] = None) or "
            f"make them required (remove Optional)."
        )
        raise ValueError(error_msg)


def _ensure_additional_properties_false(schema: dict[str, Any]) -> None:
    """Recursively ensure all object schemas have additionalProperties: false."""
    if isinstance(schema, dict):
        # If this is an object type, ensure additionalProperties: false
        if schema.get("type") == "object":
            schema["additionalProperties"] = False
        # Recursively process properties
        if "properties" in schema:
            for prop_schema in schema["properties"].values():
                _ensure_additional_properties_false(prop_schema)
        # Recursively process items (for arrays)
        if "items" in schema:
            _ensure_additional_properties_false(schema["items"])
        # Recursively process anyOf, oneOf, allOf
        for key in ["anyOf", "oneOf", "allOf"]:
            if key in schema:
                for sub_schema in schema[key]:
                    _ensure_additional_properties_false(sub_schema)
        # Recursively process $defs (definitions) - Pydantic v2 uses this for nested models
        if "$defs" in schema:
            for _def_name, def_schema in schema["$defs"].items():
                _ensure_additional_properties_false(def_schema)
        # Recursively process definitions (Pydantic v1 uses this)
        if "definitions" in schema:
            for _def_name, def_schema in schema["definitions"].items():
                _ensure_additional_properties_false(def_schema)
        # $ref references cannot be modified directly, but the referenced
        # schemas live in $defs or definitions, which are processed above


def _ensure_required_for_all_properties(schema: dict[str, Any]) -> None:
    """Recursively ensure all object schemas have all properties in the required array."""
    if isinstance(schema, dict):
        # If this is an object type with properties, ensure all properties are in required
        if schema.get("type") == "object" and "properties" in schema:
            all_properties = set(schema["properties"].keys())
            current_required = set(schema.get("required", []))
            # Add any missing properties to required
            if all_properties != current_required:
                schema["required"] = list(all_properties)

        # Recursively process properties
        if "properties" in schema:
            for prop_schema in schema["properties"].values():
                _ensure_required_for_all_properties(prop_schema)
        # Recursively process items (for arrays)
        if "items" in schema:
            _ensure_required_for_all_properties(schema["items"])
        # Recursively process anyOf, oneOf, allOf
        for key in ["anyOf", "oneOf", "allOf"]:
            if key in schema:
                for sub_schema in schema[key]:
                    _ensure_required_for_all_properties(sub_schema)
        # Recursively process $defs (definitions) - Pydantic v2 uses this for nested models
        if "$defs" in schema:
            for _def_name, def_schema in schema["$defs"].items():
                _ensure_required_for_all_properties(def_schema)
        # Recursively process definitions (Pydantic v1 uses this)
        if "definitions" in schema:
            for _def_name, def_schema in schema["definitions"].items():
                _ensure_required_for_all_properties(def_schema)


def _inline_refs(schema: dict[str, Any]) -> None:
    """Inline $ref references by replacing them with the actual schema from $defs/definitions."""
    if not isinstance(schema, dict):
        return

    # Collect all definitions first
    defs = {}
    if "$defs" in schema:
        defs.update(schema["$defs"])
    if "definitions" in schema:
        defs.update(schema["definitions"])

    # Recursively inline refs in the schema
    _inline_refs_recursive(schema, defs)


def _inline_refs_recursive(obj: Any, defs: dict[str, Any]) -> None:
    """Recursively inline $ref references in a schema object."""
    if isinstance(obj, dict):
        # If this object has a $ref, inline it
        if "$ref" in obj:
            ref_path = obj["$ref"]
            # Parse $ref path (e.g., "#/$defs/PersonName" or "#/definitions/PersonName")
            if ref_path.startswith("#/$defs/") or ref_path.startswith("#/definitions/"):
                ref_name = ref_path.split("/")[-1]
                if ref_name in defs:
                    # Get the referenced schema
                    ref_schema = defs[ref_name]
                    # Deep copy the referenced schema
                    inlined = copy.deepcopy(ref_schema)
                    # Preserve any other keys from the original object (like description)
                    # but remove $ref since we're inlining
                    other_keys = {k: v for k, v in obj.items() if k != "$ref"}
                    # Merge: start with the inlined schema, then add other keys
                    obj.clear()
                    obj.update(inlined)
                    # Merge other keys (description, etc.) - but be careful not to
                    # override schema properties
                    for key, value in other_keys.items():
                        if key not in obj or key in ["description", "title"]:
                            obj[key] = value
                    # Recursively process the inlined schema
                    _inline_refs_recursive(obj, defs)
                    return

        # Recursively process all values
        for key, value in obj.items():
            if key not in ["$defs", "definitions"]:  # Skip definitions themselves
                _inline_refs_recursive(value, defs)
    elif isinstance(obj, list):
        for item in obj:
            _inline_refs_recursive(item, defs)
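A sketch of the conversion on a nested Pydantic v2 model, showing the $ref inlining and the required/additionalProperties normalization (illustrative, not part of the wheel):

    from pydantic import BaseModel

    from polos.utils import convert_output_schema

    class Address(BaseModel):
        city: str

    class Person(BaseModel):
        name: str
        address: Address
        nickname: str | None = None  # Optional fields must carry defaults

    schema, name = convert_output_schema(Person, context_id="demo-agent")
    assert name == "Person"
    assert schema["additionalProperties"] is False
    assert sorted(schema["required"]) == ["address", "name", "nickname"]
    # The Address $ref has been inlined into the parent schema:
    assert schema["properties"]["address"]["type"] == "object"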
polos/utils/retry.py
ADDED
@@ -0,0 +1,47 @@
"""Retry utilities with exponential backoff."""

import asyncio
from collections.abc import Callable
from typing import Any


async def retry_with_backoff(
    func: Callable,
    max_retries: int = 2,
    base_delay: float = 1.0,
    max_delay: float = 10.0,
    *args,
    **kwargs,
) -> Any:
    """
    Retry a function with exponential backoff.

    Args:
        func: Async function to retry
        max_retries: Maximum number of retries (default: 2)
        base_delay: Base delay in seconds for exponential backoff (default: 1.0)
        max_delay: Maximum delay in seconds (default: 10.0)
        *args: Positional arguments to pass to func
        **kwargs: Keyword arguments to pass to func

    Returns:
        Result from func

    Raises:
        Exception: If all retries are exhausted
    """
    last_exception = None
    for attempt in range(max_retries + 1):
        try:
            return await func(*args, **kwargs)
        except Exception as e:
            last_exception = e
            if attempt < max_retries:
                # Calculate delay with exponential backoff
                delay = min(base_delay * (2**attempt), max_delay)
                await asyncio.sleep(delay)
            else:
                # All retries exhausted
                raise last_exception from None
    # Should never reach here, but just in case
    raise last_exception from None
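A self-contained sketch of the retry loop in action; `flaky` is a hypothetical coroutine that fails twice before succeeding (illustrative):

    import asyncio

    from polos.utils import retry_with_backoff

    attempts = 0

    async def flaky(x: int) -> int:
        """Raises on the first two calls, then returns."""
        global attempts
        attempts += 1
        if attempts < 3:
            raise ConnectionError("transient")
        return x * 2

    result = asyncio.run(
        retry_with_backoff(flaky, max_retries=3, base_delay=0.01, max_delay=0.1, x=21)
    )
    assert result == 42 and attempts == 3  # succeeded on the third attempt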