glaip-sdk 0.0.7__py3-none-any.whl → 0.6.5b6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- glaip_sdk/__init__.py +6 -3
- glaip_sdk/_version.py +12 -5
- glaip_sdk/agents/__init__.py +27 -0
- glaip_sdk/agents/base.py +1126 -0
- glaip_sdk/branding.py +79 -15
- glaip_sdk/cli/account_store.py +540 -0
- glaip_sdk/cli/agent_config.py +2 -6
- glaip_sdk/cli/auth.py +699 -0
- glaip_sdk/cli/commands/__init__.py +2 -2
- glaip_sdk/cli/commands/accounts.py +746 -0
- glaip_sdk/cli/commands/agents.py +503 -183
- glaip_sdk/cli/commands/common_config.py +101 -0
- glaip_sdk/cli/commands/configure.py +774 -137
- glaip_sdk/cli/commands/mcps.py +1124 -181
- glaip_sdk/cli/commands/models.py +25 -10
- glaip_sdk/cli/commands/tools.py +144 -92
- glaip_sdk/cli/commands/transcripts.py +755 -0
- glaip_sdk/cli/commands/update.py +61 -0
- glaip_sdk/cli/config.py +95 -0
- glaip_sdk/cli/constants.py +38 -0
- glaip_sdk/cli/context.py +150 -0
- glaip_sdk/cli/core/__init__.py +79 -0
- glaip_sdk/cli/core/context.py +124 -0
- glaip_sdk/cli/core/output.py +846 -0
- glaip_sdk/cli/core/prompting.py +649 -0
- glaip_sdk/cli/core/rendering.py +187 -0
- glaip_sdk/cli/display.py +143 -53
- glaip_sdk/cli/hints.py +57 -0
- glaip_sdk/cli/io.py +24 -18
- glaip_sdk/cli/main.py +420 -145
- glaip_sdk/cli/masking.py +136 -0
- glaip_sdk/cli/mcp_validators.py +287 -0
- glaip_sdk/cli/pager.py +266 -0
- glaip_sdk/cli/parsers/__init__.py +7 -0
- glaip_sdk/cli/parsers/json_input.py +177 -0
- glaip_sdk/cli/resolution.py +28 -21
- glaip_sdk/cli/rich_helpers.py +27 -0
- glaip_sdk/cli/slash/__init__.py +15 -0
- glaip_sdk/cli/slash/accounts_controller.py +500 -0
- glaip_sdk/cli/slash/accounts_shared.py +75 -0
- glaip_sdk/cli/slash/agent_session.py +282 -0
- glaip_sdk/cli/slash/prompt.py +245 -0
- glaip_sdk/cli/slash/remote_runs_controller.py +566 -0
- glaip_sdk/cli/slash/session.py +1679 -0
- glaip_sdk/cli/slash/tui/__init__.py +9 -0
- glaip_sdk/cli/slash/tui/accounts.tcss +86 -0
- glaip_sdk/cli/slash/tui/accounts_app.py +872 -0
- glaip_sdk/cli/slash/tui/background_tasks.py +72 -0
- glaip_sdk/cli/slash/tui/loading.py +58 -0
- glaip_sdk/cli/slash/tui/remote_runs_app.py +628 -0
- glaip_sdk/cli/transcript/__init__.py +31 -0
- glaip_sdk/cli/transcript/cache.py +536 -0
- glaip_sdk/cli/transcript/capture.py +329 -0
- glaip_sdk/cli/transcript/export.py +38 -0
- glaip_sdk/cli/transcript/history.py +815 -0
- glaip_sdk/cli/transcript/launcher.py +77 -0
- glaip_sdk/cli/transcript/viewer.py +372 -0
- glaip_sdk/cli/update_notifier.py +290 -0
- glaip_sdk/cli/utils.py +247 -1238
- glaip_sdk/cli/validators.py +16 -18
- glaip_sdk/client/__init__.py +2 -1
- glaip_sdk/client/_agent_payloads.py +520 -0
- glaip_sdk/client/agent_runs.py +147 -0
- glaip_sdk/client/agents.py +940 -574
- glaip_sdk/client/base.py +163 -48
- glaip_sdk/client/main.py +35 -12
- glaip_sdk/client/mcps.py +126 -18
- glaip_sdk/client/run_rendering.py +415 -0
- glaip_sdk/client/shared.py +21 -0
- glaip_sdk/client/tools.py +195 -37
- glaip_sdk/client/validators.py +20 -48
- glaip_sdk/config/constants.py +15 -5
- glaip_sdk/exceptions.py +16 -9
- glaip_sdk/icons.py +25 -0
- glaip_sdk/mcps/__init__.py +21 -0
- glaip_sdk/mcps/base.py +345 -0
- glaip_sdk/models/__init__.py +90 -0
- glaip_sdk/models/agent.py +47 -0
- glaip_sdk/models/agent_runs.py +116 -0
- glaip_sdk/models/common.py +42 -0
- glaip_sdk/models/mcp.py +33 -0
- glaip_sdk/models/tool.py +33 -0
- glaip_sdk/payload_schemas/__init__.py +7 -0
- glaip_sdk/payload_schemas/agent.py +85 -0
- glaip_sdk/registry/__init__.py +55 -0
- glaip_sdk/registry/agent.py +164 -0
- glaip_sdk/registry/base.py +139 -0
- glaip_sdk/registry/mcp.py +253 -0
- glaip_sdk/registry/tool.py +231 -0
- glaip_sdk/rich_components.py +98 -2
- glaip_sdk/runner/__init__.py +59 -0
- glaip_sdk/runner/base.py +84 -0
- glaip_sdk/runner/deps.py +115 -0
- glaip_sdk/runner/langgraph.py +597 -0
- glaip_sdk/runner/mcp_adapter/__init__.py +13 -0
- glaip_sdk/runner/mcp_adapter/base_mcp_adapter.py +43 -0
- glaip_sdk/runner/mcp_adapter/langchain_mcp_adapter.py +158 -0
- glaip_sdk/runner/mcp_adapter/mcp_config_builder.py +95 -0
- glaip_sdk/runner/tool_adapter/__init__.py +18 -0
- glaip_sdk/runner/tool_adapter/base_tool_adapter.py +44 -0
- glaip_sdk/runner/tool_adapter/langchain_tool_adapter.py +177 -0
- glaip_sdk/tools/__init__.py +22 -0
- glaip_sdk/tools/base.py +435 -0
- glaip_sdk/utils/__init__.py +59 -13
- glaip_sdk/utils/a2a/__init__.py +34 -0
- glaip_sdk/utils/a2a/event_processor.py +188 -0
- glaip_sdk/utils/agent_config.py +53 -40
- glaip_sdk/utils/bundler.py +267 -0
- glaip_sdk/utils/client.py +111 -0
- glaip_sdk/utils/client_utils.py +58 -26
- glaip_sdk/utils/datetime_helpers.py +58 -0
- glaip_sdk/utils/discovery.py +78 -0
- glaip_sdk/utils/display.py +65 -32
- glaip_sdk/utils/export.py +143 -0
- glaip_sdk/utils/general.py +1 -36
- glaip_sdk/utils/import_export.py +20 -25
- glaip_sdk/utils/import_resolver.py +492 -0
- glaip_sdk/utils/instructions.py +101 -0
- glaip_sdk/utils/rendering/__init__.py +115 -1
- glaip_sdk/utils/rendering/formatting.py +85 -43
- glaip_sdk/utils/rendering/layout/__init__.py +64 -0
- glaip_sdk/utils/rendering/{renderer → layout}/panels.py +51 -19
- glaip_sdk/utils/rendering/layout/progress.py +202 -0
- glaip_sdk/utils/rendering/layout/summary.py +74 -0
- glaip_sdk/utils/rendering/layout/transcript.py +606 -0
- glaip_sdk/utils/rendering/models.py +39 -7
- glaip_sdk/utils/rendering/renderer/__init__.py +9 -51
- glaip_sdk/utils/rendering/renderer/base.py +672 -759
- glaip_sdk/utils/rendering/renderer/config.py +4 -10
- glaip_sdk/utils/rendering/renderer/debug.py +75 -22
- glaip_sdk/utils/rendering/renderer/factory.py +138 -0
- glaip_sdk/utils/rendering/renderer/stream.py +13 -54
- glaip_sdk/utils/rendering/renderer/summary_window.py +79 -0
- glaip_sdk/utils/rendering/renderer/thinking.py +273 -0
- glaip_sdk/utils/rendering/renderer/toggle.py +182 -0
- glaip_sdk/utils/rendering/renderer/tool_panels.py +442 -0
- glaip_sdk/utils/rendering/renderer/transcript_mode.py +162 -0
- glaip_sdk/utils/rendering/state.py +204 -0
- glaip_sdk/utils/rendering/step_tree_state.py +100 -0
- glaip_sdk/utils/rendering/steps/__init__.py +34 -0
- glaip_sdk/utils/rendering/steps/event_processor.py +778 -0
- glaip_sdk/utils/rendering/steps/format.py +176 -0
- glaip_sdk/utils/rendering/steps/manager.py +387 -0
- glaip_sdk/utils/rendering/timing.py +36 -0
- glaip_sdk/utils/rendering/viewer/__init__.py +21 -0
- glaip_sdk/utils/rendering/viewer/presenter.py +184 -0
- glaip_sdk/utils/resource_refs.py +29 -26
- glaip_sdk/utils/runtime_config.py +422 -0
- glaip_sdk/utils/serialization.py +184 -51
- glaip_sdk/utils/sync.py +142 -0
- glaip_sdk/utils/tool_detection.py +33 -0
- glaip_sdk/utils/validation.py +21 -30
- {glaip_sdk-0.0.7.dist-info → glaip_sdk-0.6.5b6.dist-info}/METADATA +58 -12
- glaip_sdk-0.6.5b6.dist-info/RECORD +159 -0
- {glaip_sdk-0.0.7.dist-info → glaip_sdk-0.6.5b6.dist-info}/WHEEL +1 -1
- glaip_sdk/models.py +0 -250
- glaip_sdk/utils/rendering/renderer/progress.py +0 -118
- glaip_sdk/utils/rendering/steps.py +0 -232
- glaip_sdk/utils/rich_utils.py +0 -29
- glaip_sdk-0.0.7.dist-info/RECORD +0 -55
- {glaip_sdk-0.0.7.dist-info → glaip_sdk-0.6.5b6.dist-info}/entry_points.txt +0 -0
glaip_sdk/utils/serialization.py
CHANGED
@@ -7,13 +7,19 @@ Authors:
     Raymond Christopher (raymond.christopher@gdplabs.id)
 """
 
+import importlib
 import json
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import yaml
 
+if TYPE_CHECKING:  # pragma: no cover - type-only imports
+    from rich.console import Console
+
+    from glaip_sdk.models import MCP
+
 
 def read_json(file_path: Path) -> dict[str, Any]:
     """Read data from JSON file.
@@ -61,20 +67,12 @@ def read_yaml(file_path: Path) -> dict[str, Any]:
         data = yaml.safe_load(f)
 
     # Handle instruction_lines array format for user-friendly YAML
-    if (
-        isinstance(data, dict)
-        and "instruction_lines" in data
-        and isinstance(data["instruction_lines"], list)
-    ):
+    if isinstance(data, dict) and "instruction_lines" in data and isinstance(data["instruction_lines"], list):
         data["instruction"] = "\n\n".join(data["instruction_lines"])
         del data["instruction_lines"]
 
     # Handle instruction as list from YAML export (convert back to string)
-    if (
-        isinstance(data, dict)
-        and "instruction" in data
-        and isinstance(data["instruction"], list)
-    ):
+    if isinstance(data, dict) and "instruction" in data and isinstance(data["instruction"], list):
         data["instruction"] = "\n\n".join(data["instruction"])
 
     return data
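For context, a minimal standalone sketch of what the reflowed read_yaml branch does with the user-friendly instruction_lines format (only PyYAML is assumed; the sample document is invented):

import yaml

raw = yaml.safe_load(
    "name: weather_reporter\n"
    "instruction_lines:\n"
    "  - Greet the user.\n"
    "  - Report the weather.\n"
)
if isinstance(raw, dict) and isinstance(raw.get("instruction_lines"), list):
    raw["instruction"] = "\n\n".join(raw["instruction_lines"])
    del raw["instruction_lines"]

print(raw["instruction"])
# Greet the user.
#
# Report the weather.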
@@ -90,11 +88,20 @@ def write_yaml(file_path: Path, data: dict[str, Any]) -> None:
 
     # Custom YAML dumper for user-friendly instruction formatting
     class LiteralString(str):
+        """String subclass for YAML literal block scalar formatting."""
+
         pass
 
-    def literal_string_representer(
-
-
+    def literal_string_representer(dumper: yaml.Dumper, data: "LiteralString") -> yaml.nodes.Node:
+        """YAML representer for LiteralString to use literal block scalar style.
+
+        Args:
+            dumper: YAML dumper instance.
+            data: LiteralString instance to represent.
+
+        Returns:
+            YAML node with literal block scalar style for multiline strings.
+        """
         # Use literal block scalar (|) for multiline strings to preserve formatting
         if "\n" in data:
             return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
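The LiteralString/representer pair above keeps multiline instructions readable in exported YAML. A self-contained sketch of the same trick (PyYAML only; the document below is made up):

import yaml

class LiteralString(str):
    """Marker type: dump this string as a YAML literal block scalar."""

def literal_string_representer(dumper, data):
    # Use the "|" block style only when the string actually spans lines.
    style = "|" if "\n" in data else None
    return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=style)

yaml.add_representer(LiteralString, literal_string_representer)

doc = {"instruction": LiteralString("You are a weather reporter.\n\nBe concise.")}
print(yaml.dump(doc, default_flow_style=False, sort_keys=False))
# instruction: |-
#   You are a weather reporter.
#
#   Be concise.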
@@ -109,9 +116,7 @@ def write_yaml(file_path: Path, data: dict[str, Any]) -> None:
         data["instruction"] = LiteralString(data["instruction"])
 
     with open(file_path, "w", encoding="utf-8") as f:
-        yaml.dump(
-            data, f, default_flow_style=False, allow_unicode=True, sort_keys=False
-        )
+        yaml.dump(data, f, default_flow_style=False, allow_unicode=True, sort_keys=False)
 
 
 def load_resource_from_file(file_path: Path) -> dict[str, Any]:
@@ -131,14 +136,10 @@ def load_resource_from_file(file_path: Path) -> dict[str, Any]:
     elif file_path.suffix.lower() == ".json":
         return read_json(file_path)
     else:
-        raise ValueError(
-            f"Unsupported file format: {file_path.suffix}. Only JSON and YAML files are supported."
-        )
+        raise ValueError(f"Unsupported file format: {file_path.suffix}. Only JSON and YAML files are supported.")
 
 
-def write_resource_export(
-    file_path: Path, data: dict[str, Any], format: str = "json"
-) -> None:
+def write_resource_export(file_path: Path, data: dict[str, Any], format: str = "json") -> None:
     """Write resource export data to file.
 
     Args:
@@ -182,15 +183,9 @@ def collect_attributes_for_export(resource: Any) -> dict[str, Any]:
     and callables are filtered out so the result only contains user-configurable
     data.
     """
-
     mapping = _coerce_resource_to_mapping(resource)
-    if (
-        mapping is None
-    ):  # pragma: no cover - defensive fallback when attribute introspection fails
-        items = (
-            (name, _safe_getattr(resource, name))
-            for name in _iter_public_attribute_names(resource)
-        )
+    if mapping is None:  # pragma: no cover - defensive fallback when attribute introspection fails
+        items = ((name, _safe_getattr(resource, name)) for name in _iter_public_attribute_names(resource))
     else:
         items = mapping.items()
 
@@ -198,12 +193,35 @@ def collect_attributes_for_export(resource: Any) -> dict[str, Any]:
     for key, value in items:
         if _should_include_attribute(key, value):
             export[key] = value
+
+    # Post-process agent exports to clean up unwanted transformations
+    if hasattr(resource, "__class__") and resource.__class__.__name__ == "Agent":
+        export = _clean_agent_export_data(export)
+
     return export
 
 
+def _clean_agent_export_data(agent_data: dict[str, Any]) -> dict[str, Any]:
+    """Clean up agent export data to remove unwanted transformations.
+
+    This function addresses the issue where the backend API transforms
+    the 'timeout' field into 'execution_timeout' in an 'agent_config' section
+    during export, which is not desired for clean agent configuration exports.
+    """
+    cleaned = agent_data.copy()
+
+    # Remove execution_timeout from agent_config if it exists
+    if "agent_config" in cleaned and isinstance(cleaned["agent_config"], dict):
+        agent_config = cleaned["agent_config"]
+        if "execution_timeout" in agent_config:
+            # Move execution_timeout back to root level as timeout
+            cleaned["timeout"] = agent_config.pop("execution_timeout")
+
+    return cleaned
+
+
 def _coerce_resource_to_mapping(resource: Any) -> dict[str, Any] | None:
     """Return a mapping representation of ``resource`` when possible."""
-
     for attr in _PREFERRED_MAPPERS:
         method = getattr(resource, attr, None)
         if callable(method):
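To make the agent-export clean-up concrete, here is a small illustration of the transformation that _clean_agent_export_data reverses (the field values are invented):

exported = {
    "name": "weather_reporter",
    "agent_config": {"execution_timeout": 600, "model": "gpt-4.1"},
}

cleaned = exported.copy()
agent_config = cleaned.get("agent_config")
if isinstance(agent_config, dict) and "execution_timeout" in agent_config:
    # The backend's execution_timeout moves back to the top-level timeout field.
    cleaned["timeout"] = agent_config.pop("execution_timeout")

print(cleaned)
# {'name': 'weather_reporter', 'agent_config': {'model': 'gpt-4.1'}, 'timeout': 600}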
@@ -217,46 +235,75 @@ def _coerce_resource_to_mapping(resource: Any) -> dict[str, Any] | None:
     if isinstance(resource, dict):
         return resource
 
-
-
+    try:
+        if hasattr(resource, "__dict__"):
             return dict(resource.__dict__)
-
-
-    ):  # pragma: no cover - pathological objects can still defeat coercion
-        return None
+    except Exception:  # pragma: no cover - pathological objects can still defeat coercion
+        return None
 
     return None
 
 
 def _iter_public_attribute_names(resource: Any) -> Iterable[str]:
     """Yield attribute names we should inspect on ``resource``."""
-
     seen: set[str] = set()
     names: list[str] = []
 
     def _collect(candidates: Iterable[str] | None) -> None:
-
-
-
+        """Collect unique candidate attribute names.
+
+        Args:
+            candidates: Iterable of candidate attribute names.
+        """
+        for candidate in candidates or ():
             if candidate not in seen:
                 seen.add(candidate)
                 names.append(candidate)
 
-
-
-
-
-    )
-
+    # Collect from __dict__
+    _collect_from_dict(resource, _collect)
+
+    # Collect from __annotations__
+    _collect_from_annotations(resource, _collect)
+
+    # Collect from __slots__
     _collect(getattr(resource, "__slots__", ()))
 
+    # Fallback to dir() if no names found
     if not names:
-
+        _collect_from_dir(resource, _collect)
 
     return iter(names)
 
 
+def _collect_from_dict(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
+    """Safely collect attribute names from __dict__."""
+    try:
+        if hasattr(resource, "__dict__"):
+            dict_keys = getattr(resource, "__dict__", {})
+            if dict_keys:
+                collect_func(dict_keys.keys())
+    except Exception:  # pragma: no cover - defensive programming
+        pass
+
+
+def _collect_from_annotations(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
+    """Safely collect attribute names from __annotations__."""
+    annotations = getattr(resource, "__annotations__", {})
+    if annotations:
+        collect_func(annotations.keys())
+
+
+def _collect_from_dir(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
+    """Safely collect attribute names from dir()."""
+    try:
+        collect_func(name for name in dir(resource) if not name.startswith("__"))
+    except Exception:  # pragma: no cover - defensive programming
+        pass
+
+
 def _safe_getattr(resource: Any, name: str) -> Any:
+    """Return getattr(resource, name) but swallow any exception and return None."""
     try:
         return getattr(resource, name)
     except Exception:
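The refactor above splits attribute discovery into per-source helpers. A rough standalone sketch of the discovery order they implement (illustrative only, not the SDK's actual code):

from typing import Any, Iterable

def public_attribute_names(resource: Any) -> list[str]:
    names: list[str] = []
    seen: set[str] = set()

    def collect(candidates: Iterable[str]) -> None:
        for name in candidates:
            if name not in seen:
                seen.add(name)
                names.append(name)

    collect(getattr(resource, "__dict__", {}).keys())         # instance attributes
    collect(getattr(resource, "__annotations__", {}).keys())  # annotated class fields
    collect(getattr(resource, "__slots__", ()))               # slotted classes
    if not names:                                             # last resort
        collect(n for n in dir(resource) if not n.startswith("__"))
    return names

class Example:
    name: str

    def __init__(self) -> None:
        self.name = "demo"
        self.timeout = 300

print(public_attribute_names(Example()))  # ['name', 'timeout']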
@@ -264,6 +311,7 @@ def _safe_getattr(resource: Any, name: str) -> Any:
 
 
 def _should_include_attribute(key: str, value: Any) -> bool:
+    """Return True when an attribute should be serialized."""
     if key in _EXCLUDED_ATTRS or key in _EXCLUDED_NAMES:
         return False
     if key.startswith("_"):
@@ -273,6 +321,91 @@ def _should_include_attribute(key: str, value: Any) -> bool:
     return True
 
 
+def strip_empty_fields(data: dict[str, Any]) -> dict[str, Any]:
+    """Recursively remove None values and empty dictionaries from a dictionary.
+
+    Args:
+        data: Dictionary to clean
+
+    Returns:
+        Cleaned dictionary with None values and empty dicts removed
+    """
+    if not isinstance(data, dict):
+        return data
+
+    cleaned = {}
+    for key, value in data.items():
+        if value is None:
+            continue
+        if isinstance(value, dict):
+            nested = strip_empty_fields(value)
+            if nested:  # Only include non-empty dicts
+                cleaned[key] = nested
+        else:
+            cleaned[key] = value
+
+    return cleaned
+
+
+def build_mcp_export_payload(
+    mcp: "MCP",
+    *,
+    prompt_for_secrets: bool,
+    placeholder: str,
+    console: "Console",
+) -> dict[str, Any]:
+    """Build MCP export payload with authentication secret handling.
+
+    This function prepares an MCP resource for export by:
+    1. Starting from model_dump(exclude_none=True) for API alignment
+    2. Cleaning internal fields (_client, empty metadata)
+    3. Processing authentication with secret capture/placeholder logic
+    4. Removing empty fields recursively
+
+    Args:
+        mcp: MCP model instance to export
+        prompt_for_secrets: Whether to interactively prompt for missing secrets
+        placeholder: Placeholder text for missing secrets
+        console: Rich Console instance for user interaction
+
+    Returns:
+        Dictionary ready for export (JSON/YAML serialization)
+
+    Raises:
+        ImportError: If required modules (auth helpers) are not available
+    """
+    auth_module = importlib.import_module("glaip_sdk.cli.auth")
+    prepare_authentication_export = auth_module.prepare_authentication_export
+
+    # Start with model dump (excludes None values automatically)
+    payload = mcp.model_dump(exclude_none=True)
+
+    # Remove internal/CLI fields
+    payload.pop("_client", None)
+
+    # Remove empty metadata dict
+    if "metadata" in payload and not payload["metadata"]:
+        payload.pop("metadata")
+
+    # Process authentication section
+    if "authentication" in payload:
+        processed_auth = prepare_authentication_export(
+            payload["authentication"],
+            prompt_for_secrets=prompt_for_secrets,
+            placeholder=placeholder,
+            console=console,
+        )
+        if processed_auth:
+            payload["authentication"] = processed_auth
+        else:
+            payload.pop("authentication")
+
+    # Apply final cleanup to remove any remaining empty fields
+    payload = strip_empty_fields(payload)
+
+    return payload
+
+
 def validate_json_string(json_str: str) -> dict[str, Any]:
     """Validate JSON string and return parsed data.
 
@@ -288,4 +421,4 @@ def validate_json_string(json_str: str) -> dict[str, Any]:
     try:
         return json.loads(json_str)
     except json.JSONDecodeError as e:
-        raise ValueError(f"Invalid JSON: {e}")
+        raise ValueError(f"Invalid JSON: {e}") from e
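A quick usage sketch for the new strip_empty_fields helper added above (the payload values are invented):

from glaip_sdk.utils.serialization import strip_empty_fields

payload = {
    "name": "github-mcp",
    "metadata": {},                                       # empty dict -> dropped
    "authentication": {"type": "bearer", "token": None},  # None value -> dropped
    "timeout": None,                                      # None -> dropped
}
print(strip_empty_fields(payload))
# {'name': 'github-mcp', 'authentication': {'type': 'bearer'}}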
glaip_sdk/utils/sync.py
ADDED
@@ -0,0 +1,142 @@
+"""Agent and tool synchronization (create/update) operations.
+
+This module provides convenience functions for tool classes that need bundling.
+
+For direct upsert operations, use the client methods:
+- client.agents.upsert_agent(identifier, **kwargs)
+- client.tools.upsert_tool(identifier, code, **kwargs)
+- client.mcps.upsert_mcp(identifier, **kwargs)
+
+Authors:
+    Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from glaip_sdk.utils.bundler import ToolBundler
+from glaip_sdk.utils.import_resolver import load_class
+from gllm_core.utils import LoggerManager
+
+if TYPE_CHECKING:
+    from glaip_sdk.models import Agent, Tool
+
+logger = LoggerManager().get_logger(__name__)
+
+
+def _extract_tool_name(tool_class: Any) -> str:
+    """Extract tool name from a class, handling Pydantic v2 models."""
+    # Direct attribute access (works for non-Pydantic classes)
+    if hasattr(tool_class, "name"):
+        name = getattr(tool_class, "name", None)
+        if isinstance(name, str):
+            return name
+
+    # Pydantic v2 model - check model_fields
+    if hasattr(tool_class, "model_fields"):
+        model_fields = getattr(tool_class, "model_fields", {})
+        if "name" in model_fields:
+            field_info = model_fields["name"]
+            if hasattr(field_info, "default") and isinstance(field_info.default, str):
+                return field_info.default
+
+    raise ValueError(f"Cannot extract name from tool class: {tool_class}")
+
+
+def _extract_tool_description(tool_class: Any) -> str:
+    """Extract tool description from a class, handling Pydantic v2 models."""
+    # Direct attribute access
+    if hasattr(tool_class, "description"):
+        desc = getattr(tool_class, "description", None)
+        if isinstance(desc, str):
+            return desc
+
+    # Pydantic v2 model - check model_fields
+    if hasattr(tool_class, "model_fields"):
+        model_fields = getattr(tool_class, "model_fields", {})
+        if "description" in model_fields:
+            field_info = model_fields["description"]
+            if hasattr(field_info, "default") and isinstance(field_info.default, str):
+                return field_info.default
+
+    return ""
+
+
+def update_or_create_tool(tool_ref: Any) -> Tool:
+    """Create or update a tool from a tool class with bundled source code.
+
+    This function takes a tool class (LangChain BaseTool), bundles its source
+    code with inlined imports, and creates/updates it in the backend.
+
+    Args:
+        tool_ref: A tool class (LangChain BaseTool subclass) or import path string.
+
+    Returns:
+        The created or updated tool.
+
+    Example:
+        >>> from glaip_sdk.utils.sync import update_or_create_tool
+        >>> from my_tools import WeatherAPITool
+        >>> tool = update_or_create_tool(WeatherAPITool)
+    """
+    from glaip_sdk.utils.client import get_client  # noqa: PLC0415
+
+    client = get_client()
+
+    # Handle string import path
+    if isinstance(tool_ref, str):
+        tool_class = load_class(tool_ref)
+    else:
+        tool_class = tool_ref
+
+    # Get tool info - handle Pydantic v2 model classes
+    tool_name = _extract_tool_name(tool_class)
+    tool_description = _extract_tool_description(tool_class)
+
+    # Bundle source code
+    bundler = ToolBundler(tool_class)
+    bundled_source = bundler.bundle()
+
+    logger.info("Tool info: name='%s', description='%s...'", tool_name, tool_description[:50])
+    logger.info("Bundled source code: %d characters", len(bundled_source))
+
+    # Use client's upsert method
+    return client.tools.upsert_tool(
+        tool_name,
+        code=bundled_source,
+        description=tool_description,
+    )
+
+
+def update_or_create_agent(agent_config: dict[str, Any]) -> Agent:
+    """Create or update an agent from configuration.
+
+    Args:
+        agent_config: Agent configuration dictionary containing:
+            - name (str): Agent name (required)
+            - description (str): Agent description
+            - instruction (str): Agent instruction
+            - tools (list, optional): List of tool IDs
+            - agents (list, optional): List of sub-agent IDs
+            - metadata (dict, optional): Additional metadata
+
+    Returns:
+        The created or updated agent.
+
+    Example:
+        >>> from glaip_sdk.utils.sync import update_or_create_agent
+        >>> config = {
+        ...     "name": "weather_reporter",
+        ...     "description": "Weather reporting agent",
+        ...     "instruction": "You are a weather reporter.",
+        ... }
+        >>> agent = update_or_create_agent(config)
+    """
+    from glaip_sdk.utils.client import get_client  # noqa: PLC0415
+
+    client = get_client()
+    agent_name = agent_config.pop("name")
+
+    # Use client's upsert method
+    return client.agents.upsert_agent(agent_name, **agent_config)
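The _extract_tool_name/_extract_tool_description fallbacks exist because on a Pydantic v2 LangChain tool class the declared defaults live in model_fields rather than as plain string class attributes. A hedged sketch (assumes a recent langchain-core where BaseTool is a Pydantic v2 model; the tool itself is invented):

from langchain_core.tools import BaseTool

class WeatherAPITool(BaseTool):
    name: str = "weather_api"
    description: str = "Look up the current weather for a city."

    def _run(self, city: str) -> str:  # minimal placeholder implementation
        return f"Sunny in {city}"

fields = WeatherAPITool.model_fields
print(fields["name"].default)         # weather_api
print(fields["description"].default)  # Look up the current weather for a city.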
glaip_sdk/utils/tool_detection.py
ADDED
@@ -0,0 +1,33 @@
+"""Shared utilities for tool type detection.
+
+Authors:
+    Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+"""
+
+from typing import Any
+
+
+def is_langchain_tool(ref: Any) -> bool:
+    """Check if ref is a LangChain BaseTool class or instance.
+
+    Shared by:
+    - ToolRegistry._is_custom_tool() (for upload detection)
+    - LangChainToolAdapter._is_langchain_tool() (for adaptation)
+
+    Args:
+        ref: Object to check.
+
+    Returns:
+        True if ref is a LangChain BaseTool class or instance.
+    """
+    try:
+        from langchain_core.tools import BaseTool  # noqa: PLC0415
+
+        if isinstance(ref, type) and issubclass(ref, BaseTool):
+            return True
+        if isinstance(ref, BaseTool):
+            return True
+    except ImportError:
+        pass
+
+    return False
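A short usage sketch for the new helper (the @tool function is invented; langchain-core must be installed for the True cases):

from langchain_core.tools import tool

from glaip_sdk.utils.tool_detection import is_langchain_tool

@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

print(is_langchain_tool(add))        # True - @tool returns a BaseTool instance
print(is_langchain_tool(type(add)))  # True - a BaseTool subclass
print(is_langchain_tool("add"))      # False - plain strings are not tools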
glaip_sdk/utils/validation.py
CHANGED
@@ -11,12 +11,23 @@ import re
 from pathlib import Path
 from typing import Any
 
+from glaip_sdk.config.constants import DEFAULT_AGENT_RUN_TIMEOUT
 from glaip_sdk.utils.resource_refs import validate_name_format
 
 # Constants for validation
 RESERVED_NAMES = ["admin", "root", "system", "api", "test", "demo"]
 
 
+def _validate_named_resource(name: str, resource_type: str) -> str:
+    """Shared validator that prevents reserved-name duplication."""
+    cleaned_name = validate_name_format(name, resource_type)
+
+    if cleaned_name.lower() in RESERVED_NAMES:
+        raise ValueError(f"{resource_type.capitalize()} name '{cleaned_name}' is reserved and cannot be used")
+
+    return cleaned_name
+
+
 def validate_agent_name(name: str) -> str:
     """Validate agent name and return cleaned version.
 
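A hedged sketch of how the shared reserved-name check now surfaces to callers (the names are examples; the exact cleaned form depends on validate_name_format):

from glaip_sdk.utils.validation import validate_agent_name, validate_tool_name

print(validate_agent_name("weather_reporter"))  # expected to pass and return the cleaned name

try:
    validate_tool_name("admin")  # "admin" is in RESERVED_NAMES
except ValueError as err:
    print(err)  # e.g. Tool name 'admin' is reserved and cannot be used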
@@ -29,13 +40,7 @@ def validate_agent_name(name: str) -> str:
     Raises:
         ValueError: If name is invalid
     """
-
-
-    # Check for reserved names
-    if cleaned_name.lower() in RESERVED_NAMES:
-        raise ValueError(f"'{cleaned_name}' is a reserved name and cannot be used")
-
-    return cleaned_name
+    return _validate_named_resource(name, "agent")
 
 
 def validate_agent_instruction(instruction: str) -> str:
@@ -73,13 +78,7 @@ def validate_tool_name(name: str) -> str:
     Raises:
         ValueError: If name is invalid
     """
-
-
-    # Check for reserved names
-    if cleaned_name.lower() in RESERVED_NAMES:
-        raise ValueError(f"'{cleaned_name}' is a reserved name and cannot be used")
-
-    return cleaned_name
+    return _validate_named_resource(name, "tool")
 
 
 def validate_mcp_name(name: str) -> str:
@@ -94,13 +93,7 @@ def validate_mcp_name(name: str) -> str:
     Raises:
         ValueError: If name is invalid
     """
-
-
-    # Check for reserved names
-    if cleaned_name.lower() in RESERVED_NAMES:
-        raise ValueError(f"'{cleaned_name}' is a reserved name and cannot be used")
-
-    return cleaned_name
+    return _validate_named_resource(name, "mcp")
 
 
 def validate_timeout(timeout: int) -> int:
@@ -142,8 +135,6 @@ def coerce_timeout(value: Any) -> int:
         coerce_timeout("300") -> 300
         coerce_timeout(None) -> 300  # Uses DEFAULT_AGENT_RUN_TIMEOUT
     """
-    from glaip_sdk.config.constants import DEFAULT_AGENT_RUN_TIMEOUT
-
     if value is None:
         return DEFAULT_AGENT_RUN_TIMEOUT
     elif isinstance(value, int):
@@ -156,13 +147,13 @@ def coerce_timeout(value: Any) -> int:
         try:
             fval = float(value)
             return validate_timeout(int(fval))
-        except ValueError:
-            raise ValueError(f"Invalid timeout value: {value}")
+        except ValueError as err:
+            raise ValueError(f"Invalid timeout value: {value}") from err
     else:
         try:
             return validate_timeout(int(value))
-        except (TypeError, ValueError):
-            raise ValueError(f"Invalid timeout value: {value}")
+        except (TypeError, ValueError) as err:
+            raise ValueError(f"Invalid timeout value: {value}") from err
 
 
 def validate_file_path(file_path: str | Path, must_exist: bool = True) -> Path:
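The switch to raise ... from err keeps the original parsing failure attached as __cause__. Illustrative sketch (the input string is invented):

from glaip_sdk.utils.validation import coerce_timeout

try:
    coerce_timeout("not-a-number")
except ValueError as exc:
    print(exc)            # Invalid timeout value: not-a-number
    print(exc.__cause__)  # the underlying ValueError raised by float()/int()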
@@ -214,7 +205,7 @@ def validate_directory_path(dir_path: str | Path, must_exist: bool = True) -> Path:
 
 
 def validate_url(url: str) -> str:
-    """Validate URL format.
+    """Validate URL format (HTTPS only).
 
     Args:
         url: URL to validate
@@ -226,7 +217,7 @@ def validate_url(url: str) -> str:
         ValueError: If URL is invalid
     """
     url_pattern = re.compile(
-        r"^https
+        r"^https://"  # https:// only
         r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|"  # domain...
        r"localhost|"  # localhost...
        r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"  # ...or ip
@@ -236,7 +227,7 @@ def validate_url(url: str) -> str:
     )
 
     if not url_pattern.match(url):
-        raise ValueError(
+        raise ValueError("API URL must start with https:// and be a valid host.")
 
     return url
 
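A hedged sketch of the stricter HTTPS-only behaviour (the URLs are examples):

from glaip_sdk.utils.validation import validate_url

try:
    validate_url("http://insecure.example.com")  # plain http no longer matches
except ValueError as err:
    print(err)  # API URL must start with https:// and be a valid host.

print(validate_url("https://localhost"))  # https URLs matching the host pattern are returned unchanged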