glaip-sdk 0.6.12__py3-none-any.whl → 0.6.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. glaip_sdk/__init__.py +42 -5
  2. {glaip_sdk-0.6.12.dist-info → glaip_sdk-0.6.14.dist-info}/METADATA +31 -37
  3. glaip_sdk-0.6.14.dist-info/RECORD +12 -0
  4. {glaip_sdk-0.6.12.dist-info → glaip_sdk-0.6.14.dist-info}/WHEEL +2 -1
  5. glaip_sdk-0.6.14.dist-info/entry_points.txt +2 -0
  6. glaip_sdk-0.6.14.dist-info/top_level.txt +1 -0
  7. glaip_sdk/agents/__init__.py +0 -27
  8. glaip_sdk/agents/base.py +0 -1191
  9. glaip_sdk/cli/__init__.py +0 -9
  10. glaip_sdk/cli/account_store.py +0 -540
  11. glaip_sdk/cli/agent_config.py +0 -78
  12. glaip_sdk/cli/auth.py +0 -699
  13. glaip_sdk/cli/commands/__init__.py +0 -5
  14. glaip_sdk/cli/commands/accounts.py +0 -746
  15. glaip_sdk/cli/commands/agents.py +0 -1509
  16. glaip_sdk/cli/commands/common_config.py +0 -101
  17. glaip_sdk/cli/commands/configure.py +0 -896
  18. glaip_sdk/cli/commands/mcps.py +0 -1356
  19. glaip_sdk/cli/commands/models.py +0 -69
  20. glaip_sdk/cli/commands/tools.py +0 -576
  21. glaip_sdk/cli/commands/transcripts.py +0 -755
  22. glaip_sdk/cli/commands/update.py +0 -61
  23. glaip_sdk/cli/config.py +0 -95
  24. glaip_sdk/cli/constants.py +0 -38
  25. glaip_sdk/cli/context.py +0 -150
  26. glaip_sdk/cli/core/__init__.py +0 -79
  27. glaip_sdk/cli/core/context.py +0 -124
  28. glaip_sdk/cli/core/output.py +0 -846
  29. glaip_sdk/cli/core/prompting.py +0 -649
  30. glaip_sdk/cli/core/rendering.py +0 -187
  31. glaip_sdk/cli/display.py +0 -355
  32. glaip_sdk/cli/hints.py +0 -57
  33. glaip_sdk/cli/io.py +0 -112
  34. glaip_sdk/cli/main.py +0 -604
  35. glaip_sdk/cli/masking.py +0 -136
  36. glaip_sdk/cli/mcp_validators.py +0 -287
  37. glaip_sdk/cli/pager.py +0 -266
  38. glaip_sdk/cli/parsers/__init__.py +0 -7
  39. glaip_sdk/cli/parsers/json_input.py +0 -177
  40. glaip_sdk/cli/resolution.py +0 -67
  41. glaip_sdk/cli/rich_helpers.py +0 -27
  42. glaip_sdk/cli/slash/__init__.py +0 -15
  43. glaip_sdk/cli/slash/accounts_controller.py +0 -578
  44. glaip_sdk/cli/slash/accounts_shared.py +0 -75
  45. glaip_sdk/cli/slash/agent_session.py +0 -285
  46. glaip_sdk/cli/slash/prompt.py +0 -256
  47. glaip_sdk/cli/slash/remote_runs_controller.py +0 -566
  48. glaip_sdk/cli/slash/session.py +0 -1708
  49. glaip_sdk/cli/slash/tui/__init__.py +0 -9
  50. glaip_sdk/cli/slash/tui/accounts_app.py +0 -876
  51. glaip_sdk/cli/slash/tui/background_tasks.py +0 -72
  52. glaip_sdk/cli/slash/tui/loading.py +0 -58
  53. glaip_sdk/cli/slash/tui/remote_runs_app.py +0 -628
  54. glaip_sdk/cli/transcript/__init__.py +0 -31
  55. glaip_sdk/cli/transcript/cache.py +0 -536
  56. glaip_sdk/cli/transcript/capture.py +0 -329
  57. glaip_sdk/cli/transcript/export.py +0 -38
  58. glaip_sdk/cli/transcript/history.py +0 -815
  59. glaip_sdk/cli/transcript/launcher.py +0 -77
  60. glaip_sdk/cli/transcript/viewer.py +0 -374
  61. glaip_sdk/cli/update_notifier.py +0 -290
  62. glaip_sdk/cli/utils.py +0 -263
  63. glaip_sdk/cli/validators.py +0 -238
  64. glaip_sdk/client/__init__.py +0 -11
  65. glaip_sdk/client/_agent_payloads.py +0 -520
  66. glaip_sdk/client/agent_runs.py +0 -147
  67. glaip_sdk/client/agents.py +0 -1335
  68. glaip_sdk/client/base.py +0 -502
  69. glaip_sdk/client/main.py +0 -249
  70. glaip_sdk/client/mcps.py +0 -370
  71. glaip_sdk/client/run_rendering.py +0 -700
  72. glaip_sdk/client/shared.py +0 -21
  73. glaip_sdk/client/tools.py +0 -661
  74. glaip_sdk/client/validators.py +0 -198
  75. glaip_sdk/config/constants.py +0 -52
  76. glaip_sdk/mcps/__init__.py +0 -21
  77. glaip_sdk/mcps/base.py +0 -345
  78. glaip_sdk/models/__init__.py +0 -90
  79. glaip_sdk/models/agent.py +0 -47
  80. glaip_sdk/models/agent_runs.py +0 -116
  81. glaip_sdk/models/common.py +0 -42
  82. glaip_sdk/models/mcp.py +0 -33
  83. glaip_sdk/models/tool.py +0 -33
  84. glaip_sdk/payload_schemas/__init__.py +0 -7
  85. glaip_sdk/payload_schemas/agent.py +0 -85
  86. glaip_sdk/registry/__init__.py +0 -55
  87. glaip_sdk/registry/agent.py +0 -164
  88. glaip_sdk/registry/base.py +0 -139
  89. glaip_sdk/registry/mcp.py +0 -253
  90. glaip_sdk/registry/tool.py +0 -232
  91. glaip_sdk/runner/__init__.py +0 -59
  92. glaip_sdk/runner/base.py +0 -84
  93. glaip_sdk/runner/deps.py +0 -115
  94. glaip_sdk/runner/langgraph.py +0 -782
  95. glaip_sdk/runner/mcp_adapter/__init__.py +0 -13
  96. glaip_sdk/runner/mcp_adapter/base_mcp_adapter.py +0 -43
  97. glaip_sdk/runner/mcp_adapter/langchain_mcp_adapter.py +0 -257
  98. glaip_sdk/runner/mcp_adapter/mcp_config_builder.py +0 -95
  99. glaip_sdk/runner/tool_adapter/__init__.py +0 -18
  100. glaip_sdk/runner/tool_adapter/base_tool_adapter.py +0 -44
  101. glaip_sdk/runner/tool_adapter/langchain_tool_adapter.py +0 -219
  102. glaip_sdk/tools/__init__.py +0 -22
  103. glaip_sdk/tools/base.py +0 -435
  104. glaip_sdk/utils/__init__.py +0 -86
  105. glaip_sdk/utils/a2a/__init__.py +0 -34
  106. glaip_sdk/utils/a2a/event_processor.py +0 -188
  107. glaip_sdk/utils/agent_config.py +0 -194
  108. glaip_sdk/utils/bundler.py +0 -267
  109. glaip_sdk/utils/client.py +0 -111
  110. glaip_sdk/utils/client_utils.py +0 -486
  111. glaip_sdk/utils/datetime_helpers.py +0 -58
  112. glaip_sdk/utils/discovery.py +0 -78
  113. glaip_sdk/utils/display.py +0 -135
  114. glaip_sdk/utils/export.py +0 -143
  115. glaip_sdk/utils/general.py +0 -61
  116. glaip_sdk/utils/import_export.py +0 -168
  117. glaip_sdk/utils/import_resolver.py +0 -492
  118. glaip_sdk/utils/instructions.py +0 -101
  119. glaip_sdk/utils/rendering/__init__.py +0 -115
  120. glaip_sdk/utils/rendering/formatting.py +0 -264
  121. glaip_sdk/utils/rendering/layout/__init__.py +0 -64
  122. glaip_sdk/utils/rendering/layout/panels.py +0 -156
  123. glaip_sdk/utils/rendering/layout/progress.py +0 -202
  124. glaip_sdk/utils/rendering/layout/summary.py +0 -74
  125. glaip_sdk/utils/rendering/layout/transcript.py +0 -606
  126. glaip_sdk/utils/rendering/models.py +0 -85
  127. glaip_sdk/utils/rendering/renderer/__init__.py +0 -55
  128. glaip_sdk/utils/rendering/renderer/base.py +0 -1024
  129. glaip_sdk/utils/rendering/renderer/config.py +0 -27
  130. glaip_sdk/utils/rendering/renderer/console.py +0 -55
  131. glaip_sdk/utils/rendering/renderer/debug.py +0 -178
  132. glaip_sdk/utils/rendering/renderer/factory.py +0 -138
  133. glaip_sdk/utils/rendering/renderer/stream.py +0 -202
  134. glaip_sdk/utils/rendering/renderer/summary_window.py +0 -79
  135. glaip_sdk/utils/rendering/renderer/thinking.py +0 -273
  136. glaip_sdk/utils/rendering/renderer/toggle.py +0 -182
  137. glaip_sdk/utils/rendering/renderer/tool_panels.py +0 -442
  138. glaip_sdk/utils/rendering/renderer/transcript_mode.py +0 -162
  139. glaip_sdk/utils/rendering/state.py +0 -204
  140. glaip_sdk/utils/rendering/step_tree_state.py +0 -100
  141. glaip_sdk/utils/rendering/steps/__init__.py +0 -34
  142. glaip_sdk/utils/rendering/steps/event_processor.py +0 -778
  143. glaip_sdk/utils/rendering/steps/format.py +0 -176
  144. glaip_sdk/utils/rendering/steps/manager.py +0 -387
  145. glaip_sdk/utils/rendering/timing.py +0 -36
  146. glaip_sdk/utils/rendering/viewer/__init__.py +0 -21
  147. glaip_sdk/utils/rendering/viewer/presenter.py +0 -184
  148. glaip_sdk/utils/resource_refs.py +0 -195
  149. glaip_sdk/utils/run_renderer.py +0 -41
  150. glaip_sdk/utils/runtime_config.py +0 -425
  151. glaip_sdk/utils/serialization.py +0 -424
  152. glaip_sdk/utils/sync.py +0 -142
  153. glaip_sdk/utils/tool_detection.py +0 -33
  154. glaip_sdk/utils/validation.py +0 -264
  155. glaip_sdk-0.6.12.dist-info/RECORD +0 -159
  156. glaip_sdk-0.6.12.dist-info/entry_points.txt +0 -3
glaip_sdk/utils/serialization.py DELETED
@@ -1,424 +0,0 @@
- """Serialization utilities for JSON/YAML read/write and resource attribute collection.
-
- This module provides pure functions for file I/O operations and data serialization
- that can be used by both CLI and SDK layers without coupling to Click or Rich.
-
- Authors:
-     Raymond Christopher (raymond.christopher@gdplabs.id)
- """
-
- import importlib
- import json
- from collections.abc import Callable, Iterable
- from pathlib import Path
- from typing import TYPE_CHECKING, Any
-
- import yaml
-
- if TYPE_CHECKING: # pragma: no cover - type-only imports
-     from rich.console import Console
-
-     from glaip_sdk.models import MCP
-
-
- def read_json(file_path: Path) -> dict[str, Any]:
-     """Read data from JSON file.
-
-     Args:
-         file_path: Path to JSON file
-
-     Returns:
-         Parsed JSON data as dictionary
-
-     Raises:
-         FileNotFoundError: If file doesn't exist
-         ValueError: If file is not valid JSON
-     """
-     with open(file_path, encoding="utf-8") as f:
-         return json.load(f)
-
-
- def write_json(file_path: Path, data: dict[str, Any], indent: int = 2) -> None:
-     """Write data to JSON file.
-
-     Args:
-         file_path: Path to write JSON file
-         data: Data to write
-         indent: JSON indentation level (default: 2)
-     """
-     with open(file_path, "w", encoding="utf-8") as f:
-         json.dump(data, f, indent=indent, default=str)
-
-
- def read_yaml(file_path: Path) -> dict[str, Any]:
-     """Read data from YAML file.
-
-     Args:
-         file_path: Path to YAML file
-
-     Returns:
-         Parsed YAML data as dictionary
-
-     Raises:
-         FileNotFoundError: If file doesn't exist
-         ValueError: If file is not valid YAML
-     """
-     with open(file_path, encoding="utf-8") as f:
-         data = yaml.safe_load(f)
-
-     # Handle instruction_lines array format for user-friendly YAML
-     if isinstance(data, dict) and "instruction_lines" in data and isinstance(data["instruction_lines"], list):
-         data["instruction"] = "\n\n".join(data["instruction_lines"])
-         del data["instruction_lines"]
-
-     # Handle instruction as list from YAML export (convert back to string)
-     if isinstance(data, dict) and "instruction" in data and isinstance(data["instruction"], list):
-         data["instruction"] = "\n\n".join(data["instruction"])
-
-     return data
-
-
- def write_yaml(file_path: Path, data: dict[str, Any]) -> None:
-     """Write data to YAML file with user-friendly formatting.
-
-     Args:
-         file_path: Path to write YAML file
-         data: Data to write
-     """
-
-     # Custom YAML dumper for user-friendly instruction formatting
-     class LiteralString(str):
-         """String subclass for YAML literal block scalar formatting."""
-
-         pass
-
-     def literal_string_representer(dumper: yaml.Dumper, data: "LiteralString") -> yaml.nodes.Node:
-         """YAML representer for LiteralString to use literal block scalar style.
-
-         Args:
-             dumper: YAML dumper instance.
-             data: LiteralString instance to represent.
-
-         Returns:
-             YAML node with literal block scalar style for multiline strings.
-         """
-         # Use literal block scalar (|) for multiline strings to preserve formatting
-         if "\n" in data:
-             return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
-         return dumper.represent_scalar("tag:yaml.org,2002:str", data)
-
-     # Add custom representer to the YAML dumper
-     yaml.add_representer(LiteralString, literal_string_representer)
-
-     # Convert instruction to LiteralString for proper formatting
-     if isinstance(data, dict) and "instruction" in data and data["instruction"]:
-         data = data.copy() # Don't modify original
-         data["instruction"] = LiteralString(data["instruction"])
-
-     with open(file_path, "w", encoding="utf-8") as f:
-         yaml.dump(data, f, default_flow_style=False, allow_unicode=True, sort_keys=False)
-
-
- def load_resource_from_file(file_path: Path) -> dict[str, Any]:
-     """Load resource data from JSON or YAML file.
-
-     Args:
-         file_path: Path to the file
-
-     Returns:
-         Dictionary with resource data
-
-     Raises:
-         ValueError: If file format is not supported
-     """
-     if file_path.suffix.lower() in [".yaml", ".yml"]:
-         return read_yaml(file_path)
-     elif file_path.suffix.lower() == ".json":
-         return read_json(file_path)
-     else:
-         raise ValueError(f"Unsupported file format: {file_path.suffix}. Only JSON and YAML files are supported.")
-
-
- def write_resource_export(file_path: Path, data: dict[str, Any], format: str = "json") -> None:
-     """Write resource export data to file.
-
-     Args:
-         file_path: Path to export file
-         data: Resource data to export
-         format: Export format ("json" or "yaml")
-     """
-     if format.lower() == "yaml" or file_path.suffix.lower() in [".yaml", ".yml"]:
-         write_yaml(file_path, data)
-     else:
-         write_json(file_path, data)
-
-
- _EXCLUDED_ATTRS = {
-     "id",
-     "created_at",
-     "updated_at",
-     "_client",
-     "_raw_data",
- }
- _EXCLUDED_NAMES = {
-     "model_dump",
-     "dict",
-     "json",
-     "get",
-     "post",
-     "put",
-     "delete",
-     "save",
-     "refresh",
-     "update",
- }
- _PREFERRED_MAPPERS: tuple[str, ...] = ("model_dump", "dict", "to_dict")
-
-
- def collect_attributes_for_export(resource: Any) -> dict[str, Any]:
-     """Collect resource attributes suitable for export.
-
-     The helper prefers structured dump methods when available and gracefully
-     falls back to the object's attribute space. Internal fields, identifiers,
-     and callables are filtered out so the result only contains user-configurable
-     data.
-     """
-     mapping = _coerce_resource_to_mapping(resource)
-     if mapping is None: # pragma: no cover - defensive fallback when attribute introspection fails
-         items = ((name, _safe_getattr(resource, name)) for name in _iter_public_attribute_names(resource))
-     else:
-         items = mapping.items()
-
-     export: dict[str, Any] = {}
-     for key, value in items:
-         if _should_include_attribute(key, value):
-             export[key] = value
-
-     # Post-process agent exports to clean up unwanted transformations
-     if hasattr(resource, "__class__") and resource.__class__.__name__ == "Agent":
-         export = _clean_agent_export_data(export)
-
-     return export
-
-
- def _clean_agent_export_data(agent_data: dict[str, Any]) -> dict[str, Any]:
-     """Clean up agent export data to remove unwanted transformations.
-
-     This function addresses the issue where the backend API transforms
-     the 'timeout' field into 'execution_timeout' in an 'agent_config' section
-     during export, which is not desired for clean agent configuration exports.
-     """
-     cleaned = agent_data.copy()
-
-     # Remove execution_timeout from agent_config if it exists
-     if "agent_config" in cleaned and isinstance(cleaned["agent_config"], dict):
-         agent_config = cleaned["agent_config"]
-         if "execution_timeout" in agent_config:
-             # Move execution_timeout back to root level as timeout
-             cleaned["timeout"] = agent_config.pop("execution_timeout")
-
-     return cleaned
-
-
- def _coerce_resource_to_mapping(resource: Any) -> dict[str, Any] | None:
-     """Return a mapping representation of ``resource`` when possible."""
-     for attr in _PREFERRED_MAPPERS:
-         method = getattr(resource, attr, None)
-         if callable(method):
-             try:
-                 data = method()
-             except Exception:
-                 continue
-             if isinstance(data, dict):
-                 return data
-
-     if isinstance(resource, dict):
-         return resource
-
-     try:
-         if hasattr(resource, "__dict__"):
-             return dict(resource.__dict__)
-     except Exception: # pragma: no cover - pathological objects can still defeat coercion
-         return None
-
-     return None
-
-
- def _iter_public_attribute_names(resource: Any) -> Iterable[str]:
-     """Yield attribute names we should inspect on ``resource``."""
-     seen: set[str] = set()
-     names: list[str] = []
-
-     def _collect(candidates: Iterable[str] | None) -> None:
-         """Collect unique candidate attribute names.
-
-         Args:
-             candidates: Iterable of candidate attribute names.
-         """
-         for candidate in candidates or ():
-             if candidate not in seen:
-                 seen.add(candidate)
-                 names.append(candidate)
-
-     # Collect from __dict__
-     _collect_from_dict(resource, _collect)
-
-     # Collect from __annotations__
-     _collect_from_annotations(resource, _collect)
-
-     # Collect from __slots__
-     _collect(getattr(resource, "__slots__", ()))
-
-     # Fallback to dir() if no names found
-     if not names:
-         _collect_from_dir(resource, _collect)
-
-     return iter(names)
-
-
- def _collect_from_dict(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
-     """Safely collect attribute names from __dict__."""
-     try:
-         if hasattr(resource, "__dict__"):
-             dict_keys = getattr(resource, "__dict__", {})
-             if dict_keys:
-                 collect_func(dict_keys.keys())
-     except Exception: # pragma: no cover - defensive programming
-         pass
-
-
- def _collect_from_annotations(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
-     """Safely collect attribute names from __annotations__."""
-     annotations = getattr(resource, "__annotations__", {})
-     if annotations:
-         collect_func(annotations.keys())
-
-
- def _collect_from_dir(resource: Any, collect_func: Callable[[Iterable[str]], None]) -> None:
-     """Safely collect attribute names from dir()."""
-     try:
-         collect_func(name for name in dir(resource) if not name.startswith("__"))
-     except Exception: # pragma: no cover - defensive programming
-         pass
-
-
- def _safe_getattr(resource: Any, name: str) -> Any:
-     """Return getattr(resource, name) but swallow any exception and return None."""
-     try:
-         return getattr(resource, name)
-     except Exception:
-         return None
-
-
- def _should_include_attribute(key: str, value: Any) -> bool:
-     """Return True when an attribute should be serialized."""
-     if key in _EXCLUDED_ATTRS or key in _EXCLUDED_NAMES:
-         return False
-     if key.startswith("_"):
-         return False
-     if callable(value):
-         return False
-     return True
-
-
- def strip_empty_fields(data: dict[str, Any]) -> dict[str, Any]:
-     """Recursively remove None values and empty dictionaries from a dictionary.
-
-     Args:
-         data: Dictionary to clean
-
-     Returns:
-         Cleaned dictionary with None values and empty dicts removed
-     """
-     if not isinstance(data, dict):
-         return data
-
-     cleaned = {}
-     for key, value in data.items():
-         if value is None:
-             continue
-         if isinstance(value, dict):
-             nested = strip_empty_fields(value)
-             if nested: # Only include non-empty dicts
-                 cleaned[key] = nested
-         else:
-             cleaned[key] = value
-
-     return cleaned
-
-
- def build_mcp_export_payload(
-     mcp: "MCP",
-     *,
-     prompt_for_secrets: bool,
-     placeholder: str,
-     console: "Console",
- ) -> dict[str, Any]:
-     """Build MCP export payload with authentication secret handling.
-
-     This function prepares an MCP resource for export by:
-     1. Starting from model_dump(exclude_none=True) for API alignment
-     2. Cleaning internal fields (_client, empty metadata)
-     3. Processing authentication with secret capture/placeholder logic
-     4. Removing empty fields recursively
-
-     Args:
-         mcp: MCP model instance to export
-         prompt_for_secrets: Whether to interactively prompt for missing secrets
-         placeholder: Placeholder text for missing secrets
-         console: Rich Console instance for user interaction
-
-     Returns:
-         Dictionary ready for export (JSON/YAML serialization)
-
-     Raises:
-         ImportError: If required modules (auth helpers) are not available
-     """
-     auth_module = importlib.import_module("glaip_sdk.cli.auth")
-     prepare_authentication_export = auth_module.prepare_authentication_export
-
-     # Start with model dump (excludes None values automatically)
-     payload = mcp.model_dump(exclude_none=True)
-
-     # Remove internal/CLI fields
-     payload.pop("_client", None)
-
-     # Remove empty metadata dict
-     if "metadata" in payload and not payload["metadata"]:
-         payload.pop("metadata")
-
-     # Process authentication section
-     if "authentication" in payload:
-         processed_auth = prepare_authentication_export(
-             payload["authentication"],
-             prompt_for_secrets=prompt_for_secrets,
-             placeholder=placeholder,
-             console=console,
-         )
-         if processed_auth:
-             payload["authentication"] = processed_auth
-         else:
-             payload.pop("authentication")
-
-     # Apply final cleanup to remove any remaining empty fields
-     payload = strip_empty_fields(payload)
-
-     return payload
-
-
- def validate_json_string(json_str: str) -> dict[str, Any]:
-     """Validate JSON string and return parsed data.
-
-     Args:
-         json_str: JSON string to validate
-
-     Returns:
-         Parsed JSON data
-
-     Raises:
-         ValueError: If JSON is invalid
-     """
-     try:
-         return json.loads(json_str)
-     except json.JSONDecodeError as e:
-         raise ValueError(f"Invalid JSON: {e}") from e
glaip_sdk/utils/sync.py DELETED
@@ -1,142 +0,0 @@
- """Agent and tool synchronization (create/update) operations.
-
- This module provides convenience functions for tool classes that need bundling.
-
- For direct upsert operations, use the client methods:
- client.agents.upsert_agent(identifier, **kwargs)
- client.tools.upsert_tool(identifier, code, **kwargs)
- client.mcps.upsert_mcp(identifier, **kwargs)
-
- Authors:
-     Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
- """
-
- from __future__ import annotations
-
- from typing import TYPE_CHECKING, Any
-
- from glaip_sdk.utils.bundler import ToolBundler
- from glaip_sdk.utils.import_resolver import load_class
- from gllm_core.utils import LoggerManager
-
- if TYPE_CHECKING:
-     from glaip_sdk.models import Agent, Tool
-
- logger = LoggerManager().get_logger(__name__)
-
-
- def _extract_tool_name(tool_class: Any) -> str:
-     """Extract tool name from a class, handling Pydantic v2 models."""
-     # Direct attribute access (works for non-Pydantic classes)
-     if hasattr(tool_class, "name"):
-         name = getattr(tool_class, "name", None)
-         if isinstance(name, str):
-             return name
-
-     # Pydantic v2 model - check model_fields
-     if hasattr(tool_class, "model_fields"):
-         model_fields = getattr(tool_class, "model_fields", {})
-         if "name" in model_fields:
-             field_info = model_fields["name"]
-             if hasattr(field_info, "default") and isinstance(field_info.default, str):
-                 return field_info.default
-
-     raise ValueError(f"Cannot extract name from tool class: {tool_class}")
-
-
- def _extract_tool_description(tool_class: Any) -> str:
-     """Extract tool description from a class, handling Pydantic v2 models."""
-     # Direct attribute access
-     if hasattr(tool_class, "description"):
-         desc = getattr(tool_class, "description", None)
-         if isinstance(desc, str):
-             return desc
-
-     # Pydantic v2 model - check model_fields
-     if hasattr(tool_class, "model_fields"):
-         model_fields = getattr(tool_class, "model_fields", {})
-         if "description" in model_fields:
-             field_info = model_fields["description"]
-             if hasattr(field_info, "default") and isinstance(field_info.default, str):
-                 return field_info.default
-
-     return ""
-
-
- def update_or_create_tool(tool_ref: Any) -> Tool:
-     """Create or update a tool from a tool class with bundled source code.
-
-     This function takes a tool class (LangChain BaseTool), bundles its source
-     code with inlined imports, and creates/updates it in the backend.
-
-     Args:
-         tool_ref: A tool class (LangChain BaseTool subclass) or import path string.
-
-     Returns:
-         The created or updated tool.
-
-     Example:
-         >>> from glaip_sdk.utils.sync import update_or_create_tool
-         >>> from my_tools import WeatherAPITool
-         >>> tool = update_or_create_tool(WeatherAPITool)
-     """
-     from glaip_sdk.utils.client import get_client # noqa: PLC0415
-
-     client = get_client()
-
-     # Handle string import path
-     if isinstance(tool_ref, str):
-         tool_class = load_class(tool_ref)
-     else:
-         tool_class = tool_ref
-
-     # Get tool info - handle Pydantic v2 model classes
-     tool_name = _extract_tool_name(tool_class)
-     tool_description = _extract_tool_description(tool_class)
-
-     # Bundle source code
-     bundler = ToolBundler(tool_class)
-     bundled_source = bundler.bundle()
-
-     logger.info("Tool info: name='%s', description='%s...'", tool_name, tool_description[:50])
-     logger.info("Bundled source code: %d characters", len(bundled_source))
-
-     # Use client's upsert method
-     return client.tools.upsert_tool(
-         tool_name,
-         code=bundled_source,
-         description=tool_description,
-     )
-
-
- def update_or_create_agent(agent_config: dict[str, Any]) -> Agent:
-     """Create or update an agent from configuration.
-
-     Args:
-         agent_config: Agent configuration dictionary containing:
-             name (str): Agent name (required)
-             description (str): Agent description
-             instruction (str): Agent instruction
-             tools (list, optional): List of tool IDs
-             agents (list, optional): List of sub-agent IDs
-             metadata (dict, optional): Additional metadata
-
-     Returns:
-         The created or updated agent.
-
-     Example:
-         >>> from glaip_sdk.utils.sync import update_or_create_agent
-         >>> config = {
-         ... "name": "weather_reporter",
-         ... "description": "Weather reporting agent",
-         ... "instruction": "You are a weather reporter.",
-         ... }
-         >>> agent = update_or_create_agent(config)
-     """
-     from glaip_sdk.utils.client import get_client # noqa: PLC0415
-
-     client = get_client()
-     agent_name = agent_config.pop("name")
-
-     # Use client's upsert method
-     return client.agents.upsert_agent(agent_name, **agent_config)
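
The removed sync helpers above are thin wrappers over the client upsert methods named in their own docstring. A sketch of what update_or_create_agent() reduced to, assuming a 0.6.12-style install (glaip_sdk.utils.client.get_client is itself deleted in 0.6.14, so the import is shown only to illustrate the old call path):

    from glaip_sdk.utils.client import get_client  # removed in 0.6.14; illustrative only

    client = get_client()

    config = {
        "name": "weather_reporter",
        "description": "Weather reporting agent",
        "instruction": "You are a weather reporter.",
    }

    # update_or_create_agent(config): pop the name, pass the rest straight to upsert.
    agent_name = config.pop("name")
    agent = client.agents.upsert_agent(agent_name, **config)

    # update_or_create_tool(WeatherAPITool) did the same for tools, except it first
    # bundled the class source with ToolBundler and then called:
    #   client.tools.upsert_tool(tool_name, code=bundled_source, description=tool_description)
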
glaip_sdk/utils/tool_detection.py DELETED
@@ -1,33 +0,0 @@
- """Shared utilities for tool type detection.
-
- Authors:
-     Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
- """
-
- from typing import Any
-
-
- def is_langchain_tool(ref: Any) -> bool:
-     """Check if ref is a LangChain BaseTool class or instance.
-
-     Shared by:
-     ToolRegistry._is_custom_tool() (for upload detection)
-     LangChainToolAdapter._is_langchain_tool() (for adaptation)
-
-     Args:
-         ref: Object to check.
-
-     Returns:
-         True if ref is a LangChain BaseTool class or instance.
-     """
-     try:
-         from langchain_core.tools import BaseTool # noqa: PLC0415
-
-         if isinstance(ref, type) and issubclass(ref, BaseTool):
-             return True
-         if isinstance(ref, BaseTool):
-             return True
-     except ImportError:
-         pass
-
-     return False
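
For context, the removed is_langchain_tool() above accepts both BaseTool subclasses and instances and quietly returns False when langchain-core is not installed. A small usage sketch of the same check (requires langchain-core; the add tool is illustrative):

    from langchain_core.tools import BaseTool, tool

    @tool
    def add(a: int, b: int) -> int:
        """Add two integers."""
        return a + b

    # Instance branch of the removed check: @tool returns a BaseTool instance.
    print(isinstance(add, BaseTool))                                        # True
    # Class branch of the removed check: BaseTool subclasses also qualify.
    print(isinstance(type(add), type) and issubclass(type(add), BaseTool))  # True
    # Anything else is not a LangChain tool.
    print(isinstance(object(), BaseTool))                                   # False
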