kagent_adk-0.7.11-py3-none-any.whl
This diff shows the contents of a publicly released package version as published to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- kagent/adk/__init__.py +8 -0
- kagent/adk/_a2a.py +178 -0
- kagent/adk/_agent_executor.py +335 -0
- kagent/adk/_lifespan.py +36 -0
- kagent/adk/_session_service.py +178 -0
- kagent/adk/_token.py +80 -0
- kagent/adk/artifacts/__init__.py +13 -0
- kagent/adk/artifacts/artifacts_toolset.py +56 -0
- kagent/adk/artifacts/return_artifacts_tool.py +160 -0
- kagent/adk/artifacts/session_path.py +106 -0
- kagent/adk/artifacts/stage_artifacts_tool.py +170 -0
- kagent/adk/cli.py +249 -0
- kagent/adk/converters/__init__.py +0 -0
- kagent/adk/converters/error_mappings.py +60 -0
- kagent/adk/converters/event_converter.py +322 -0
- kagent/adk/converters/part_converter.py +206 -0
- kagent/adk/converters/request_converter.py +35 -0
- kagent/adk/models/__init__.py +3 -0
- kagent/adk/models/_openai.py +564 -0
- kagent/adk/models/_ssl.py +245 -0
- kagent/adk/sandbox_code_executer.py +77 -0
- kagent/adk/skill_fetcher.py +103 -0
- kagent/adk/tools/README.md +217 -0
- kagent/adk/tools/__init__.py +15 -0
- kagent/adk/tools/bash_tool.py +74 -0
- kagent/adk/tools/file_tools.py +192 -0
- kagent/adk/tools/skill_tool.py +104 -0
- kagent/adk/tools/skills_plugin.py +49 -0
- kagent/adk/tools/skills_toolset.py +68 -0
- kagent/adk/types.py +268 -0
- kagent_adk-0.7.11.dist-info/METADATA +35 -0
- kagent_adk-0.7.11.dist-info/RECORD +34 -0
- kagent_adk-0.7.11.dist-info/WHEEL +4 -0
- kagent_adk-0.7.11.dist-info/entry_points.txt +2 -0
kagent/adk/_session_service.py
ADDED

@@ -0,0 +1,178 @@
+import logging
+from typing import Any, Optional
+
+import httpx
+from google.adk.events.event import Event
+from google.adk.sessions import Session
+from google.adk.sessions.base_session_service import (
+    BaseSessionService,
+    GetSessionConfig,
+    ListSessionsResponse,
+)
+from typing_extensions import override
+
+logger = logging.getLogger("kagent." + __name__)
+
+
+class KAgentSessionService(BaseSessionService):
+    """A session service implementation that uses the Kagent API.
+    This service integrates with the Kagent server to manage session state
+    and persistence through HTTP API calls.
+    """
+
+    def __init__(self, client: httpx.AsyncClient):
+        super().__init__()
+        self.client = client
+
+    @override
+    async def create_session(
+        self,
+        *,
+        app_name: str,
+        user_id: str,
+        state: Optional[dict[str, Any]] = None,
+        session_id: Optional[str] = None,
+    ) -> Session:
+        # Prepare request data
+        request_data = {
+            "user_id": user_id,
+            "agent_ref": app_name,  # Use app_name as agent reference
+        }
+        if session_id:
+            request_data["id"] = session_id
+        if state and state.get("session_name"):
+            request_data["name"] = state.get("session_name", "")
+
+        # Make API call to create session
+        response = await self.client.post(
+            "/api/sessions",
+            json=request_data,
+            headers={"X-User-ID": user_id},
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        if not data.get("data"):
+            raise RuntimeError(f"Failed to create session: {data.get('message', 'Unknown error')}")
+
+        session_data = data["data"]
+
+        # Convert to ADK Session format
+        return Session(id=session_data["id"], user_id=session_data["user_id"], state=state or {}, app_name=app_name)
+
+    @override
+    async def get_session(
+        self,
+        *,
+        app_name: str,
+        user_id: str,
+        session_id: str,
+        config: Optional[GetSessionConfig] = None,
+    ) -> Optional[Session]:
+        try:
+            url = f"/api/sessions/{session_id}?user_id={user_id}"
+            if config:
+                if config.after_timestamp:
+                    # TODO: implement
+                    # url += f"&after={config.after_timestamp}"
+                    pass
+                if config.num_recent_events:
+                    url += f"&limit={config.num_recent_events}"
+                else:
+                    url += "&limit=-1"
+            else:
+                # return all
+                url += "&limit=-1"
+
+            # Make API call to get session
+            response: httpx.Response = await self.client.get(
+                url,
+                headers={"X-User-ID": user_id},
+            )
+            if response.status_code == 404:
+                return None
+            response.raise_for_status()
+
+            data = response.json()
+            if not data.get("data"):
+                return None
+
+            if not data.get("data").get("session"):
+                return None
+            session_data = data["data"]["session"]
+
+            events_data = data["data"]["events"]
+
+            events: list[Event] = []
+            for event_data in events_data:
+                events.append(Event.model_validate_json(event_data["data"]))
+
+            # Convert to ADK Session format
+            session = Session(
+                id=session_data["id"],
+                user_id=session_data["user_id"],
+                events=events,
+                app_name=app_name,
+                state={},
+            )
+
+            for event in events:
+                await super().append_event(session, event)
+
+            return session
+        except httpx.HTTPStatusError as e:
+            if e.response.status_code == 404:
+                return None
+            raise
+
+    @override
+    async def list_sessions(self, *, app_name: str, user_id: str) -> ListSessionsResponse:
+        # Make API call to list sessions
+        response = await self.client.get(f"/api/sessions?user_id={user_id}", headers={"X-User-ID": user_id})
+        response.raise_for_status()
+
+        data = response.json()
+        sessions_data = data.get("data", [])
+
+        # Convert to ADK Session format
+        sessions = []
+        for session_data in sessions_data:
+            session = Session(id=session_data["id"], user_id=session_data["user_id"], state={}, app_name=app_name)
+            sessions.append(session)
+
+        return ListSessionsResponse(sessions=sessions)
+
+    def list_sessions_sync(self, *, app_name: str, user_id: str) -> ListSessionsResponse:
+        raise NotImplementedError("not supported. use async")
+
+    @override
+    async def delete_session(self, *, app_name: str, user_id: str, session_id: str) -> None:
+        # Make API call to delete session
+        response = await self.client.delete(
+            f"/api/sessions/{session_id}?user_id={user_id}",
+            headers={"X-User-ID": user_id},
+        )
+        response.raise_for_status()
+
+    @override
+    async def append_event(self, session: Session, event: Event) -> Event:
+        # Convert ADK Event to JSON format
+        event_data = {
+            "id": event.id,
+            "data": event.model_dump_json(),
+        }
+
+        # Make API call to append event to session
+        response = await self.client.post(
+            f"/api/sessions/{session.id}/events?user_id={session.user_id}",
+            json=event_data,
+            headers={"X-User-ID": session.user_id},
+        )
+        response.raise_for_status()
+
+        # TODO: potentially pull and update the session from the server
+        # Update the in-memory session.
+        session.last_update_time = event.timestamp
+        await super().append_event(session=session, event=event)
+
+        return event
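For orientation, a minimal usage sketch (not part of the package; the base URL, app name, and user ID are placeholder assumptions). It shows that the service is just a thin async wrapper over the kagent HTTP API behind the ADK BaseSessionService interface:

import asyncio

import httpx

from kagent.adk._session_service import KAgentSessionService


async def main() -> None:
    # Placeholder base URL; point this at a real kagent server.
    async with httpx.AsyncClient(base_url="http://localhost:8083") as client:
        service = KAgentSessionService(client)
        session = await service.create_session(app_name="my-agent", user_id="alice")
        fetched = await service.get_session(
            app_name="my-agent", user_id="alice", session_id=session.id
        )
        print(fetched.id if fetched else "session not found")


asyncio.run(main())
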
kagent/adk/_token.py
ADDED

@@ -0,0 +1,80 @@
+import logging  # noqa: I001
+import asyncio
+from contextlib import asynccontextmanager
+from typing import Any, Optional
+
+import httpx
+
+KAGENT_TOKEN_PATH = "/var/run/secrets/tokens/kagent-token"
+logger = logging.getLogger(__name__)
+
+
+class KAgentTokenService:
+    """Reads a k8s token from a file, and reloads it
+    periodically.
+    """
+
+    def __init__(self, app_name: str):
+        self.token = None
+        self.update_lock = asyncio.Lock()
+        self.update_task = None
+        self.app_name = app_name
+
+    def lifespan(self):
+        """Returns an async context manager to start the token update loop"""
+
+        @asynccontextmanager
+        async def _lifespan(app: Any):
+            await self._update_token_loop()
+            yield
+            self._drain()
+
+        return _lifespan
+
+    def event_hooks(self):
+        """Returns a dictionary of event hooks for the application
+        to use when creating the httpx.AsyncClient.
+        """
+        return {"request": [self._add_bearer_token]}
+
+    async def _update_token_loop(self) -> None:
+        self.token = await self._read_kagent_token()
+        # keep it updated - launch a background task to refresh it periodically
+        self.update_task = asyncio.create_task(self._refresh_token())
+
+    def _drain(self):
+        if self.update_task:
+            self.update_task.cancel()
+
+    async def _get_token(self) -> str | None:
+        async with self.update_lock:
+            return self.token
+
+    async def _read_kagent_token(self) -> str | None:
+        return await asyncio.to_thread(read_token)
+
+    async def _refresh_token(self):
+        while True:
+            await asyncio.sleep(60)  # Wait for 60 seconds before refreshing
+            token = await self._read_kagent_token()
+            if token is not None and token != self.token:
+                async with self.update_lock:
+                    self.token = token
+
+    async def _add_bearer_token(self, request: httpx.Request):
+        # Your function to generate headers dynamically
+        token = await self._get_token()
+        headers = {"X-Agent-Name": self.app_name}
+        if token:
+            headers["Authorization"] = f"Bearer {token}"
+        request.headers.update(headers)
+
+
+def read_token() -> str | None:
+    try:
+        with open(KAGENT_TOKEN_PATH, "r", encoding="utf-8") as f:
+            token = f.read()
+            return token.strip()
+    except OSError as e:
+        logger.error(f"Error reading token from {KAGENT_TOKEN_PATH}: {e}")
+        return None
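A minimal wiring sketch, not from the package: the FastAPI host app and base URL are assumptions; any ASGI framework that accepts a lifespan factory would work the same way.

import httpx
from fastapi import FastAPI  # assumed host framework, for illustration only

from kagent.adk._token import KAgentTokenService

token_service = KAgentTokenService(app_name="my-agent")

# lifespan() starts the 60-second refresh task on startup and cancels it on shutdown.
app = FastAPI(lifespan=token_service.lifespan())

# Every outgoing request gets X-Agent-Name and, once a token has been read,
# an Authorization: Bearer header via the httpx request event hook.
client = httpx.AsyncClient(
    base_url="http://localhost:8083",  # placeholder
    event_hooks=token_service.event_hooks(),
)
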
kagent/adk/artifacts/__init__.py
ADDED

@@ -0,0 +1,13 @@
+from .artifacts_toolset import ArtifactsToolset
+from .return_artifacts_tool import ReturnArtifactsTool
+from .session_path import clear_session_cache, get_session_path, initialize_session_path
+from .stage_artifacts_tool import StageArtifactsTool
+
+__all__ = [
+    "ArtifactsToolset",
+    "ReturnArtifactsTool",
+    "StageArtifactsTool",
+    "get_session_path",
+    "initialize_session_path",
+    "clear_session_cache",
+]

kagent/adk/artifacts/artifacts_toolset.py
ADDED

@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import logging
+from typing import List, Optional
+
+try:
+    from typing_extensions import override
+except ImportError:
+    from typing import override
+
+from google.adk.agents.readonly_context import ReadonlyContext
+from google.adk.tools import BaseTool
+from google.adk.tools.base_toolset import BaseToolset
+
+from .return_artifacts_tool import ReturnArtifactsTool
+from .stage_artifacts_tool import StageArtifactsTool
+
+logger = logging.getLogger("kagent_adk." + __name__)
+
+
+class ArtifactsToolset(BaseToolset):
+    """Toolset for managing artifact upload and download workflows.
+
+    This toolset provides tools for the complete artifact lifecycle:
+    1. StageArtifactsTool - Download artifacts from artifact service to working directory
+    2. ReturnArtifactsTool - Upload generated files from working directory to artifact service
+
+    Artifacts enable file-based interactions:
+    - Users upload files via frontend → stored as artifacts
+    - StageArtifactsTool copies them to working directory for processing
+    - Processing tools (bash, skills, etc.) work with files on disk
+    - ReturnArtifactsTool saves generated outputs back as artifacts
+    - Users download results via frontend
+
+    This toolset is independent of skills and can be used with any processing workflow.
+    """
+
+    def __init__(self):
+        """Initialize the artifacts toolset."""
+        super().__init__()
+
+        # Create artifact lifecycle tools
+        self.stage_artifacts_tool = StageArtifactsTool()
+        self.return_artifacts_tool = ReturnArtifactsTool()
+
+    @override
+    async def get_tools(self, readonly_context: Optional[ReadonlyContext] = None) -> List[BaseTool]:
+        """Get both artifact tools.
+
+        Returns:
+            List containing StageArtifactsTool and ReturnArtifactsTool.
+        """
+        return [
+            self.stage_artifacts_tool,
+            self.return_artifacts_tool,
+        ]
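A minimal sketch of attaching the toolset to an ADK agent (not from the package; the agent name, model, and instruction are placeholder assumptions, and the exact Agent constructor depends on the google-adk release in use):

from google.adk.agents import Agent

from kagent.adk.artifacts import ArtifactsToolset

agent = Agent(
    name="file_worker",        # placeholder
    model="gemini-2.0-flash",  # placeholder
    instruction="Stage uploads, process them, and return outputs as artifacts.",
    tools=[ArtifactsToolset()],  # expands to the stage/return artifact tools
)
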
kagent/adk/artifacts/return_artifacts_tool.py
ADDED

@@ -0,0 +1,160 @@
+"""Tool for returning generated files from working directory to artifact service."""
+
+from __future__ import annotations
+
+import logging
+import mimetypes
+from pathlib import Path
+from typing import Any, Dict, List
+
+from google.adk.tools import BaseTool, ToolContext
+from google.genai import types
+from typing_extensions import override
+
+from .session_path import get_session_path
+from .stage_artifacts_tool import MAX_ARTIFACT_SIZE_BYTES
+
+logger = logging.getLogger("kagent_adk." + __name__)
+
+
+class ReturnArtifactsTool(BaseTool):
+    """Save generated files from working directory to artifact service for user download.
+
+    This tool enables users to download outputs generated during processing.
+    Files are saved to the artifact service where they can be retrieved by the frontend.
+    """
+
+    def __init__(self):
+        super().__init__(
+            name="return_artifacts",
+            description=(
+                "Save generated files from the working directory to the artifact service, "
+                "making them available for user download.\n\n"
+                "WORKFLOW:\n"
+                "1. Generate output files in the 'outputs/' directory\n"
+                "2. Use this tool to save those files to the artifact service\n"
+                "3. Users can then download the files via the frontend\n\n"
+                "USAGE EXAMPLE:\n"
+                "- bash('python scripts/analyze.py > outputs/report.txt')\n"
+                "- return_artifacts(file_paths=['outputs/report.txt'])\n"
+                "  Returns: 'Saved 1 file(s): report.txt (v0, 15.2 KB)'\n\n"
+                "PARAMETERS:\n"
+                "- file_paths: List of relative paths from working directory (required)\n"
+                "- artifact_names: Optional custom names for artifacts (default: use filename)\n\n"
+                "BEST PRACTICES:\n"
+                "- Generate outputs in 'outputs/' directory for clarity\n"
+                "- Use descriptive filenames (they become artifact names)\n"
+                "- Return all outputs at once for efficiency"
+            ),
+        )
+
+    def _get_declaration(self) -> types.FunctionDeclaration | None:
+        return types.FunctionDeclaration(
+            name=self.name,
+            description=self.description,
+            parameters=types.Schema(
+                type=types.Type.OBJECT,
+                properties={
+                    "file_paths": types.Schema(
+                        type=types.Type.ARRAY,
+                        description=(
+                            "List of relative file paths from the working directory to save as artifacts. "
+                            "Example: ['outputs/report.pdf', 'outputs/data.csv']. "
+                            "Files must exist in the working directory and be within size limits."
+                        ),
+                        items=types.Schema(type=types.Type.STRING),
+                    ),
+                    "artifact_names": types.Schema(
+                        type=types.Type.ARRAY,
+                        description=(
+                            "Optional custom names for the artifacts. "
+                            "If not provided, the filename will be used. "
+                            "Must match the length of file_paths if provided."
+                        ),
+                        items=types.Schema(type=types.Type.STRING),
+                    ),
+                },
+                required=["file_paths"],
+            ),
+        )
+
+    @override
+    async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
+        file_paths: List[str] = args.get("file_paths", [])
+        artifact_names: List[str] = args.get("artifact_names", [])
+
+        if not file_paths:
+            return "Error: No file paths provided."
+
+        if artifact_names and len(artifact_names) != len(file_paths):
+            return "Error: artifact_names length must match file_paths length."
+
+        if not tool_context._invocation_context.artifact_service:
+            return "Error: Artifact service is not available in this context."
+
+        try:
+            working_dir = get_session_path(session_id=tool_context.session.id)
+
+            saved_artifacts = []
+            for idx, rel_path in enumerate(file_paths):
+                file_path = (working_dir / rel_path).resolve()
+
+                # Security: Ensure file is within working directory
+                if not file_path.is_relative_to(working_dir):
+                    logger.warning(f"Skipping file outside working directory: {rel_path}")
+                    continue
+
+                # Check file exists
+                if not file_path.exists():
+                    logger.warning(f"File not found: {rel_path}")
+                    continue
+
+                # Check file size
+                file_size = file_path.stat().st_size
+                if file_size > MAX_ARTIFACT_SIZE_BYTES:
+                    size_mb = file_size / (1024 * 1024)
+                    logger.warning(f"File too large: {rel_path} ({size_mb:.1f} MB)")
+                    continue
+
+                # Determine artifact name
+                artifact_name = artifact_names[idx] if artifact_names else file_path.name
+
+                # Read file data and detect MIME type
+                file_data = file_path.read_bytes()
+                mime_type = self._detect_mime_type(file_path)
+
+                # Create artifact Part
+                artifact_part = types.Part.from_bytes(data=file_data, mime_type=mime_type)
+
+                # Save to artifact service
+                version = await tool_context.save_artifact(
+                    filename=artifact_name,
+                    artifact=artifact_part,
+                )
+
+                size_kb = file_size / 1024
+                saved_artifacts.append(f"{artifact_name} (v{version}, {size_kb:.1f} KB)")
+                logger.info(f"Saved artifact: {artifact_name} v{version} ({size_kb:.1f} KB)")
+
+            if not saved_artifacts:
+                return "No valid files were saved as artifacts."
+
+            return f"Saved {len(saved_artifacts)} file(s) for download:\n" + "\n".join(
+                f" • {artifact}" for artifact in saved_artifacts
+            )
+
+        except Exception as e:
+            logger.error("Error returning artifacts: %s", e, exc_info=True)
+            return f"An error occurred while returning artifacts: {e}"
+
+    def _detect_mime_type(self, file_path: Path) -> str:
+        """Detect MIME type from file extension.
+
+        Args:
+            file_path: Path to the file
+
+        Returns:
+            MIME type string, defaults to 'application/octet-stream' if unknown
+        """
+        mime_type, _ = mimetypes.guess_type(str(file_path))
+        return mime_type or "application/octet-stream"
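A small self-contained sketch of the MIME fallback behavior implemented by _detect_mime_type (file names are illustrative):

import mimetypes

for name in ["outputs/report.pdf", "outputs/data.csv", "outputs/blob.unknownext"]:
    mime, _ = mimetypes.guess_type(name)
    # Unrecognized extensions fall back to a generic binary type.
    print(name, "->", mime or "application/octet-stream")
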
kagent/adk/artifacts/session_path.py
ADDED

@@ -0,0 +1,106 @@
+import logging
+import tempfile
+from pathlib import Path
+
+logger = logging.getLogger("kagent_adk." + __name__)
+
+# Cache of initialized session paths to avoid re-creating symlinks
+_session_path_cache: dict[str, Path] = {}
+
+
+def initialize_session_path(session_id: str, skills_directory: str) -> Path:
+    """Initialize a session's working directory with skills symlink.
+
+    This is called by SkillsPlugin.before_agent_callback() to ensure the session
+    is set up before any tools run. Creates the directory structure and symlink
+    to the skills directory.
+
+    Directory structure:
+        /tmp/kagent/{session_id}/
+        ├── skills/ -> symlink to skills_directory (read-only shared skills)
+        ├── uploads/ -> staged user files (temporary)
+        └── outputs/ -> generated files for return
+
+    Args:
+        session_id: The unique ID of the current session.
+        skills_directory: Path to the shared skills directory.
+
+    Returns:
+        The resolved path to the session's root directory.
+    """
+    # Return cached path if already initialized
+    if session_id in _session_path_cache:
+        return _session_path_cache[session_id]
+
+    # Initialize new session path
+    base_path = Path(tempfile.gettempdir()) / "kagent"
+    session_path = base_path / session_id
+
+    # Create working directories
+    (session_path / "uploads").mkdir(parents=True, exist_ok=True)
+    (session_path / "outputs").mkdir(parents=True, exist_ok=True)
+
+    # Create symlink to skills directory
+    skills_mount = Path(skills_directory)
+    skills_link = session_path / "skills"
+    if skills_mount.exists() and not skills_link.exists():
+        try:
+            skills_link.symlink_to(skills_mount)
+            logger.debug(f"Created symlink: {skills_link} -> {skills_mount}")
+        except FileExistsError:
+            # Symlink already exists (race condition from concurrent session setup)
+            pass
+        except Exception as e:
+            # Log but don't fail - skills can still be accessed via absolute path
+            logger.warning(f"Failed to create skills symlink for session {session_id}: {e}")
+
+    # Cache and return
+    resolved_path = session_path.resolve()
+    _session_path_cache[session_id] = resolved_path
+    return resolved_path
+
+
+def get_session_path(session_id: str) -> Path:
+    """Get the working directory path for a session.
+
+    This function retrieves the cached session path that was initialized by
+    SkillsPlugin. If the session hasn't been initialized (plugin not used),
+    it falls back to auto-initialization with default /skills directory.
+
+    Tools should call this function to get their working directory. The session
+    must be initialized by SkillsPlugin before tools run, which happens automatically
+    via the before_agent_callback() hook.
+
+    Args:
+        session_id: The unique ID of the current session.
+
+    Returns:
+        The resolved path to the session's root directory.
+
+    Note:
+        If session is not initialized, automatically initializes with /skills.
+        For custom skills directories, ensure SkillsPlugin is installed.
+    """
+    # Return cached path if already initialized
+    if session_id in _session_path_cache:
+        return _session_path_cache[session_id]
+
+    # Fallback: auto-initialize with default /skills
+    logger.warning(
+        f"Session {session_id} not initialized by SkillsPlugin. "
+        f"Auto-initializing with default /skills. "
+        f"Install SkillsPlugin for custom skills directories."
+    )
+    return initialize_session_path(session_id, "/skills")
+
+
+def clear_session_cache(session_id: str | None = None) -> None:
+    """Clear cached session path(s).
+
+    Args:
+        session_id: Specific session to clear. If None, clears all cached sessions.
+    """
+    if session_id:
+        _session_path_cache.pop(session_id, None)
+    else:
+        _session_path_cache.clear()
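A usage sketch against a local temp directory (the session ID is a placeholder; /skills need not exist, in which case the symlink step is simply skipped):

from kagent.adk.artifacts import clear_session_cache, initialize_session_path

# First call creates <tmpdir>/kagent/demo-session/{uploads,outputs} and caches
# the resolved path; later calls (and get_session_path) hit the cache.
path = initialize_session_path("demo-session", "/skills")
print(path, (path / "outputs").is_dir())

clear_session_cache("demo-session")  # forget the cached path for this session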